diff --git a/CMakeLists.txt b/CMakeLists.txt index 7097d58f1f4cbb1e00771fbb4baa9002334ff0b0..82e4fabc20801f431f60f6264536b63ac44e827a 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -301,16 +301,16 @@ else(EXTERNAL_BOOST) endif() endif() # Check BLAS/LAPACK -if(CMAKE_MINOR_VERSION GREATER_EQUAL 14) - set(BLA_VENDOR Intel10_64lp) -else() - set(BLA_VENDOR Intel) -endif() +set(BLA_VENDOR Intel10_64lp) find_package(LAPACK) if(NOT LAPACK_FOUND) - set(BLA_VENDOR All) - find_package(LAPACK REQUIRED) + set(BLA_VENDOR Intel) + find_package(LAPACK) + if(NOT LAPACK_FOUND) + set(BLA_VENDOR All) + find_package(LAPACK REQUIRED) + endif() endif() list(GET LAPACK_LIBRARIES 0 LAPACK_LIBRARY) diff --git a/src/classification/ConvexHull1D.hpp b/src/classification/ConvexHull1D.hpp index c3294940b5768a6c0ba9bdfecec8f5500dafc647..d932a7bc9a7ff598e82604e4f05d02069680d279 100644 --- a/src/classification/ConvexHull1D.hpp +++ b/src/classification/ConvexHull1D.hpp @@ -67,7 +67,7 @@ public: * @param class_szs number of elements in each class * @returns The projection score for the particular feature */ - double overlap_1d(double* value, double width = 1e-5); + double overlap_1d(double* value, double width = 0.0); }; diff --git a/src/descriptor_identifier/SISSO_DI/SISSOClassifier.cpp b/src/descriptor_identifier/SISSO_DI/SISSOClassifier.cpp index 289a317cd50212c695ce03853b6e52263c785b17..172fa37d5600ca4e8bdaadae92b5bd00b2998cc9 100644 --- a/src/descriptor_identifier/SISSO_DI/SISSOClassifier.cpp +++ b/src/descriptor_identifier/SISSO_DI/SISSOClassifier.cpp @@ -231,7 +231,6 @@ void SISSOClassifier::l0_norm(std::vector<double>& prop, int n_dim) } } - std::vector<int> all_min_n_convex_overlap(_mpi_comm->size() * n_get_models); std::vector<double> all_min_svm_score(_mpi_comm->size() * n_get_models); std::vector<double> all_min_svm_margin(_mpi_comm->size() * n_get_models); diff --git a/src/feature_creation/feature_space/FeatureSpace.cpp b/src/feature_creation/feature_space/FeatureSpace.cpp index 
1d20a41fa0d90a030d2d96568100ad9572a269bc..f75ae553b063b6ac5456ff61cd19a58ee7f45a51 100644 --- a/src/feature_creation/feature_space/FeatureSpace.cpp +++ b/src/feature_creation/feature_space/FeatureSpace.cpp @@ -103,55 +103,37 @@ void FeatureSpace::initialize_fs() else if(_max_phi - _n_rung_generate < _n_rung_store) throw std::logic_error("Requesting to store more rungs than what can be pre-generated."); - if(_mpi_comm->rank() == 0) - { - std::ofstream out_file_stream = std::ofstream(); - std::ofstream sum_file_stream = std::ofstream(); - out_file_stream.open(_feature_space_file); - sum_file_stream.open(_feature_space_summary_file); - out_file_stream << std::setw(14) <<std::left << "# FEAT_ID" << "Feature Postfix Expression (RPN)" << std::endl; - sum_file_stream << std::setw(14) <<std::left << "# FEAT_ID" << std::setw(24) << std::left << "Score" << "Feature Expression" << std::endl; - out_file_stream.close(); - sum_file_stream.close(); - } - - if(_project_type.compare("regression") == 0) - { - _project = project_funcs::project_r2; - _project_no_omp = project_funcs::project_r2_no_omp; - } - else if(_project_type.compare("classification") == 0) + node_value_arrs::set_task_sz_train(_task_sizes); + int n_max_ops = 0; + for(int rr = 0; rr < _max_phi - _n_rung_store; ++rr) + n_max_ops += std::pow(2, rr); + if((n_max_ops > _phi_0.size()) && (_n_rung_store == 0)) { - _project = project_funcs::project_classify; - _project_no_omp = project_funcs::project_classify_no_omp; + std::cerr << "WARNING: Setting _n_rung_store to 1 to prevent possible overwrite issues" << std::endl; + ++_n_rung_store; + _n_rung_generate -= (_n_rung_generate == 1) && (_n_rung_store + _n_rung_generate > _max_phi); } - else if(_project_type.compare("log_regression") == 0) - { - if(_task_sizes.size() > 1) - throw std::logic_error("Log Regression can not be done using multiple tasks."); - if(*std::min_element(_prop.begin(), _prop.end()) < 0.0) - throw std::logic_error("Log Regression can not be if the 
property is negative."); + initialize_fs_output_files(); + project_funcs::set_project_fxn(project_type, _task_sizes.size(), _project, _project_no_omp); + comp_feats::set_is_valid_fxn(project_type, _cross_cor_max, _n_samp, _is_valid, _is_valid_feat_list); + set_op_lists(); - _project = project_funcs::project_log_r2; - _project_no_omp = project_funcs::project_log_r2_no_omp; - } - else - throw std::logic_error("Wrong projection type passed to FeatureSpace constructor."); + double start = omp_get_wtime(); + generate_feature_space(prop); + _mpi_comm->barrier(); + double duration = omp_get_wtime() - start; + if(_mpi_comm->rank() == 0) + std::cout << "time to generate feat sapce: " << duration << " s" << std::endl; - if(_cross_cor_max < 0.99999) - { - _is_valid = comp_feats::valid_feature_against_selected; - _is_valid_feat_list = comp_feats::valid_feature_against_selected_feat_list; - } - else - { - _is_valid = comp_feats::valid_feature_against_selected_max_corr_1; - _is_valid_feat_list = comp_feats::valid_feature_against_selected_max_corr_1_feat_list; - } + mpi_reduce_op::set_op(_project_type, _cross_cor_max, _n_sis_select); - mpi_reduce_op::set_op(_cross_cor_max, _n_sis_select); + _scores.reserve(_phi.size()); + _scores.resize(_phi.size()); +} +void FeatureSpace::set_op_lists() +{ for(auto & op : _allowed_ops) { if((op.compare("add") == 0) || (op.compare("mult") == 0) || (op.compare("abs_diff") == 0) || (op.compare("sub") == 0)) @@ -173,17 +155,21 @@ void FeatureSpace::initialize_fs() _un_param_operators.push_back(allowed_op_maps::unary_param_operator_map[op]); } #endif +} - _mpi_comm->barrier(); - double start = omp_get_wtime(); - generate_feature_space(); - _mpi_comm->barrier(); - double duration = omp_get_wtime() - start; +void FeatureSpace::initialize_fs_output_files() +{ if(_mpi_comm->rank() == 0) - std::cout << "time to generate feat sapce: " << duration << " s" << std::endl; - - _scores.reserve(_phi.size()); - _scores.resize(_phi.size()); + { + std::ofstream 
out_file_stream = std::ofstream(); + std::ofstream sum_file_stream = std::ofstream(); + out_file_stream.open(_feature_space_file); + sum_file_stream.open(_feature_space_summary_file); + out_file_stream << std::setw(14) <<std::left << "# FEAT_ID" << "Feature Postfix Expression (RPN)" << std::endl; + sum_file_stream << std::setw(14) <<std::left << "# FEAT_ID" << std::setw(24) << std::left << "Score" << "Feature Expression" << std::endl; + out_file_stream.close(); + sum_file_stream.close(); + } } #ifdef PARAMETERIZE @@ -595,7 +581,7 @@ void FeatureSpace::project_generated(double* prop, int size, std::vector<node_pt #pragma omp for schedule(dynamic) for(auto feat = _phi.begin() + _start_gen.back() + _mpi_comm->rank(); feat < _phi.end(); feat += _mpi_comm->size()) { - unsigned long int feat_ind = _phi.size() + _n_sis_select * omp_get_num_threads(); + unsigned long int feat_ind = node_value_arrs::N_STORE_FEATURES + _n_sis_select * (omp_get_num_threads() + _mpi_comm->size()); node_value_arrs::clear_temp_reg_thread(); std::vector<node_ptr> generated_phi; @@ -620,9 +606,7 @@ void FeatureSpace::project_generated(double* prop, int size, std::vector<node_pt int ii = 0; while((ii < inds.size()) && (scores[inds[ii]] < -1.0)) - { ++ii; - } while((ii < inds.size()) && ((scores[inds[ii]] < worst_score) || (phi_sel_private.size() < _n_sis_select))) { @@ -631,16 +615,15 @@ void FeatureSpace::project_generated(double* prop, int size, std::vector<node_pt { if(scores_sel_private.size() == _n_sis_select) { - phi_sel_private[worst_score_ind]->set_selected(false); - phi_sel_private[worst_score_ind]->set_d_mat_ind(-1); - - generated_phi[inds[ii]]->reindex(_phi.size() + worst_score_ind + _n_sis_select * omp_get_thread_num()); + generated_phi[inds[ii]]->reindex(node_value_arrs::N_STORE_FEATURES + worst_score_ind + _n_sis_select * (omp_get_thread_num() + _mpi_comm->size())); + generated_phi[inds[ii]]->set_value(); phi_sel_private[worst_score_ind] = generated_phi[inds[ii]]; 
scores_sel_private[worst_score_ind] = cur_score; } else { - generated_phi[inds[ii]]->reindex(_phi.size() + scores_sel_private.size() + _n_sis_select * omp_get_thread_num()); + generated_phi[inds[ii]]->reindex(node_value_arrs::N_STORE_FEATURES + scores_sel_private.size() + _n_sis_select * (omp_get_thread_num() + _mpi_comm->size())); + generated_phi[inds[ii]]->set_value(); phi_sel_private.push_back(generated_phi[inds[ii]]); scores_sel_private.push_back(cur_score); } @@ -658,14 +641,19 @@ void FeatureSpace::project_generated(double* prop, int size, std::vector<node_pt { if(((phi_sel.size() < _n_sis_select) || (scores_sel_private[sc] < scores_sel[worst_score_ind])) && _is_valid_feat_list(phi_sel_private[sc]->value_ptr(0), _n_samp, _cross_cor_max, phi_sel, scores_sel, scores_sel_private[sc])) { + if(phi_sel.size() == _n_sis_select) { scores_sel[worst_score_ind] = scores_sel_private[sc]; + phi_sel_private[sc]->reindex(node_value_arrs::N_STORE_FEATURES + worst_score_ind + _n_sis_select * _mpi_comm->rank()); + phi_sel_private[sc]->set_value(); phi_sel[worst_score_ind] = phi_sel_private[sc]; } else { scores_sel.push_back(scores_sel_private[sc]); + phi_sel_private[sc]->reindex(node_value_arrs::N_STORE_FEATURES + phi_sel.size() + _n_sis_select * _mpi_comm->rank()); + phi_sel_private[sc]->set_value(); phi_sel.push_back(phi_sel_private[sc]); } worst_score_ind = std::max_element(scores_sel.begin(), scores_sel.end()) - scores_sel.begin(); @@ -687,7 +675,7 @@ void FeatureSpace::sis(std::vector<double>& prop) sum_file_stream.open(_feature_space_summary_file, std::ios::app); std::vector<node_ptr> phi_sel; - std::vector<double> scores_sel(_n_sis_select, 0.0); + std::vector<double> scores_sel(_n_sis_select, std::numeric_limits<double>::infinity()); phi_sel.reserve(_n_sis_select); @@ -741,30 +729,23 @@ void FeatureSpace::sis(std::vector<double>& prop) std::cout << "Time to get best features on rank : " << omp_get_wtime() - start << " s" << std::endl; start = omp_get_wtime(); + + 
for(auto& feat : phi_sel) + { + feat->set_selected(false); + feat->set_d_mat_ind(-1); + } + if(_n_rung_generate > 0) { phi_sel.resize(cur_feat_local); - scores_sel.resize(cur_feat_local); - project_generated(prop.data(), prop.size(), phi_sel, scores_sel); - node_value_arrs::clear_temp_reg(); - node_value_arrs::clear_temp_test_reg(); - for(auto& feat : _phi) - { - feat->set_selected(false); - feat->set_d_mat_ind(-1); - } _mpi_comm->barrier(); if(_mpi_comm->rank() == 0) std::cout << "Projection time for features generated on the fly: " << omp_get_wtime() - start << " s" << std::endl; } - for(auto& feat : phi_sel) - { - feat->set_selected(false); - feat->set_d_mat_ind(-1); - } std::fill_n(&scores_sel_all[cur_feat], _n_sis_select, 0.0); // If we are only on one process then phi_sel are the selected features start = omp_get_wtime(); @@ -776,7 +757,6 @@ void FeatureSpace::sis(std::vector<double>& prop) local_sel.push_back(mpi_reduce_op::make_node_sc_pair(phi_sel[ff], scores_sel[ff])); std::vector<node_sc_pair> selected(_n_sis_select); - mpi::all_reduce( *_mpi_comm, local_sel, diff --git a/src/feature_creation/feature_space/FeatureSpace.hpp b/src/feature_creation/feature_space/FeatureSpace.hpp index 76b81ba6239abedb358cff1357cfd1457fede952..cf55c80b3c66c76343d1e6f3dbcc26285fe70461 100644 --- a/src/feature_creation/feature_space/FeatureSpace.hpp +++ b/src/feature_creation/feature_space/FeatureSpace.hpp @@ -130,6 +130,15 @@ public: */ void initialize_fs(); + /** + * @brief Uses _allowed_ops to set the operator lists + */ + void set_op_lists(); + + /** + * @brief Initializes the output files for SIS + */ + void initialize_fs_output_files(); /** * @brief Generate the full feature set from the allowed operators and initial feature set * @details populates phi with all features from an initial set and the allowed operators diff --git a/src/feature_creation/node/FeatureNode.cpp b/src/feature_creation/node/FeatureNode.cpp index 
de7c9b37e6deee36da3f5a43fcfb26169a96ac11..1d3b293bf003fb8413456e2a1f1ab84992f23e0c 100644 --- a/src/feature_creation/node/FeatureNode.cpp +++ b/src/feature_creation/node/FeatureNode.cpp @@ -21,6 +21,21 @@ FeatureNode::FeatureNode(unsigned long int feat_ind, std::string expr, std::vect FeatureNode::~FeatureNode() {} +bool FeatureNode::is_const() +{ + bool is_c = false; + int pos = 0; + + double* val_ptr = value_ptr(); + for(auto& sz : node_value_arrs::TASK_SZ_TRAIN) + { + double mean = util_funcs::mean(val_ptr + pos, sz); + is_c = is_c || std::all_of(val_ptr + pos, val_ptr + pos + sz, [&mean](double d){return std::abs(d - mean) < 1e-12;}); + pos += sz; + } + return is_c; +} + void FeatureNode::update_add_sub_leaves(std::map<std::string, int>& add_sub_leaves, int pl_mn, int& expected_abs_tot) { if(add_sub_leaves.count(_expr) > 0) diff --git a/src/feature_creation/node/FeatureNode.hpp b/src/feature_creation/node/FeatureNode.hpp index efdddbdc6b6f7d061dd55176d468c3be94f567a6..83a71e749ebac129f2efbbab39f671cea5e41106 100644 --- a/src/feature_creation/node/FeatureNode.hpp +++ b/src/feature_creation/node/FeatureNode.hpp @@ -191,11 +191,7 @@ public: /** * @brief Check if feature is constant */ - inline bool is_const() - { - double mean = util_funcs::mean(value_ptr(), _n_samp); - return std::all_of(value_ptr(), value_ptr() + _n_samp, [&mean](double d){return std::abs(d - mean) < 1e-12;}); - } + bool is_const(); /** * @brief Returns the type of node this is diff --git a/src/feature_creation/node/operator_nodes/OperatorNode.hpp b/src/feature_creation/node/operator_nodes/OperatorNode.hpp index 3dab50697b991c7325e7accbd2f867567a0c41f4..93b18863b3a782417cfdfba8a7382182428f9792 100644 --- a/src/feature_creation/node/operator_nodes/OperatorNode.hpp +++ b/src/feature_creation/node/operator_nodes/OperatorNode.hpp @@ -197,7 +197,9 @@ public: offset = (offset == -1) ? 
rung() : offset; if((rung() > node_value_arrs::N_RUNGS_STORED) && (node_value_arrs::temp_storage_reg(_arr_ind, offset) != _feat_ind)) + { set_value(offset); + } return node_value_arrs::get_value_ptr(_arr_ind, _feat_ind, offset); } @@ -214,7 +216,9 @@ public: { offset = (offset == -1) ? rung() : offset; if((rung() > node_value_arrs::N_RUNGS_STORED) && (node_value_arrs::temp_storage_test_reg(_arr_ind, offset) != _feat_ind)) + { set_test_value(offset); + } return node_value_arrs::get_test_value_ptr(_arr_ind, _feat_ind, offset); } @@ -223,15 +227,28 @@ public: /** * @brief Check if the feature contains NaN */ - inline bool is_nan(){return std::any_of(value_ptr(), value_ptr() + _n_samp, [](double d){return !std::isfinite(d);});} + inline bool is_nan() + { + double* val_ptr = value_ptr(); + return std::any_of(val_ptr, val_ptr + _n_samp, [](double d){return !std::isfinite(d);}); + } // DocString: op_node_is_const /** * @brief Check if feature is constant */ - inline bool is_const() + bool is_const() { - return util_funcs::stand_dev(value_ptr(), _n_samp) < 1.0e-13; + double* val_ptr = value_ptr(); + + bool is_c = false;//util_funcs::stand_dev(val_ptr, _n_samp) < 1.0e-13; + int pos = 0; + for(auto& sz : node_value_arrs::TASK_SZ_TRAIN) + { + is_c = is_c || (util_funcs::stand_dev(val_ptr + pos, sz) < 1.0e-13); + pos += sz; + } + return is_c; } // DocString: op_node_rung diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.cpp index a9e4339da5ead059377093032a960066da1bd572..52241255ed421ca40626ed553adf5ac5ccd74491 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.cpp @@ -12,13 +12,13 @@ void generateAbsNode(std::vector<node_ptr>& feat_list, node_ptr feat, unsigned l if(*std::min_element(val_ptr, val_ptr + 
feat->n_samp()) > 0.0) return; - val_ptr = node_value_arrs::get_value_ptr(feat_ind, feat_ind, offset); - allowed_op_funcs::abs(feat->n_samp(), feat->value_ptr(offset + 2), 1.0, 0.0, val_ptr); + node_ptr new_feat = std::make_shared<AbsNode>(feat, feat_ind); + val_ptr = new_feat->value_ptr(); - if((util_funcs::stand_dev(val_ptr, feat->n_samp()) < 1.0e-13) || std::any_of(val_ptr, val_ptr + feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, feat->n_samp()) < l_bound)) + if(new_feat->is_const() || std::any_of(val_ptr, val_ptr + new_feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, new_feat->n_samp()) < l_bound)) return; - feat_list.push_back(std::make_shared<AbsNode>(feat, feat_ind)); + feat_list.push_back(new_feat); } AbsNode::AbsNode() diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.cpp index db41051b3419de2692a4a67e2eb98634be85284d..7422e108fb2a832757e130eabbe059b4fc577330 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.cpp @@ -15,24 +15,21 @@ void generateAbsDiffNode(std::vector<node_ptr>& feat_list, node_ptr feat_1, node return; int add_sub_tot_first = std::abs(add_sub_leaves.begin()->second); - if((std::abs(add_sub_tot_first) > 1) && std::all_of(add_sub_leaves.begin(), add_sub_leaves.end(), [&add_sub_tot_first](auto el){return std::abs(el.second) == add_sub_tot_first;})) return; - - int offset = std::max(feat_1->rung(), feat_2->rung()) + 1; - double* val_ptr = node_value_arrs::get_value_ptr(feat_ind, feat_ind, offset); - - allowed_op_funcs::sub(feat_1->n_samp(), 
feat_1->value_ptr(offset + 2), feat_2->value_ptr(offset + 1), 1.0, 0.0, val_ptr); - if(*std::min_element(val_ptr, val_ptr + feat_1->n_samp()) > l_bound) + node_ptr new_feat = std::make_shared<SubNode>(feat_1, feat_2, feat_ind); + double* val_ptr = new_feat->value_ptr(); + if(*std::min_element(val_ptr, val_ptr + new_feat->n_samp()) > l_bound) return; - allowed_op_funcs::abs_diff(feat_1->n_samp(), feat_1->value_ptr(offset + 2), feat_2->value_ptr(offset + 1), 1.0, 0.0, val_ptr); + new_feat = std::make_shared<AbsDiffNode>(feat_1, feat_2, feat_ind); + new_feat->set_value(); - if((util_funcs::stand_dev(val_ptr, feat_1->n_samp()) < 1.0e-13) || std::any_of(val_ptr, val_ptr + feat_1->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, feat_1->n_samp()) < l_bound)) + if(new_feat->is_const() || std::any_of(val_ptr, val_ptr + new_feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, new_feat->n_samp()) < l_bound)) return; - feat_list.push_back(std::make_shared<AbsDiffNode>(feat_1, feat_2, feat_ind)); + feat_list.push_back(new_feat); } AbsDiffNode::AbsDiffNode() diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.cpp index b01a02c1d28b9045df684548e56fdf9fe3f83372..ae6736434743042d5938b3f8bb10972be8ed9e89 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.cpp @@ -18,14 +18,12 @@ void generateAddNode(std::vector<node_ptr>& feat_list, node_ptr feat_1, node_ptr if((std::abs(add_sub_tot_first) > 1) && std::all_of(add_sub_leaves.begin(), add_sub_leaves.end(), [&add_sub_tot_first](auto el){return std::abs(el.second) == add_sub_tot_first;})) return; - int offset = feat_1->rung() + 1; - double* val_ptr = 
node_value_arrs::get_value_ptr(feat_ind, feat_ind, offset); - allowed_op_funcs::add(feat_1->n_samp(), feat_1->value_ptr(offset + 2), feat_2->value_ptr(offset + 1), 1.0, 0.0, val_ptr); + node_ptr new_feat = std::make_shared<AddNode>(feat_1, feat_2, feat_ind); + double* val_ptr = new_feat->value_ptr(); + if(new_feat->is_const() || std::any_of(val_ptr, val_ptr + new_feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, new_feat->n_samp()) < l_bound)) + return; - if((util_funcs::stand_dev(val_ptr, feat_1->n_samp()) < 1.0e-13) || std::any_of(val_ptr, val_ptr + feat_1->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, feat_1->n_samp()) < l_bound)) - return; - - feat_list.push_back(std::make_shared<AddNode>(feat_1, feat_2, feat_ind)); + feat_list.push_back(new_feat); } AddNode::AddNode() diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.cpp index 88b636c40ae53dff6b4f5c62ed7fe69c4e22f1da..2c3396fd68670b2e40942098b8e4f9904c788ca4 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.cpp @@ -6,14 +6,14 @@ void generateCbNode(std::vector<node_ptr>& feat_list, node_ptr feat, unsigned lo if((feat->type() == NODE_TYPE::CBRT) || (feat->type() == NODE_TYPE::SQ) || (feat->type() == NODE_TYPE::INV)) return; - int offset = feat->rung() + 1; - double* val_ptr = node_value_arrs::get_value_ptr(feat_ind, feat_ind, offset); - allowed_op_funcs::cb(feat->n_samp(), feat->value_ptr(offset + 2), 1.0, 0.0, val_ptr); + node_ptr new_feat = std::make_shared<CbNode>(feat, feat_ind); + double* val_ptr = new_feat->value_ptr(); - if((util_funcs::stand_dev(val_ptr, feat->n_samp()) < 1.0e-13) || std::any_of(val_ptr, 
val_ptr + feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, feat->n_samp()) < l_bound)) + // No is_const check since cube function can only be constant if feat is constant + if(std::any_of(val_ptr, val_ptr + new_feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, new_feat->n_samp()) < l_bound)) return; - feat_list.push_back(std::make_shared<CbNode>(feat, feat_ind)); + feat_list.push_back(new_feat); } CbNode::CbNode() @@ -30,7 +30,9 @@ CbNode::CbNode(node_ptr feat, unsigned long int feat_ind, double l_bound, double throw InvalidFeatureException(); set_value(); - if(is_nan() || is_const() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) + + // No is_const check since cube function can only be constant if feat is constant + if(is_nan() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) throw InvalidFeatureException(); } diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/parameterized_cube.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/parameterized_cube.cpp index 14befdb4aa0f09434b10302d7b18f86bcde3c34e..2792a7ab70deccbc199de3920f440327fc93abcd 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/parameterized_cube.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/parameterized_cube.cpp @@ -10,7 +10,8 @@ void generateCbParamNode(std::vector<node_ptr>& feat_list, node_ptr feat, unsign return; new_feat->set_value(); - if(new_feat->is_nan() || new_feat->is_const() || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) > u_bound) || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), 
new_feat->n_samp()) < l_bound)) + // No is_const check since cube function can only be constant if feat is constant + if(new_feat->is_nan() || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) > u_bound) || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) < l_bound)) return; feat_list.push_back(new_feat); @@ -26,7 +27,8 @@ CbParamNode::CbParamNode(node_ptr feat, unsigned long int feat_ind, double l_bou get_parameters(optimizer); set_value(); - if((std::abs(_params[0]) <= 1e-10) || is_nan() || is_const() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) + // No is_const check since cube function can only be constant if feat is constant + if((std::abs(_params[0]) <= 1e-10) || is_nan() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) throw InvalidFeatureException(); } diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.cpp index 5d5d14b5ba3903cd6c41620785d7fec386330996..3b6e5ac8d740123bd412e2a88e2621476eda0012 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.cpp @@ -6,19 +6,14 @@ void generateCbrtNode(std::vector<node_ptr>& feat_list, node_ptr feat, unsigned if((feat->type() == NODE_TYPE::CB) || (feat->type() == NODE_TYPE::SQ) || (feat->type() == NODE_TYPE::SIX_POW) || (feat->type() == NODE_TYPE::INV)) return; - int offset = feat->rung() + 1; + node_ptr new_feat = std::make_shared<CbrtNode>(feat, feat_ind); + double* val_ptr = new_feat->value_ptr(); - double* val_ptr = feat->value_ptr(offset + 2); - if(*std::min_element(val_ptr, val_ptr + feat->n_samp()) < 0.0) + // No is_const check 
since cube root function can only be constant if feat is constant + if(std::any_of(val_ptr, val_ptr + new_feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, new_feat->n_samp()) < l_bound)) return; - val_ptr = node_value_arrs::get_value_ptr(feat_ind, feat_ind, offset); - allowed_op_funcs::cbrt(feat->n_samp(), feat->value_ptr(offset + 2), 1.0, 0.0, val_ptr); - - if((util_funcs::stand_dev(val_ptr, feat->n_samp()) < 1.0e-13) || std::any_of(val_ptr, val_ptr + feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, feat->n_samp()) < l_bound)) - return; - - feat_list.push_back(std::make_shared<CbrtNode>(feat, feat_ind)); + feat_list.push_back(new_feat); } CbrtNode::CbrtNode() @@ -39,9 +34,10 @@ CbrtNode::CbrtNode(node_ptr feat, unsigned long int feat_ind, double l_bound, do throw InvalidFeatureException(); set_value(); - if(is_nan() || is_const() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) - throw InvalidFeatureException(); + // No is_const check since cube root function can only be constant if feat is constant + if(is_nan() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) + throw InvalidFeatureException(); } void CbrtNode::update_add_sub_leaves(std::map<std::string, int>& add_sub_leaves, int pl_mn, int& expected_abs_tot) diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/parameterized_cube_root.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/parameterized_cube_root.cpp index ab9b657bde1e52c1f7772c4f55a4e392c71f17fa..ee913d0758b0dfca33639964a5f2ffca5e697f56 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/parameterized_cube_root.cpp +++ 
b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/parameterized_cube_root.cpp @@ -10,7 +10,8 @@ void generateCbrtParamNode(std::vector<node_ptr>& feat_list, node_ptr feat, unsi return; new_feat->set_value(); - if(new_feat->is_nan() || new_feat->is_const() || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) > u_bound) || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) < l_bound)) + // No is_const check since cube root function can only be constant if feat is constant + if(new_feat->is_nan() || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) > u_bound) || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) < l_bound)) return; feat_list.push_back(new_feat); @@ -26,7 +27,8 @@ CbrtParamNode::CbrtParamNode(node_ptr feat, unsigned long int feat_ind, double l _params.resize(n_params(), 0.0); get_parameters(optimizer); - if((std::abs(_params[0]) <= 1e-10) || is_nan() || is_const() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) + // No is_const check since cube root function can only be constant if feat is constant + if((std::abs(_params[0]) <= 1e-10) || is_nan() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) throw InvalidFeatureException(); } diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.cpp index afca45d4da890fb47c3caaf6d36f25a48bfb4c0d..b6462cf08c4e4ad1feba553b336dbb091735fc8d 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.cpp @@ -6,13 +6,13 @@ void generateCosNode(std::vector<node_ptr>& feat_list, node_ptr feat, unsigned l if(feat->unit() !=
Unit() || (feat->type() == NODE_TYPE::SIN) || (feat->type() == NODE_TYPE::COS)) return; - int offset = feat->rung() + 1; - double* val_ptr = node_value_arrs::get_value_ptr(feat_ind, feat_ind, offset); - allowed_op_funcs::cos(feat->n_samp(), feat->value_ptr(offset + 2), 1.0, 0.0, val_ptr); + node_ptr new_feat = std::make_shared<CosNode>(feat, feat_ind); + double* val_ptr = new_feat->value_ptr(); - if((util_funcs::stand_dev(val_ptr, feat->n_samp()) < 1.0e-13) || std::any_of(val_ptr, val_ptr + feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, feat->n_samp()) < l_bound)) + if(new_feat->is_const() || std::any_of(val_ptr, val_ptr + new_feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, new_feat->n_samp()) < l_bound)) return; - feat_list.push_back(std::make_shared<CosNode>(feat, feat_ind)); + + feat_list.push_back(new_feat); } CosNode::CosNode() diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.cpp index e26009893c74827b8d94ee766d88b2fca7f1575b..8dc797aeeea644ea5d883a1bc39c5915df028146 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.cpp @@ -14,14 +14,16 @@ void generateDivNode(std::vector<node_ptr>& feat_list, node_ptr feat_1, node_ptr if((div_mult_leaves.size() < 2) || (std::abs(std::accumulate(div_mult_leaves.begin(), div_mult_leaves.end(), -1.0*expected_abs_tot, [](double tot, auto el){return tot + std::abs(el.second);})) > 1e-12)) return; - int offset = std::max(feat_1->rung(), feat_2->rung()) + 1; - double* val_ptr = node_value_arrs::get_value_ptr(feat_ind, feat_ind, offset); - allowed_op_funcs::div(feat_1->n_samp(), feat_1->value_ptr(offset + 2), 
feat_2->value_ptr(offset + 1), 1.0, 0.0, val_ptr); + double div_mult_tot_first = div_mult_leaves.begin()->second; + if((std::abs(div_mult_tot_first) != 1.0) && std::all_of(div_mult_leaves.begin(), div_mult_leaves.end(), [&div_mult_tot_first](auto el){return el.second == div_mult_tot_first;})) + return; - if((util_funcs::stand_dev(val_ptr, feat_1->n_samp()) < 1.0e-13) || std::any_of(val_ptr, val_ptr + feat_1->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, feat_1->n_samp()) < l_bound)) + node_ptr new_feat = std::make_shared<DivNode>(feat_1, feat_2, feat_ind); + double* val_ptr = new_feat->value_ptr(); + if(new_feat->is_const() || std::any_of(val_ptr, val_ptr + new_feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, new_feat->n_samp()) < l_bound)) return; - feat_list.push_back(std::make_shared<DivNode>(feat_1, feat_2, feat_ind)); + feat_list.push_back(new_feat); } DivNode::DivNode() diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.cpp index d765d807f950297f34aac9936daeb88852b5fa4c..f47902ec5452d983863b16c722899e3ca63023f6 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.cpp @@ -6,14 +6,13 @@ void generateExpNode(std::vector<node_ptr>& feat_list, node_ptr feat, unsigned l if((feat->unit() != Unit()) || (feat->type() == NODE_TYPE::NEG_EXP) || (feat->type() == NODE_TYPE::EXP) || (feat->type() == NODE_TYPE::ADD) || (feat->type() == NODE_TYPE::SUB) || (feat->type() == NODE_TYPE::LOG)) return; - int offset = feat->rung() + 1; - double* val_ptr = node_value_arrs::get_value_ptr(feat_ind, feat_ind, offset); - allowed_op_funcs::exp(feat->n_samp(), 
feat->value_ptr(offset + 2), 1.0, 0.0, val_ptr); + node_ptr new_feat = std::make_shared<ExpNode>(feat, feat_ind); + double* val_ptr = new_feat->value_ptr(); - if(std::any_of(val_ptr, val_ptr + feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, feat->n_samp()) < l_bound)) - return; - - feat_list.push_back(std::make_shared<ExpNode>(feat, feat_ind)); + // No is_const check since the exponential function can only be constant if feat is constant + if(std::any_of(val_ptr, val_ptr + new_feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, new_feat->n_samp()) < l_bound)) + return; + feat_list.push_back(new_feat); } ExpNode::ExpNode() @@ -33,6 +32,8 @@ ExpNode::ExpNode(node_ptr feat, unsigned long int feat_ind, double l_bound, doub throw InvalidFeatureException(); set_value(); + + // No is_const check since the exponential function can only be constant if feat is constant if(is_nan() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) throw InvalidFeatureException(); diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/parameterized_exponential.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/parameterized_exponential.cpp index a9c3046c109a7820cddc027e7428d13b91dbebe6..b98c89b03d98b39bfa60b1523f92cf6a8f66b942 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/parameterized_exponential.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/parameterized_exponential.cpp @@ -14,7 +14,8 @@ void generateExpParamNode(std::vector<node_ptr>& feat_list, node_ptr feat, unsig return; new_feat->set_value(); - if(new_feat->is_nan() || new_feat->is_const() || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) > u_bound) ||
(util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) < l_bound)) + // No is_const check since the exponential function can only be constant if feat is constant + if(new_feat->is_nan() || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) > u_bound) || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) < l_bound)) return; feat_list.push_back(new_feat); @@ -32,7 +33,8 @@ ExpParamNode::ExpParamNode(node_ptr feat, unsigned long int feat_ind, double l_b _params.resize(n_params(), 0.0); get_parameters(optimizer); - if((std::abs(_params[0]) <= 1e-10) || is_nan() || is_const() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) + // No is_const check since the exponential function can only be constant if feat is constant + if((std::abs(_params[0]) <= 1e-10) || is_nan() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) throw InvalidFeatureException(); } diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.cpp index e5832e534c99358383b1dd118fbf7a6b642a7020..0b8cd16850f09d4ec60765e15cc2d4273182650b 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.cpp @@ -7,14 +7,14 @@ void generateInvNode(std::vector<node_ptr>& feat_list, node_ptr feat, unsigned l if((feat->type() == NODE_TYPE::DIV) || (feat->type() == NODE_TYPE::EXP) || (feat->type() == NODE_TYPE::NEG_EXP) || (feat->type() == NODE_TYPE::INV)) return; - int offset = feat->rung() + 1; - double* val_ptr = node_value_arrs::get_value_ptr(feat_ind, feat_ind, offset); - allowed_op_funcs::inv(feat->n_samp(), feat->value_ptr(offset + 2), 1.0, 0.0, val_ptr); + node_ptr
new_feat = std::make_shared<InvNode>(feat, feat_ind); + double* val_ptr = new_feat->value_ptr(); - if((util_funcs::stand_dev(val_ptr, feat->n_samp()) < 1.0e-13) || std::any_of(val_ptr, val_ptr + feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, feat->n_samp()) < l_bound)) - return; + // No is_const check since the inverse function can only be constant if feat is constant + if(std::any_of(val_ptr, val_ptr + new_feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, new_feat->n_samp()) < l_bound)) + return; - feat_list.push_back(std::make_shared<InvNode>(feat, feat_ind)); + feat_list.push_back(new_feat); } InvNode::InvNode() @@ -31,7 +31,9 @@ InvNode::InvNode(node_ptr feat, unsigned long int feat_ind, double l_bound, doub throw InvalidFeatureException(); set_value(); - if(is_nan() || is_const() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) + + // No is_const check since the inverse function can only be constant if feat is constant + if(is_nan() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) throw InvalidFeatureException(); } diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/parameterized_inverse.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/parameterized_inverse.cpp index 51672fac9dfdcfa7e06b8ea1d93092e0a3909396..de97c39a40dbed4f9d7ad39e3a1907a4c84b5753 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/parameterized_inverse.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/parameterized_inverse.cpp @@ -14,7 +14,8 @@ void generateInvParamNode(std::vector<node_ptr>& feat_list, node_ptr feat, unsig return;
new_feat->set_value(); - if(new_feat->is_nan() || new_feat->is_const() || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) > u_bound) || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) < l_bound)) + // No is_const check since the inverse function can only be constant if feat is constant + if(new_feat->is_nan() || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) > u_bound) || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) < l_bound)) return; feat_list.push_back(new_feat); @@ -32,7 +33,8 @@ InvParamNode::InvParamNode(node_ptr feat, unsigned long int feat_ind, double l_b _params.resize(n_params(), 0.0); get_parameters(optimizer); - if((std::abs(_params[0]) <= 1e-10) || is_nan() || is_const() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) + // No is_const check since the inverse function can only be constant if feat is constant + if((std::abs(_params[0]) <= 1e-10) || is_nan() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) throw InvalidFeatureException(); } diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.cpp index 95e7f1ece42ac44a645f01118d9f3ff8c76a1a9d..454df9221929ab2cf6d4963bde72e13b1b8c8ae5 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.cpp @@ -6,14 +6,14 @@ void generateLogNode(std::vector<node_ptr>& feat_list, node_ptr feat, unsigned l if(feat->unit() != Unit() || (feat->type() == NODE_TYPE::NEG_EXP) || (feat->type() == NODE_TYPE::EXP) || (feat->type() == NODE_TYPE::DIV) || (feat->type() == NODE_TYPE::INV) || (feat->type() == NODE_TYPE::MULT) ||
(feat->type() == NODE_TYPE::LOG) || (feat->type() == NODE_TYPE::SIX_POW) || (feat->type() == NODE_TYPE::CB) || (feat->type() == NODE_TYPE::SQ) || (feat->type() == NODE_TYPE::CBRT) || (feat->type() == NODE_TYPE::SQRT)) return; - int offset = feat->rung() + 1; - double* val_ptr = node_value_arrs::get_value_ptr(feat_ind, feat_ind, offset); - allowed_op_funcs::log(feat->n_samp(), feat->value_ptr(offset + 2), 1.0, 0.0, val_ptr); + node_ptr new_feat = std::make_shared<LogNode>(feat, feat_ind); + double* val_ptr = new_feat->value_ptr(); - if((util_funcs::stand_dev(val_ptr, feat->n_samp()) < 1.0e-13) || std::any_of(val_ptr, val_ptr + feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, feat->n_samp()) < l_bound)) + // No is_const check since the log function can only be constant if feat is constant + if(std::any_of(val_ptr, val_ptr + new_feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, new_feat->n_samp()) < l_bound)) return; - feat_list.push_back(std::make_shared<LogNode>(feat, feat_ind)); + feat_list.push_back(new_feat); } LogNode::LogNode() @@ -33,7 +33,9 @@ LogNode::LogNode(node_ptr feat, unsigned long int feat_ind, double l_bound, doub throw InvalidFeatureException(); set_value(); - if(is_nan() || is_const() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) + + // No is_const check since the log function can only be constant if feat is constant + if(is_nan() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) throw InvalidFeatureException(); set_test_value(); diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/parameterized_log.cpp
b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/parameterized_log.cpp index 464ccb303d19d06245f0e590ef320cec81507e83..35770632bc12c9201dad811b05d8f316847a2235 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/parameterized_log.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/parameterized_log.cpp @@ -14,7 +14,8 @@ void generateLogParamNode(std::vector<node_ptr>& feat_list, node_ptr feat, unsig return; new_feat->set_value(); - if(new_feat->is_nan() || new_feat->is_const() || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) > u_bound) || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) < l_bound)) + // No is_const check since the log function can only be constant if feat is constant + if(new_feat->is_nan() || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) > u_bound) || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) < l_bound)) return; feat_list.push_back(new_feat); @@ -32,7 +33,8 @@ LogParamNode::LogParamNode(node_ptr feat, unsigned long int feat_ind, double l_b _params.resize(n_params(), 0.0); get_parameters(optimizer); - if((std::abs(_params[0]) <= 1e-10) || is_nan() || is_const() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) + // No is_const check since the log function can only be constant if feat is constant + if((std::abs(_params[0]) <= 1e-10) || is_nan() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) throw InvalidFeatureException(); } diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.cpp index
0d003fe22f3e376beefcc2be94ee339e4f711e97..0a7ad6edcd5010ec64c73b37d06a2e0cf86b4af0 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.cpp @@ -19,14 +19,12 @@ void generateMultNode(std::vector<node_ptr>& feat_list, node_ptr feat_1, node_pt if((std::abs(div_mult_tot_first) - 1.0 > 1e-12) && std::all_of(div_mult_leaves.begin(), div_mult_leaves.end(), [&div_mult_tot_first](auto el){return std::abs(el.second) == div_mult_tot_first;})) return; - int offset = std::max(feat_1->rung(), feat_2->rung()) + 1; - double* val_ptr = node_value_arrs::get_value_ptr(feat_ind, feat_ind, offset); - allowed_op_funcs::mult(feat_1->n_samp(), feat_1->value_ptr(offset + 2), feat_2->value_ptr(offset + 1), 1.0, 0.0, val_ptr); - - if((util_funcs::stand_dev(val_ptr, feat_1->n_samp()) < 1.0e-13) || std::any_of(val_ptr, val_ptr + feat_1->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, feat_1->n_samp()) < l_bound)) + node_ptr new_feat = std::make_shared<MultNode>(feat_1, feat_2, feat_ind); + double* val_ptr = new_feat->value_ptr(); + if(new_feat->is_const() || std::any_of(val_ptr, val_ptr + new_feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, new_feat->n_samp()) < l_bound)) return; - feat_list.push_back(std::make_shared<MultNode>(feat_1, feat_2, feat_ind)); + feat_list.push_back(new_feat); } MultNode::MultNode() diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.cpp index 5cccab349faccfd3ffe3b62ed41edb4a0b4ca342..f966de42c32f821ef5cd8717b84da97877777dbd 100644 --- 
a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.cpp @@ -6,14 +6,14 @@ void generateNegExpNode(std::vector<node_ptr>& feat_list, node_ptr feat, unsigne if(feat->unit() != Unit() || (feat->type() == NODE_TYPE::NEG_EXP) || (feat->type() == NODE_TYPE::EXP) || (feat->type() == NODE_TYPE::ADD) || (feat->type() == NODE_TYPE::SUB) || (feat->type() == NODE_TYPE::LOG)) return; - int offset = feat->rung() + 1; - double* val_ptr = node_value_arrs::get_value_ptr(feat_ind, feat_ind, offset); - allowed_op_funcs::neg_exp(feat->n_samp(), feat->value_ptr(offset + 2), 1.0, 0.0, val_ptr); + node_ptr new_feat = std::make_shared<NegExpNode>(feat, feat_ind); + double* val_ptr = new_feat->value_ptr(); - if((util_funcs::stand_dev(val_ptr, feat->n_samp()) < 1.0e-13) || std::any_of(val_ptr, val_ptr + feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, feat->n_samp()) < l_bound)) + // No is_const check since the negative exponential function can only be constant if feat is constant + if(std::any_of(val_ptr, val_ptr + new_feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, new_feat->n_samp()) < l_bound)) return; - feat_list.push_back(std::make_shared<NegExpNode>(feat, feat_ind)); + feat_list.push_back(new_feat); } NegExpNode::NegExpNode() @@ -33,7 +33,9 @@ NegExpNode::NegExpNode(node_ptr feat, unsigned long int feat_ind, double l_bound throw InvalidFeatureException(); set_value(); - if(is_nan() || is_const() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) + + // No is_const check since the negative exponential function can only be constant if feat is constant + if(is_nan() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp)
> u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) throw InvalidFeatureException(); } diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/parameterized_negative_exponential.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/parameterized_negative_exponential.cpp index b75078864edea758ac6800ca8d89e99aad444fec..dbf3334b8380bcc051d3fd854cb4d82b9e25cbae 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/parameterized_negative_exponential.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/parameterized_negative_exponential.cpp @@ -14,7 +14,8 @@ void generateNegExpParamNode(std::vector<node_ptr>& feat_list, node_ptr feat, un return; new_feat->set_value(); - if(new_feat->is_nan() || new_feat->is_const() || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) > u_bound) || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) < l_bound)) + // No is_const check since the negative exponential function can only be constant if feat is constant + if(new_feat->is_nan() || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) > u_bound) || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) < l_bound)) return; feat_list.push_back(new_feat); @@ -32,7 +33,8 @@ NegExpParamNode::NegExpParamNode(node_ptr feat, unsigned long int feat_ind, doub _params.resize(n_params(), 0.0); get_parameters(optimizer); - if((std::abs(_params[0]) <= 1e-10) || is_nan() || is_const() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) <
l_bound)) throw InvalidFeatureException(); } diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.cpp index 95ae383ac0fa69bd2826ca7ef21a39a7c1e654ff..8e242afb968d3f25d3027ca6fea77fa80c0d9c1e 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.cpp @@ -6,14 +6,13 @@ void generateSinNode(std::vector<node_ptr>& feat_list, node_ptr feat, unsigned l if(feat->unit() != Unit() || (feat->type() == NODE_TYPE::SIN) || (feat->type() == NODE_TYPE::COS)) return; - int offset = feat->rung() + 1; - double* val_ptr = node_value_arrs::get_value_ptr(feat_ind, feat_ind, offset); - allowed_op_funcs::sin(feat->n_samp(), feat->value_ptr(offset + 2), 1.0, 0.0, val_ptr); + node_ptr new_feat = std::make_shared<SinNode>(feat, feat_ind); + double* val_ptr = new_feat->value_ptr(); - if((util_funcs::stand_dev(val_ptr, feat->n_samp()) < 1.0e-13) || std::any_of(val_ptr, val_ptr + feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, feat->n_samp()) < l_bound)) - return; + if(new_feat->is_const() || std::any_of(val_ptr, val_ptr + new_feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, new_feat->n_samp()) < l_bound)) + return; - feat_list.push_back(std::make_shared<SinNode>(feat, feat_ind)); + feat_list.push_back(new_feat); } SinNode::SinNode() diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/sixth_power.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/sixth_power.cpp index 06470e3dfb5967e136c2e744f2d93802a1b3f964..5e579ad4566f6104bb3823e871eb7c0f95c66c37 100644 --- 
a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/sixth_power.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/sixth_power.cpp @@ -6,14 +6,13 @@ void generateSixPowNode(std::vector<node_ptr>& feat_list, node_ptr feat, unsigne if((feat->type() == NODE_TYPE::CBRT) || (feat->type() == NODE_TYPE::SQRT) || (feat->type() == NODE_TYPE::SQ) || (feat->type() == NODE_TYPE::CB) || (feat->type() == NODE_TYPE::INV)) return; - int offset = feat->rung() + 1; - double* val_ptr = node_value_arrs::get_value_ptr(feat_ind, feat_ind, offset); - allowed_op_funcs::sixth_pow(feat->n_samp(), feat->value_ptr(offset + 2), 1.0, 0.0, val_ptr); + node_ptr new_feat = std::make_shared<SixPowNode>(feat, feat_ind); + double* val_ptr = new_feat->value_ptr(); - if((util_funcs::stand_dev(val_ptr, feat->n_samp()) < 1.0e-13) || std::any_of(val_ptr, val_ptr + feat->n_samp(), [&u_bound](double d){return (!std::isfinite(d)) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, feat->n_samp()) < l_bound)) + if(new_feat->is_const() || std::any_of(val_ptr, val_ptr + new_feat->n_samp(), [&u_bound](double d){return (!std::isfinite(d)) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, new_feat->n_samp()) < l_bound)) return; - feat_list.push_back(std::make_shared<SixPowNode>(feat, feat_ind)); + feat_list.push_back(new_feat); } SixPowNode::SixPowNode() diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.cpp index c7bb5616e741e33cd16b7fea4934f9b3a934c67d..9ece13f7541bfb761cee877a6da33881678e6274 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.cpp @@ -6,14 +6,13 @@ void generateSqNode(std::vector<node_ptr>& feat_list, node_ptr feat, unsigned lo if((feat->type() == NODE_TYPE::SQRT) 
|| (feat->type() == NODE_TYPE::INV)) return; - int offset = feat->rung() + 1; - double* val_ptr = node_value_arrs::get_value_ptr(feat_ind, feat_ind, offset); - allowed_op_funcs::sq(feat->n_samp(), feat->value_ptr(offset + 2), 1.0, 0.0, val_ptr); + node_ptr new_feat = std::make_shared<SqNode>(feat, feat_ind); + double* val_ptr = new_feat->value_ptr(); - if((util_funcs::stand_dev(val_ptr, feat->n_samp()) < 1.0e-13) || std::any_of(val_ptr, val_ptr + feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, feat->n_samp()) < l_bound)) + if(new_feat->is_const() || std::any_of(val_ptr, val_ptr + new_feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, new_feat->n_samp()) < l_bound)) return; - feat_list.push_back(std::make_shared<SqNode>(feat, feat_ind)); + feat_list.push_back(new_feat); } SqNode::SqNode() diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/parameterized_square_root.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/parameterized_square_root.cpp index 4253636afb2c1184fde7ee22b705316345c2315c..99f29da3779dbfbcae5aded3a06207628e1ec8ca 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/parameterized_square_root.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/parameterized_square_root.cpp @@ -11,7 +11,8 @@ void generateSqrtParamNode(std::vector<node_ptr>& feat_list, node_ptr feat, unsi return; new_feat->set_value(); - if(new_feat->is_nan() || new_feat->is_const() || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) > u_bound) || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) < l_bound)) + // No is_const check since the square root function can only be constant if feat is constant + if(new_feat->is_nan() ||
(util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) > u_bound) || (util_funcs::max_abs_val<double>(new_feat->value_ptr(), new_feat->n_samp()) < l_bound)) return; feat_list.push_back(new_feat); @@ -27,7 +28,8 @@ SqrtParamNode::SqrtParamNode(node_ptr feat, unsigned long int feat_ind, double l _params.resize(n_params(), 0.0); get_parameters(optimizer); - if((std::abs(_params[0]) <= 1e-10) || is_nan() || is_const() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) + // No is_const check since the square root function can only be constant if feat is constant + if((std::abs(_params[0]) <= 1e-10) || is_nan() || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) > u_bound) || (util_funcs::max_abs_val<double>(value_ptr(), _n_samp) < l_bound)) throw InvalidFeatureException(); } diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.cpp index d7da0473a7b43279b21b21bfbb4543ae6c2ce716..562bc6698a4126dca80748dfd4a2ee3aeb31322b 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.cpp @@ -6,14 +6,13 @@ void generateSqrtNode(std::vector<node_ptr>& feat_list, node_ptr feat, unsigned if((feat->type() == NODE_TYPE::SQ) || (feat->type() == NODE_TYPE::CB) || (feat->type() == NODE_TYPE::SIX_POW) || (feat->type() == NODE_TYPE::CBRT) || (feat->type() == NODE_TYPE::INV)) return; - int offset = feat->rung() + 1; - double* val_ptr = node_value_arrs::get_value_ptr(feat_ind, feat_ind, offset); - allowed_op_funcs::sqrt(feat->n_samp(), feat->value_ptr(offset + 2), 1.0, 0.0, val_ptr); + node_ptr new_feat = std::make_shared<SqrtNode>(feat, feat_ind); + double* val_ptr = new_feat->value_ptr(); - if((util_funcs::stand_dev(val_ptr,
feat->n_samp()) < 1.0e-13) || std::any_of(val_ptr, val_ptr + feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, feat->n_samp()) < l_bound)) + if(std::any_of(val_ptr, val_ptr + new_feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, new_feat->n_samp()) < l_bound)) return; - feat_list.push_back(std::make_shared<SqrtNode>(feat, feat_ind)); + feat_list.push_back(new_feat); } SqrtNode::SqrtNode() diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.cpp index 3d9e8c970caf49d4e2fb9fd4e85238de82381d50..61cdc8495bb4cac28ffce68f1d41bca9bedfec30 100644 --- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.cpp +++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.cpp @@ -18,14 +18,13 @@ void generateSubNode(std::vector<node_ptr>& feat_list, node_ptr feat_1, node_ptr if((std::abs(add_sub_tot_first) > 1) && std::all_of(add_sub_leaves.begin(), add_sub_leaves.end(), [&add_sub_tot_first](auto el){return std::abs(el.second) == add_sub_tot_first;})) return; - int offset = std::max(feat_1->rung(), feat_2->rung()) + 1; - double* val_ptr = node_value_arrs::get_value_ptr(feat_ind, feat_ind, offset); - allowed_op_funcs::sub(feat_1->n_samp(), feat_1->value_ptr(offset + 2), feat_2->value_ptr(offset + 1), 1.0, 0.0, val_ptr); + node_ptr new_feat = std::make_shared<SubNode>(feat_1, feat_2, feat_ind); + double* val_ptr = new_feat->value_ptr(); - if((util_funcs::stand_dev(val_ptr, feat_1->n_samp()) < 1.0e-13) || std::any_of(val_ptr, val_ptr + feat_1->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, feat_1->n_samp()) < l_bound)) - return; + if(new_feat->is_const() || 
std::any_of(val_ptr, val_ptr + new_feat->n_samp(), [&u_bound](double d){return !std::isfinite(d) || (std::abs(d) > u_bound);}) || (util_funcs::max_abs_val<double>(val_ptr, new_feat->n_samp()) < l_bound)) + return; - feat_list.push_back(std::make_shared<SubNode>(feat_1, feat_2, feat_ind)); + feat_list.push_back(new_feat); } SubNode::SubNode() diff --git a/src/feature_creation/node/value_storage/nodes_value_containers.cpp b/src/feature_creation/node/value_storage/nodes_value_containers.cpp index e5a343a18ca931188af7248ecfacc0fa16e011ff..3bd1c88e6228b59cff52bd9d61ded90a60b7565b 100644 --- a/src/feature_creation/node/value_storage/nodes_value_containers.cpp +++ b/src/feature_creation/node/value_storage/nodes_value_containers.cpp @@ -11,14 +11,23 @@ int node_value_arrs::MAX_N_THREADS = omp_get_max_threads(); std::vector<int> node_value_arrs::TEMP_STORAGE_REG; std::vector<int> node_value_arrs::TEMP_STORAGE_TEST_REG; +std::vector<int> node_value_arrs::TASK_SZ_TRAIN; +std::vector<int> node_value_arrs::TASK_SZ_TEST; + std::vector<double> node_value_arrs::D_MATRIX; std::vector<double> node_value_arrs::VALUES_ARR; std::vector<double> node_value_arrs::TEST_VALUES_ARR; std::vector<double> node_value_arrs::TEMP_STORAGE_ARR; std::vector<double> node_value_arrs::TEMP_STORAGE_TEST_ARR; -void node_value_arrs::initialize_values_arr(int n_samples, int n_samples_test, int n_primary_feat) +void node_value_arrs::initialize_values_arr(int n_samples, int n_samples_test, int n_primary_feat, bool set_task_sz) { + if(set_task_sz) + TASK_SZ_TRAIN = {n_samples}; + + if(set_task_sz) + TASK_SZ_TEST = {n_samples_test}; + N_SAMPLES = n_samples; N_SAMPLES_TEST = n_samples_test; N_RUNGS_STORED = 0; @@ -34,6 +43,33 @@ void node_value_arrs::initialize_values_arr(int n_samples, int n_samples_test, i TEMP_STORAGE_TEST_REG = std::vector<int>(MAX_N_THREADS * (3 * N_STORE_FEATURES + 1), -1); } +void node_value_arrs::initialize_values_arr(std::vector<int> task_sz_train, std::vector<int> task_sz_test, int 
n_primary_feat) +{ + TASK_SZ_TRAIN = task_sz_train; + TASK_SZ_TEST = task_sz_test; + + initialize_values_arr( + std::accumulate(task_sz_train.begin(), task_sz_train.end(), 0), + std::accumulate(task_sz_test.begin(), task_sz_test.end(), 0), + n_primary_feat, + false + ); +} + +void node_value_arrs::set_task_sz_train(std::vector<int> task_sz_train) +{ + if(std::accumulate(task_sz_train.begin(), task_sz_train.end(), 0) != N_SAMPLES) + throw std::logic_error("The total number of samples has changed, task_sz_train is wrong."); + TASK_SZ_TRAIN = task_sz_train; +} + +void node_value_arrs::set_task_sz_test(std::vector<int> task_sz_test) +{ + if(std::accumulate(task_sz_test.begin(), task_sz_test.end(), 0) != N_SAMPLES_TEST) + throw std::logic_error("The total number of test samples has changed, task_sz_test is wrong."); + TASK_SZ_TEST = task_sz_test; +} + void node_value_arrs::resize_values_arr(int n_dims, int n_feat, bool use_temp) { N_RUNGS_STORED = n_dims; diff --git a/src/feature_creation/node/value_storage/nodes_value_containers.hpp b/src/feature_creation/node/value_storage/nodes_value_containers.hpp index b16ed2b9aa9ae96a7933680016c752014b807370..1e73ab5f094394fd506539a76e05de7e2b7a238f 100644 --- a/src/feature_creation/node/value_storage/nodes_value_containers.hpp +++ b/src/feature_creation/node/value_storage/nodes_value_containers.hpp @@ -12,6 +12,7 @@ #include <algorithm> #include <memory> +#include <numeric> #include <vector> #include <omp.h> @@ -28,12 +29,27 @@ namespace node_value_arrs extern std::vector<int> TEMP_STORAGE_REG; //!< Register to see which feature is stored in each slot for the training data extern std::vector<int> TEMP_STORAGE_TEST_REG; //!< Register to see which feature is stored in each slot for the test data + extern std::vector<int> TASK_SZ_TRAIN; //!< Number of training samples per task + extern std::vector<int> TASK_SZ_TEST; //!< Number of test sample per task + extern int N_SELECTED; //!< Number of features selected extern int N_SAMPLES; 
//!< Number of training samples for each feature extern int N_SAMPLES_TEST; //!< Number of test samples for each feature extern int N_STORE_FEATURES; //!< Number of features with stored values extern int N_RUNGS_STORED; //!< Number of rungs with values stored extern int MAX_N_THREADS; //!< Get the maximum number of threads possible + + /** + * @brief Initialize the node value arrays + * @details Using the size of the initial feature space constructor the storage arrays + * + * @param n_samples Number of training samples for each feature + * @param n_samples_test Number of test samples for each feature + * @param n_primary_feat Number of primary features + * @param set_task_sz If True reset the task_sz vectors + */ + void initialize_values_arr(int n_samples, int n_samples_test, int n_primary_feat, bool set_task_sz); + /** + * @brief Initialize the node value arrays + * @details Using the size of the initial feature space constructor the storage arrays @@ -42,7 +58,20 @@ namespace node_value_arrs * @param n_samples_test Number of test samples for each feature * @param n_primary_feat Number of primary features */ - void initialize_values_arr(int n_samples, int n_samples_test, int n_primary_feat); + inline void initialize_values_arr(int n_samples, int n_samples_test, int n_primary_feat) + { + initialize_values_arr(n_samples, n_samples_test, n_primary_feat, true); + } + + /** + * @brief Initialize the node value arrays + * @details Using the size of the initial feature space constructor the storage arrays + * + * @param task_sz_train Number of training samples per task + * @param task_sz_test Number of test sample per task + * @param n_primary_feat Number of primary features + */ + void initialize_values_arr(std::vector<int> task_sz_train, std::vector<int> task_sz_test, int n_primary_feat); /** * @brief Resize the node value arrays @@ -52,7 +81,7 @@ namespace node_value_arrs * @param n_feat number of features to store * @param use_temp If true keep the temporary_storage 
*/ - void resize_values_arr(int n_dims, int n_feat, bool use_temp); + void resize_values_arr(int n_dims, unsigned long int n_feat, bool use_temp); /** * @brief Initialize the descriptor matrix @@ -69,6 +98,20 @@ namespace node_value_arrs */ void resize_d_matrix_arr(int n_select); + /** + * @brief Reset the global TASK_SZ_TRAIN vector + * + * @param task_sz_train the new task_sz train + */ + void set_task_sz_train(std::vector<int> task_sz_train); + + /** + * @brief Reset the global TASK_SZ_TEST vector + * + * @param task_sz_test the new task_sz test + */ + void set_task_sz_test(std::vector<int> task_sz_test); + /** * @brief Get a reference slot/feature register of the training data * diff --git a/src/mpi_interface/MPI_ops.cpp b/src/mpi_interface/MPI_ops.cpp index 5502c9d9b5a33147f9703ff7d3afccc2484aef48..00c3cbf39980f66b9d243f58280dcdbc3dbd9ac2 100644 --- a/src/mpi_interface/MPI_ops.cpp +++ b/src/mpi_interface/MPI_ops.cpp @@ -5,15 +5,25 @@ std::function<bool(double*, int, double, std::vector<node_sc_pair>&, double)> mp double mpi_reduce_op::CROSS_COR_MAX; int mpi_reduce_op::N_SIS_SELECT; -void mpi_reduce_op::set_op(double cross_cor_max, int n_sis_select) +void mpi_reduce_op::set_op(std::string project_type, double cross_cor_max, int n_sis_select) { // MPI_Op_create(*select_top_feats, 1, &top_feats) N_SIS_SELECT = n_sis_select; CROSS_COR_MAX = cross_cor_max; - if(CROSS_COR_MAX < 0.99999) - IS_VALID = comp_feats::valid_feature_against_selected_feat_sc_list; + if(project_type.compare("classification") == 0) + { + if(CROSS_COR_MAX < 0.99999) + IS_VALID = static_cast<bool (*)(double*, int, double, std::vector<node_sc_pair>&, double)>(comp_feats::valid_feature_against_selected_spearman); + else + IS_VALID = static_cast<bool (*)(double*, int, double, std::vector<node_sc_pair>&, double)>(comp_feats::valid_feature_against_selected_spearman_max_corr_1); + } else - IS_VALID = comp_feats::valid_feature_against_selected_max_corr_1_feat_sc_list; + { + if(CROSS_COR_MAX < 0.99999) + IS_VALID = static_cast<bool (*)(double*, int, double, std::vector<node_sc_pair>&, double)>(comp_feats::valid_feature_against_selected_pearson); + else + IS_VALID = static_cast<bool (*)(double*, int, double, std::vector<node_sc_pair>&, double)>(comp_feats::valid_feature_against_selected_pearson_max_corr_1); + 
} } std::vector<node_sc_pair> mpi_reduce_op::select_top_feats(std::vector<node_sc_pair> in_vec_1, std::vector<node_sc_pair> in_vec_2) diff --git a/src/mpi_interface/MPI_ops.hpp b/src/mpi_interface/MPI_ops.hpp index 7ed0410da62d5987be962b0818829f509cedd440..2875213f2550d95b40c556f088b609a48ac66a1e 100644 --- a/src/mpi_interface/MPI_ops.hpp +++ b/src/mpi_interface/MPI_ops.hpp @@ -26,7 +26,7 @@ namespace mpi_reduce_op std::vector<node_sc_pair> select_top_feats(std::vector<node_sc_pair> in_vec_1, std::vector<node_sc_pair> in_vec_2); - void set_op(double cross_cor_max, int n_sis_select); + void set_op(std::string project_type, double cross_cor_max, int n_sis_select); } #endif diff --git a/src/python/bindings_docstring_keyed.cpp b/src/python/bindings_docstring_keyed.cpp index 2666215f5d04841b1ee982429b58a2be74c879e4..675eeac52953b556448fe528708791ae43459640 100644 --- a/src/python/bindings_docstring_keyed.cpp +++ b/src/python/bindings_docstring_keyed.cpp @@ -37,8 +37,10 @@ void sisso::register_all() sisso::feature_creation::node::registerSqrtNode(); sisso::feature_creation::node::registerSixPowNode(); + void (*init_val_ar)(int, int, int) = &node_value_arrs::initialize_values_arr; + def("phi_selected_from_file", &str2node::phi_selected_from_file_py); - def("initialize_values_arr", &node_value_arrs::initialize_values_arr); + def("initialize_values_arr", init_val_ar); def("initialize_d_matrix_arr", &node_value_arrs::initialize_d_matrix_arr); diff --git a/src/python/feature_creation/FeatureSpace.cpp b/src/python/feature_creation/FeatureSpace.cpp index f68e6f2d4452de2ecf64f6a042111afeda6f6180..88baa033496624b2c4139630a2e08f0b8e952027 100644 --- a/src/python/feature_creation/FeatureSpace.cpp +++ b/src/python/feature_creation/FeatureSpace.cpp @@ -83,7 +83,6 @@ FeatureSpace::FeatureSpace( { initialize_fs(); } - FeatureSpace::FeatureSpace( std::string feature_file, py::list phi_0, @@ -111,49 +110,9 @@ FeatureSpace::FeatureSpace( _n_samp(_phi_0[0]->n_samp()), _max_param_depth(-1) 
{ - if(_project_type.compare("regression") == 0) - { - _project = project_funcs::project_r2; - _project_no_omp = project_funcs::project_r2_no_omp; - } - else if(_project_type.compare("classification") == 0) - { - _project = project_funcs::project_classify; - _project_no_omp = project_funcs::project_classify_no_omp; - } - else if(_project_type.compare("log_regression") == 0) - { - if(_task_sizes.size() > 1) - throw std::logic_error("Log Regression can not be done using multiple tasks."); - _project = project_funcs::project_log_r2; - _project_no_omp = project_funcs::project_log_r2_no_omp; - } - else - { - throw std::logic_error("Wrong projection type passed to FeatureSpace constructor."); - } - - if(_cross_cor_max < 0.99999) - { - _is_valid = comp_feats::valid_feature_against_selected; - _is_valid_feat_list = comp_feats::valid_feature_against_selected_feat_list; - } - else - { - _is_valid = comp_feats::valid_feature_against_selected_max_corr_1; - _is_valid_feat_list = comp_feats::valid_feature_against_selected_max_corr_1_feat_list; - } - - if(_cross_cor_max < 0.99999) - { - _is_valid = comp_feats::valid_feature_against_selected; - _is_valid_feat_list = comp_feats::valid_feature_against_selected_feat_list; - } - else - { - _is_valid = comp_feats::valid_feature_against_selected_max_corr_1; - _is_valid_feat_list = comp_feats::valid_feature_against_selected_max_corr_1_feat_list; - } + project_funcs::set_project_fxn(project_type, _task_sizes.size(), _project, _project_no_omp); + comp_feats::set_is_valid_fxn(project_type, _cross_cor_max, _n_samp, _is_valid, _is_valid_feat_list); + mpi_reduce_op::set_op(_project_type, _cross_cor_max, _n_sis_select); std::vector<node_ptr> phi_temp = str2node::phi_from_file(feature_file, _phi_0); @@ -243,49 +202,9 @@ FeatureSpace::FeatureSpace( _n_samp(_phi_0[0]->n_samp()), _max_param_depth(-1) { - if(project_type.compare("regression") == 0) - { - _project = project_funcs::project_r2; - _project_no_omp = project_funcs::project_r2_no_omp; - } 
- else if(project_type.compare("classification") == 0) - { - _project = project_funcs::project_classify; - _project_no_omp = project_funcs::project_classify_no_omp; - } - else if(project_type.compare("log_regression") == 0) - { - if(_task_sizes.size() > 1) - throw std::logic_error("Log Regression can not be done using multiple tasks."); - _project = project_funcs::project_log_r2; - _project_no_omp = project_funcs::project_log_r2_no_omp; - } - else - { - throw std::logic_error("Wrong projection type passed to FeatureSpace constructor."); - } - - if(_cross_cor_max < 0.99999) - { - _is_valid = comp_feats::valid_feature_against_selected; - _is_valid_feat_list = comp_feats::valid_feature_against_selected_feat_list; - } - else - { - _is_valid = comp_feats::valid_feature_against_selected_max_corr_1; - _is_valid_feat_list = comp_feats::valid_feature_against_selected_max_corr_1_feat_list; - } - - if(_cross_cor_max < 0.99999) - { - _is_valid = comp_feats::valid_feature_against_selected; - _is_valid_feat_list = comp_feats::valid_feature_against_selected_feat_list; - } - else - { - _is_valid = comp_feats::valid_feature_against_selected_max_corr_1; - _is_valid_feat_list = comp_feats::valid_feature_against_selected_max_corr_1_feat_list; - } + project_funcs::set_project_fxn(project_type, _task_sizes.size(), _project, _project_no_omp); + comp_feats::set_is_valid_fxn(project_type, _cross_cor_max, _n_samp, _is_valid, _is_valid_feat_list); + mpi_reduce_op::set_op(_project_type, _cross_cor_max, _n_sis_select); std::vector<node_ptr> phi_temp = str2node::phi_from_file(feature_file, _phi_0); @@ -349,7 +268,6 @@ FeatureSpace::FeatureSpace( _scores.resize(_n_feat); } - py::list FeatureSpace::phi0_py() { py::list feat_lst; diff --git a/src/utils/compare_features.cpp b/src/utils/compare_features.cpp index 0f2cce1444971511a4a8b60d1486a7de304b7a92..1475bc2a177ae74f21c9f38f3304415f97282f6a 100644 --- a/src/utils/compare_features.cpp +++ b/src/utils/compare_features.cpp @@ -1,7 +1,50 @@ #include 
<utils/compare_features.hpp> std::vector<double> comp_feats::CORR_CHECK; -bool comp_feats::valid_feature_against_selected_max_corr_1(double* val_ptr, int n_samp, double cross_cor_max, std::vector<double>& scores_sel, double cur_score, int end_sel, int start_sel) +std::vector<double> comp_feats::RANK; +std::vector<int> comp_feats::INDEX; + +void comp_feats::set_is_valid_fxn( + const std::string project_type, + const double max_corr, + const int n_samp, + std::function<bool(double*, int, double, std::vector<double>&, double, int, int)>& is_valid, + std::function<bool(double*, int, double, std::vector<node_ptr>&, std::vector<double>&, double)>& is_valid_feat_list +) +{ + if(project_type.compare("classification") != 0) + { + if(max_corr < 0.99999) + { + is_valid = valid_feature_against_selected_pearson; + is_valid_feat_list = valid_feature_against_selected_pearson; + } + else + { + is_valid = valid_feature_against_selected_pearson_max_corr_1; + is_valid_feat_list = valid_feature_against_selected_pearson_max_corr_1; + } + } + else + { + // Resize the rank and index to fit all of the data for each thread with padding + RANK.resize(4 * omp_get_max_threads() * n_samp); + INDEX.resize(2 * omp_get_max_threads() * n_samp); + if(max_corr < 0.99999) + { + is_valid = valid_feature_against_selected_spearman; + is_valid_feat_list = valid_feature_against_selected_spearman; + } + else + { + is_valid = valid_feature_against_selected_spearman_max_corr_1; + is_valid_feat_list = valid_feature_against_selected_spearman_max_corr_1; + } + } +} + + +bool comp_feats::valid_feature_against_selected_pearson_max_corr_1(double* val_ptr, int n_samp, double cross_cor_max, std::vector<double>& scores_sel, double cur_score, int end_sel, int start_sel) { double base_val = util_funcs::r(val_ptr, val_ptr, n_samp); @@ -16,7 +59,7 @@ bool comp_feats::valid_feature_against_selected_max_corr_1(double* val_ptr, int return true; } -bool comp_feats::valid_feature_against_selected_max_corr_1_feat_list(double* 
val_ptr, int n_samp, double cross_cor_max, std::vector<node_ptr>& selected, std::vector<double>& scores_sel, double cur_score) +bool comp_feats::valid_feature_against_selected_pearson_max_corr_1(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_ptr>& selected, std::vector<double>& scores_sel, double cur_score) { double base_val = util_funcs::r(val_ptr, val_ptr, n_samp); @@ -31,8 +74,23 @@ bool comp_feats::valid_feature_against_selected_max_corr_1_feat_list(double* val return true; } +bool comp_feats::valid_feature_against_selected_pearson_max_corr_1(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_sc_pair>& out_vec, double cur_score) +{ + double base_val = util_funcs::r(val_ptr, val_ptr, n_samp); + + for(auto& feat_sc : out_vec) + { + if(abs(cur_score - std::get<1>(feat_sc)) > 1e-5) + continue; + + if((base_val - std::abs(util_funcs::r(std::get<0>(feat_sc)->value_ptr(1), val_ptr, n_samp))) < 1e-9) + return false; + } + return true; +} -bool comp_feats::valid_feature_against_selected(double* val_ptr, int n_samp, double cross_cor_max, std::vector<double>& scores_sel, double cur_score, int end_sel, int start_sel) + +bool comp_feats::valid_feature_against_selected_pearson(double* val_ptr, int n_samp, double cross_cor_max, std::vector<double>& scores_sel, double cur_score, int end_sel, int start_sel) { double base_val = util_funcs::r(val_ptr, val_ptr, n_samp); volatile bool is_valid = true; @@ -49,7 +107,7 @@ bool comp_feats::valid_feature_against_selected(double* val_ptr, int n_samp, dou return is_valid; } -bool comp_feats::valid_feature_against_selected_feat_list(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_ptr>& selected, std::vector<double>& scores_sel, double cur_score) +bool comp_feats::valid_feature_against_selected_pearson(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_ptr>& selected, std::vector<double>& scores_sel, double cur_score) { double base_val = util_funcs::r(val_ptr, 
val_ptr, n_samp); @@ -61,28 +119,110 @@ bool comp_feats::valid_feature_against_selected_feat_list(double* val_ptr, int n return true; } -bool comp_feats::valid_feature_against_selected_max_corr_1_feat_sc_list(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_sc_pair>& out_vec, double cur_score) +bool comp_feats::valid_feature_against_selected_pearson(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_sc_pair>& out_vec, double cur_score) { double base_val = util_funcs::r(val_ptr, val_ptr, n_samp); for(auto& feat_sc : out_vec) { - if(abs(cur_score - std::get<1>(feat_sc)) > 1e-5) + if((base_val - std::abs(util_funcs::r(std::get<0>(feat_sc)->value_ptr(1), val_ptr, n_samp))) < (1.0 - cross_cor_max + 1e-10)) + return false; + } + return true; +} + + +bool comp_feats::valid_feature_against_selected_spearman_max_corr_1(double* val_ptr, int n_samp, double cross_cor_max, std::vector<double>& scores_sel, double cur_score, int end_sel, int start_sel) +{ + double base_val = std::abs(util_funcs::spearman_r(val_ptr, val_ptr, &RANK[omp_get_thread_num() * 4 * n_samp], &RANK[(omp_get_thread_num() * 4 + 2) * n_samp], &INDEX[omp_get_thread_num() * 2 * n_samp], n_samp)); + + for(int dd = start_sel; dd < end_sel; ++dd) + { + if(abs(std::floor(cur_score) - std::floor(scores_sel[dd])) > 1e-5) continue; - if((base_val - std::abs(util_funcs::r(std::get<0>(feat_sc)->value_ptr(1), val_ptr, n_samp))) < 1e-9) + // Rank the new variable and take the Pearson correlation of the rank variables (val_ptr rank still in &RANK[(omp_get_thread_num() * 4 + 2) * n_samp]) + util_funcs::rank(node_value_arrs::get_d_matrix_ptr(dd), &RANK[omp_get_thread_num() * 4 * n_samp], &INDEX[omp_get_thread_num() * 2 * n_samp], n_samp); + if((base_val - std::abs(util_funcs::r(&RANK[omp_get_thread_num() * 4 * n_samp], &RANK[(omp_get_thread_num() * 4 + 2) * n_samp], n_samp))) < 1e-9) return false; } return true; } -bool comp_feats::valid_feature_against_selected_feat_sc_list(double* 
val_ptr, int n_samp, double cross_cor_max, std::vector<node_sc_pair>& out_vec, double cur_score) +bool comp_feats::valid_feature_against_selected_spearman_max_corr_1(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_ptr>& selected, std::vector<double>& scores_sel, double cur_score) { - double base_val = util_funcs::r(val_ptr, val_ptr, n_samp); + double base_val = std::abs(util_funcs::spearman_r(val_ptr, val_ptr, &RANK[omp_get_thread_num() * 4 * n_samp], &RANK[(omp_get_thread_num() * 4 + 2) * n_samp], &INDEX[omp_get_thread_num() * 2 * n_samp], n_samp)); + + for(int ff = 0; ff < selected.size(); ++ff) + { + if(abs(std::floor(cur_score) - std::floor(scores_sel[ff])) > 1e-5) + continue; + + // Rank the new variable and take the Pearson correlation of the rank variables (val_ptr rank still in &RANK[(omp_get_thread_num() * 4 + 2) * n_samp]) + util_funcs::rank(selected[ff]->value_ptr(1), &RANK[omp_get_thread_num() * 4 * n_samp], &INDEX[omp_get_thread_num() * 2 * n_samp], n_samp); + if((base_val - std::abs(util_funcs::r(&RANK[omp_get_thread_num() * 4 * n_samp], &RANK[(omp_get_thread_num() * 4 + 2) * n_samp], n_samp))) < 1e-9) + return false; + } + return true; +} + +bool comp_feats::valid_feature_against_selected_spearman_max_corr_1(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_sc_pair>& out_vec, double cur_score) +{ + double base_val = std::abs(util_funcs::spearman_r(val_ptr, val_ptr, &RANK[omp_get_thread_num() * 4 * n_samp], &RANK[(omp_get_thread_num() * 4 + 2) * n_samp], &INDEX[omp_get_thread_num() * 2 * n_samp], n_samp)); for(auto& feat_sc : out_vec) { - if((base_val - std::abs(util_funcs::r(std::get<0>(feat_sc)->value_ptr(1), val_ptr, n_samp))) < (1.0 - cross_cor_max + 1e-10)) + if(abs(std::floor(cur_score) - std::floor(std::get<1>(feat_sc))) > 1e-5) + continue; + + util_funcs::rank(std::get<0>(feat_sc)->value_ptr(1), &RANK[omp_get_thread_num() * 4 * n_samp], &INDEX[omp_get_thread_num() * 2 * n_samp], n_samp); + if((base_val - 
std::abs(util_funcs::r(&RANK[omp_get_thread_num() * 4 * n_samp], &RANK[(omp_get_thread_num() * 4 + 2) * n_samp], n_samp))) < 1e-9) + return false; + } + return true; +} + +bool comp_feats::valid_feature_against_selected_spearman(double* val_ptr, int n_samp, double cross_cor_max, std::vector<double>& scores_sel, double cur_score, int end_sel, int start_sel) +{ + double base_val = std::abs(util_funcs::spearman_r(val_ptr, val_ptr, &RANK[omp_get_thread_num() * 4 * n_samp], &RANK[(omp_get_thread_num() * 4 + 2) * n_samp], &INDEX[omp_get_thread_num() * 2 * n_samp], n_samp)); + volatile bool is_valid = true; + + #pragma omp parallel for schedule(dynamic) + for(int dd = start_sel; dd < end_sel; ++dd) + { + if(!is_valid) + continue; + + // Rank the new variable and take the Pearson correlation of the rank variables (val_ptr rank still in &RANK[(omp_get_thread_num() * 4 + 2) * n_samp]) + util_funcs::rank(node_value_arrs::get_d_matrix_ptr(dd), &RANK[omp_get_thread_num() * 4 * n_samp], &INDEX[omp_get_thread_num() * 2 * n_samp], n_samp); + if((base_val - std::abs(util_funcs::r(&RANK[omp_get_thread_num() * 4 * n_samp], &RANK[(omp_get_thread_num() * 4 + 2) * n_samp], n_samp))) < (1.0 - cross_cor_max + 1e-10)) + is_valid = false; + } + return is_valid; +} + +bool comp_feats::valid_feature_against_selected_spearman(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_ptr>& selected, std::vector<double>& scores_sel, double cur_score) +{ + double base_val = std::abs(util_funcs::spearman_r(val_ptr, val_ptr, &RANK[omp_get_thread_num() * 4 * n_samp], &RANK[(omp_get_thread_num() * 4 + 2) * n_samp], &INDEX[omp_get_thread_num() * 2 * n_samp], n_samp)); + + for(auto& feat : selected) + { + // Rank the new variable and take the Pearson correlation of the rank variables (val_ptr rank still in &RANK[(omp_get_thread_num() * 4 + 2) * n_samp]) + util_funcs::rank(feat->value_ptr(1), &RANK[omp_get_thread_num() * 4 * n_samp], &INDEX[omp_get_thread_num() * 2 * n_samp], n_samp); + 
if((base_val - std::abs(util_funcs::r(&RANK[omp_get_thread_num() * 4 * n_samp], &RANK[(omp_get_thread_num() * 4 + 2) * n_samp], n_samp))) < (1.0 - cross_cor_max + 1e-10)) + return false; + } + return true; +} + +bool comp_feats::valid_feature_against_selected_spearman(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_sc_pair>& out_vec, double cur_score) +{ + double base_val = std::abs(util_funcs::spearman_r(val_ptr, val_ptr, &RANK[omp_get_thread_num() * 4 * n_samp], &RANK[(omp_get_thread_num() * 4 + 2) * n_samp], &INDEX[omp_get_thread_num() * 2 * n_samp], n_samp)); + + for(auto& feat_sc : out_vec) + { + util_funcs::rank(std::get<0>(feat_sc)->value_ptr(1), &RANK[omp_get_thread_num() * 4 * n_samp], &INDEX[omp_get_thread_num() * 2 * n_samp], n_samp); + if((base_val - std::abs(util_funcs::r(&RANK[omp_get_thread_num() * 4 * n_samp], &RANK[(omp_get_thread_num() * 4 + 2) * n_samp], n_samp))) < (1.0 - cross_cor_max + 1e-10)) return false; + } + return true; diff --git a/src/utils/compare_features.hpp b/src/utils/compare_features.hpp index d8963e2bb73f3cb1ba244e6166cf646e152944b4..0badade62e81b40cc32c5698ac26a7e2e6e666a1 100644 --- a/src/utils/compare_features.hpp +++ b/src/utils/compare_features.hpp @@ -16,8 +16,28 @@ typedef std::tuple<node_ptr, double> node_sc_pair; namespace comp_feats { extern std::vector<double> CORR_CHECK; //!< vector for storing cross_corelation values + extern std::vector<double> RANK; //!< Global variable used to store the rank variables for Spearman correlation + extern std::vector<int> INDEX; //!< Global variable used to store the sorting indexes for Spearman correlation + + /** + * @brief Set the is_valid and is_valid_feat_list functions for SIS + * + * @param project_type Type of projection to perform + * @param max_corr Maximum cross-correlation value + * @param n_samp number of samples in the training set + * @param is_valid The _is_valid function from the feature_space + * @param is_valid_feat_list The _is_valid_feat_list function from the 
feature_space + */ + void set_is_valid_fxn( + const std::string project_type, + const double max_corr, + const int n_samp, + std::function<bool(double*, int, double, std::vector<double>&, double, int, int)>& is_valid, + std::function<bool(double*, int, double, std::vector<node_ptr>&, std::vector<double>&, double)>& is_valid_feat_list + ); + /** - * @brief Checks the feature to see if it is still valid against previously selected features + * @brief Checks the feature to see if it is still valid against previously selected features (using the Pearson correlation coefficient with a maximum cross-correlation of 1.0) * * @param val_ptr pointer to value array of the current feature * @param scores_sel the scores of the previously selected features @@ -27,10 +47,10 @@ namespace comp_feats * * @return True if the feature is still valid */ - bool valid_feature_against_selected_max_corr_1(double* val_ptr, int n_samp, double cross_cor_max, std::vector<double>& scores_sel, double cur_score, int end_sel, int start_sel = 0); + bool valid_feature_against_selected_pearson_max_corr_1(double* val_ptr, int n_samp, double cross_cor_max, std::vector<double>& scores_sel, double cur_score, int end_sel, int start_sel = 0); /** - * @brief Checks the feature to see if it is still valid against previously selected features + * @brief Checks the feature to see if it is still valid against previously selected features (using the Pearson correlation coefficient with a maximum cross-correlation of 1.0) * * @param val_ptr pointer to value array of the current feature * @param selected list of previously selected features @@ -39,10 +59,24 @@ namespace comp_feats * * @return True if the feature is still valid */ - bool valid_feature_against_selected_max_corr_1_feat_list(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_ptr>& selected, std::vector<double>& scores_sel, double cur_score); + bool valid_feature_against_selected_pearson_max_corr_1(double* val_ptr, int n_samp, double 
cross_cor_max, std::vector<node_ptr>& selected, std::vector<double>& scores_sel, double cur_score); + + /** + * @brief Checks the feature to see if it is still valid against previously selected features (using the Pearson correlation coefficient with a maximum cross-correlation of 1.0) + * + * @param val_ptr pointer to value array of the current feature + * @param n_samp number of samples per feature + * @param cross_cor_max maximum cross correlation for the featues + * @param start pointer to the first feature to compare against + * @param end pointer to the final feature to compare against + * @param cur_score The score of the current candidate feature + * + * @return True if the feature is still valid + */ + bool valid_feature_against_selected_pearson_max_corr_1(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_sc_pair>& out_vec, double cur_score); /** - * @brief Checks the feature to see if it is still valid against previously selected features + * @brief Checks the feature to see if it is still valid against previously selected features (using the Pearson correlation coefficient) * * @param val_ptr pointer to value array of the current feature * @param scores_sel the scores of the previously selected features (not used, but passed to keep consistency) @@ -52,10 +86,10 @@ namespace comp_feats * * @return True if the feature is still valid */ - bool valid_feature_against_selected(double* val_ptr, int n_samp, double cross_cor_max, std::vector<double>& scores_sel, double cur_score, int end_sel, int start_sel = 0); + bool valid_feature_against_selected_pearson(double* val_ptr, int n_samp, double cross_cor_max, std::vector<double>& scores_sel, double cur_score, int end_sel, int start_sel = 0); /** - * @brief Checks the feature to see if it is still valid against previously selected features + * @brief Checks the feature to see if it is still valid against previously selected features (using the Pearson correlation coefficient) * * @param val_ptr 
pointer to value array of the current feature * @param selected list of previously selected features @@ -64,37 +98,99 @@ namespace comp_feats * * @return True if the feature is still valid */ - bool valid_feature_against_selected_feat_list(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_ptr>& selected, std::vector<double>& scores_sel, double cur_score); + bool valid_feature_against_selected_pearson(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_ptr>& selected, std::vector<double>& scores_sel, double cur_score); /** - * @brief Checks the feature to see if it is still valid against previously selected features + * @brief Checks the feature to see if it is still valid against previously selected features (using the Pearson correlation coefficient) * * @param val_ptr pointer to value array of the current feature * @param n_samp number of samples per feature * @param cross_cor_max maximum cross correlation for the featues * @param start pointer to the first feature to compare against * @param end pointer to the final feature to compare against + * @param cur_score The score of the current candidate feature (not used, but passed to keep consistency) + * + * @return True if the feature is still valid + */ + bool valid_feature_against_selected_pearson(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_sc_pair>& out_vec, double cur_score); + + /** + * @brief Checks the feature to see if it is still valid against previously selected features (using the rank correlation coefficient with a maximum cross-correlation of 1.0) + * + * @param val_ptr pointer to value array of the current feature + * @param scores_sel the scores of the previously selected features * @param cur_score The score of the current candidate feature + * @param end_sel index of the feature to stop checking + * @param start_sel index of the feature to stop checking * * @return True if the feature is still valid */ - bool 
valid_feature_against_selected_max_corr_1_feat_sc_list(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_sc_pair>& out_vec, double cur_score); + bool valid_feature_against_selected_spearman_max_corr_1(double* val_ptr, int n_samp, double cross_cor_max, std::vector<double>& scores_sel, double cur_score, int end_sel, int start_sel = 0); /** - * @brief Checks the feature to see if it is still valid against previously selected features + * @brief Checks the feature to see if it is still valid against previously selected features (using the rank correlation coefficient with a maximum cross-correlation of 1.0) + * + * @param val_ptr pointer to value array of the current feature + * @param selected list of previously selected features + * @param scores_sel the scores of the previously selected features + * @param cur_score The score of the current candidate feature + * + * @return True if the feature is still valid + */ + bool valid_feature_against_selected_spearman_max_corr_1(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_ptr>& selected, std::vector<double>& scores_sel, double cur_score); + + /** + * @brief Checks the feature to see if it is still valid against previously selected features (using the rank correlation coefficient with a maximum cross-correlation of 1.0) * * @param val_ptr pointer to value array of the current feature * @param n_samp number of samples per feature * @param cross_cor_max maximum cross correlation for the featues * @param start pointer to the first feature to compare against * @param end pointer to the final feature to compare against + * @param cur_score The score of the current candidate feature + * + * @return True if the feature is still valid + */ + bool valid_feature_against_selected_spearman_max_corr_1(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_sc_pair>& out_vec, double cur_score); + + /** + * @brief Checks the feature to see if it is still valid against previously selected 
features (using the rank correlation coefficient) + * + * @param val_ptr pointer to value array of the current feature + * @param scores_sel the scores of the previously selected features (not used, but passed to keep consistency) * @param cur_score The score of the current candidate feature (not used, but passed to keep consistency) + * @param end_sel index of the feature to stop checking + * @param start_sel index of the feature to stop checking * * @return True if the feature is still valid */ - bool valid_feature_against_selected_feat_sc_list(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_sc_pair>& out_vec, double cur_score); + bool valid_feature_against_selected_spearman(double* val_ptr, int n_samp, double cross_cor_max, std::vector<double>& scores_sel, double cur_score, int end_sel, int start_sel = 0); + /** + * @brief Checks the feature to see if it is still valid against previously selected features (using the rank correlation coefficient) + * + * @param val_ptr pointer to value array of the current feature + * @param selected list of previously selected features + * @param scores_sel the scores of the previously selected features (not used, but passed to keep consistency) + * @param cur_score The score of the current candidate feature (not used, but passed to keep consistency) + * + * @return True if the feature is still valid + */ + bool valid_feature_against_selected_spearman(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_ptr>& selected, std::vector<double>& scores_sel, double cur_score); + /** + * @brief Checks the feature to see if it is still valid against previously selected features (using the rank correlation coefficient) + * + * @param val_ptr pointer to value array of the current feature + * @param n_samp number of samples per feature + * @param cross_cor_max maximum cross correlation for the featues + * @param start pointer to the first feature to compare against + * @param end pointer to the final 
feature to compare against + * @param cur_score The score of the current candidate feature (not used, but passed to keep consistency) + * + * @return True if the feature is still valid + */ + bool valid_feature_against_selected_spearman(double* val_ptr, int n_samp, double cross_cor_max, std::vector<node_sc_pair>& out_vec, double cur_score); } #endif diff --git a/src/utils/math_funcs.cpp b/src/utils/math_funcs.cpp index c8e02a275b1cdaeadf5e70c9926e2cd109eae985..4d4b231b99858d3a477379bbcf5e1f0eba73fd6e 100644 --- a/src/utils/math_funcs.cpp +++ b/src/utils/math_funcs.cpp @@ -118,3 +118,53 @@ std::vector<int> util_funcs::argsort(std::vector<int>& vec) return index; } + +void util_funcs::rank(double* a, double* rank, int* index, int size) +{ + std::iota(index, index + size, 0); + std::sort(index, index + size, [a](int i1, int i2){return a[i1] < a[i2];}); + int ii = 1; + int ii_start = 0; + while(ii_start < size) + { + // while((ii < size) && (round(a[index[ii]], 10) == round(a[index[ii - 1]], 10))) + while((ii < size) && (std::abs(a[index[ii]] - a[index[ii - 1]]) < 1e-10 * std::pow(10.0, std::floor(std::log10(a[index[ii]]))))) + ++ii; + + for(int jj = ii_start; jj < ii; ++jj) + rank[index[jj]] = static_cast<double>(ii + ii_start - 1) / 2.0 + 1.0; + ii_start = ii; + ++ii; + } +} + +double util_funcs::spearman_r(double* a, double* b, double* rank_a, double* rank_b, int* index, int size) +{ + rank(a, rank_a, index, size); + rank(b, rank_b, index, size); + return r(rank_a, rank_b, size); +} + +double util_funcs::spearman_r(double* a, double* b, double* rank_a, double* rank_b, int* index, std::vector<int>& sizes) +{ + double result = 0.0; + int pos = 0; + for(auto& sz : sizes) + { + result += spearman_r(a + pos, b + pos, rank_a, rank_b, index, sz); + pos += sz; + } + return result / static_cast<double>(sizes.size()); +} + +double util_funcs::spearman_r(double* a, double* b, double* rank_a, double* rank_b, int* index, int* sz, int n_tasks) +{ + double result = 0.0; + int pos 
= 0; + for(int nt = 0; nt < n_tasks; ++nt) + { + result += spearman_r(a + pos, b + pos, rank_a, rank_b, index, sz[nt]); + pos += sz[nt]; + } + return result / static_cast<double>(n_tasks); +} diff --git a/src/utils/math_funcs.hpp b/src/utils/math_funcs.hpp index 9811767c733d14c83fe1f11d684adc706a65e7da..2368121eeb4611da7ecefd0b4d24809d116c5f00 100644 --- a/src/utils/math_funcs.hpp +++ b/src/utils/math_funcs.hpp @@ -21,6 +21,12 @@ namespace util_funcs template<typename T> inline T sign(T number){return (number > T(0)) - (number < T(0));}; + inline double round(double num, int n) + { + double exp = std::floor(std::log10(num)); + return std::pow(10.0, exp - static_cast<double>(n)) * std::round(num * std::pow(10.0, static_cast<double>(n) - exp)); + } + /** * @brief Find the mean of of a vector * @@ -197,6 +203,7 @@ namespace util_funcs * * @param a the pointer to the head of the first vector * @param b the pointer to the head of the second vector + * @param log_a the pointer to the head of the vector used to store the log_transformed a value * @param size the size of the vector * @return The Coefficient of Determination */ @@ -207,6 +214,7 @@ namespace util_funcs * * @param a the pointer to the head of the first vector * @param b the pointer to the head of the second vector + * @param log_a the pointer to the head of the vector used to store the log_transformed a value * @param sizes the sizes of the tasks to calculate the correlation on * @return The average Coefficient of Determination */ @@ -217,12 +225,63 @@ namespace util_funcs * * @param a the pointer to the head of the first vector * @param b the pointer to the head of the second vector + * @param log_a the pointer to the head of the vector used to store the log_transformed a value * @param sz the start of vector that describes the sizes of the tasks to calculate the correlation on * @param n_tasks number of tasks to average over * @return The average Coefficient of Determination */ double log_r2(double* a, 
double* b, double* log_a, const int* sz, int n_tasks); + /** + * @brief Gets the rank variables for a vector + * + * @param a pointer to the head of the vector to find the rank of + * @param rank pointer to the head of the vector to store the resulting ranks + * @param index pointer to vector used store the sorted indexes + * @param sz The size of the vector + */ + void rank(double* a, double* rank, int* index, int size); + + /** + * @brief Calculate the Spearman's rank correlation coefficient between two vectors + * + * @param a the pointer to the head of the first vector + * @param b the pointer to the head of the second vector + * @param rank_a the pointer to the head of the vector used to rank of a + * @param rank_b the pointer to the head of the vector used to rank of b + * @param index the pointer used to store the sorted indexes + * @param size the size of the vector + * @return The Coefficient of Determination + */ + double spearman_r(double* a, double* b, double* rank_a, double* rank_b, int* index, int size); + + /** + * @brief Calculate the average Spearman's rank correlation coefficient between two vectors + * + * @param a the pointer to the head of the first vector + * @param b the pointer to the head of the second vector + * @param rank_a the pointer to the head of the vector used to rank of a + * @param rank_b the pointer to the head of the vector used to rank of b + * @param index the pointer used to store the sorted indexes + * @param sizes the sizes of the tasks to calculate the correlation on + * @return The average Coefficient of Determination + */ + double spearman_r(double* a, double* b, double* rank_a, double* rank_b, int* index, std::vector<int>& sizes); + + /** + * @brief Calculate the average Spearman's rank correlation coefficient between two vectors + * + * @param a the pointer to the head of the first vector + * @param b the pointer to the head of the second vector + * @param rank_a the pointer to the head of the vector used to rank of a 
+ * @param rank_b the pointer to the head of the vector used to rank of b + * @param index the pointer used to store the sorted indexes + * @param sz the start of vector that describes the sizes of the tasks to calculate the correlation on + * @param n_tasks number of tasks to average over + * @return The average Coefficient of Determination + */ + double spearman_r(double* a, double* b, double* rank_a, double* rank_b, int* index, int* sz, int n_tasks); + /** * @brief Sort a vector and return the indexes of the unsorted array that corresponds to the sorted one * diff --git a/src/utils/project.cpp b/src/utils/project.cpp index 048b5c5b23913fcd67d9c36a7608944196ed1d7f..8efa8739001702adcd0d2ed23f312d0d19a8641c 100644 --- a/src/utils/project.cpp +++ b/src/utils/project.cpp @@ -1,5 +1,34 @@ #include <utils/project.hpp> +void project_funcs::set_project_fxn( + const std::string project_type, + const int n_task, + std::function<void(double*, double*, std::vector<node_ptr>&, std::vector<int>&, int)>& project, + std::function<void(double*, double*, std::vector<node_ptr>&, std::vector<int>&, int)>& project_no_omp +) +{ + if(project_type.compare("regression") == 0) + { + project = project_r2; + project_no_omp = project_r2_no_omp; + } + else if(project_type.compare("classification") == 0) + { + project = project_classify; + project_no_omp = project_classify_no_omp; + } + else if(project_type.compare("log_regression") == 0) + { + if(n_task > 1) + throw std::logic_error("Log Regression can not be done using multiple tasks."); + project = project_log_r2; + project_no_omp = project_log_r2_no_omp; + } + else + throw std::logic_error("Wrong projection type passed to FeatureSpace constructor."); + +} + void project_funcs::project_r(double* prop, double* scores, std::vector<node_ptr>& phi, const std::vector<int>& sizes, int n_prop) { int n_samp = std::accumulate(sizes.begin(), sizes.end(), 0); @@ -83,7 +112,6 @@ void project_funcs::project_classify(double* prop, double* scores, 
std::vector<n #pragma omp barrier } std::transform(scores + start, scores + end, scores + start, [](double score){return std::isnan(score) ? std::numeric_limits<double>::infinity() : score;}); - } } diff --git a/src/utils/project.hpp b/src/utils/project.hpp index afa5038d7e966545f3114f7d64e830f053591b71..24f7afc922cd442ab6e2904d1c2f6b914028bbc5 100644 --- a/src/utils/project.hpp +++ b/src/utils/project.hpp @@ -17,6 +17,20 @@ namespace project_funcs { + /** + * @brief Set the project functions for SIS + * + * @param project_type Type of projection to perform + * @param poroject The _poroject function from the feature_space + * @param project_no_omp The _project_no_omp function from the feature_space + */ + void set_project_fxn( + const std::string project_type, + const int n_task, + std::function<void(double*, double*, std::vector<node_ptr>&, std::vector<int>&, int)>& project, + std::function<void(double*, double*, std::vector<node_ptr>&, std::vector<int>&, int)>& project_no_omp + ); + /** * @brief Calculate the projection scores of a set of features to a vector via Pearson correlation * diff --git a/tests/exec_test/classification/data.csv b/tests/exec_test/classification/data.csv index 6c0ead3c18944a69d6573acb817c7ae51f9f76f8..711bcdb56da1d6e97e6645b854912d9574b7d4f8 100644 --- a/tests/exec_test/classification/data.csv +++ b/tests/exec_test/classification/data.csv @@ -1,101 +1,101 @@ index,prop,A,B,C,D,E,F,G,H,I,J -0,1.0,0.01,-0.01,10.0,10.0,-0.49282517909827384,0.17302597769416206,0.598942935224295,-0.2987544751968252,-0.5812549090102688,-0.11065649421055634 -1,1.0,-1.8944281037421362,-1.3199613439800735,0.9557138968762431,0.4781172014274879,0.7775861911003294,0.6553697167785566,0.17491417142796584,-0.2884988775306041,-0.04531653614948916,-0.6065861937524113 -2,1.0,-1.4746015071142384,-1.2261496452343335,0.33014029248479626,-0.38950549868991224,0.13893526582480842,-0.8718692821675553,0.3747246204870105,0.1641859118951301,0.29356070144371693,0.28560894022002103 
-3,1.0,-1.3021341433673468,-1.8262126241881185,-0.71381302228685,0.9687695850076805,0.6831481792028635,0.7991250925387956,0.30947917352650367,0.7280520310034677,0.7254955809948858,-0.6765763028042482 -4,1.0,-1.7393863226933401,-1.583498665054882,-0.06778341709581581,0.22998854989132322,0.5824275980446467,0.2619471491848253,-0.31573435079735024,0.6117812214426803,-0.18705821696723812,0.8717643476903345 -5,1.0,-1.5666089663239755,-1.058618149021826,-0.21280098230276434,0.9154597761466068,-0.2634655525918126,-0.6661264959880135,-0.19502899649000716,-0.23717005768011568,-0.9333588585968833,-0.19726273171241027 -6,1.0,-1.5534087615389538,-1.2520923128583763,0.9725857879017872,-0.8725028871856755,0.7291109108144516,0.26524478721086564,-0.7269009736244325,0.2486261701079393,0.8090043968802652,-0.27849406478047856 -7,1.0,-1.5462532513644658,-1.8123888845081852,0.4316458762210631,-0.5952232732793832,0.9535570697581124,-0.22291521912156265,0.25767093907617356,0.7289278790983178,-0.5797830554176866,-0.17996031942809454 -8,1.0,-1.1273555452403516,-1.6926149744472814,0.8270911990526928,0.20878148291075949,-0.5733171873615286,-0.004887589213519616,0.3419187160346375,-0.2929016713562016,-0.34531700628784034,0.7669205476300731 -9,1.0,-1.3536783481588408,-1.381410564729617,0.023965203621815423,-0.8054125694184839,0.8068101397184948,-0.8065762631278186,-0.3927997785617203,-0.4638193637740795,0.16095248005356044,-0.5534197471316085 -10,1.0,-1.1785315188879562,-1.2770582929850374,0.916015229666356,0.027594664515699696,-0.8295070079776354,-0.7000636893272012,-0.7156014565887143,-0.4397991651435269,-0.4872412204948866,0.24527926705612058 -11,1.0,-1.1754704976687549,-1.0561328124666542,0.4045577188977567,0.17599736106236108,0.22738373082247842,0.7742546525779774,-0.6160809969176364,-0.948639007451084,0.9540764333752245,-0.49710200117233927 
-12,1.0,-1.6727791503394291,-1.8619023988358836,0.9067088448860641,-0.5927370303736976,0.8852298038909494,0.8220692972419072,0.20436748556299245,0.24610324883504986,-0.079476866422163,-0.24400699558343364 -13,1.0,-1.9632616543888368,-1.316804580896929,-0.2845703941884137,-0.19868606157423807,-0.1688976095411121,-0.9293925911269099,0.26589905967191263,-0.8287276425847814,-0.42745309747416815,-0.7383680916088831 -14,1.0,-1.7949776980848098,-1.1394821735708154,0.011556817105956574,0.49940831475722924,0.25316342477447806,0.017645446880421023,0.40173516709526447,-0.6502876172985015,-0.262217482830833,-0.5101021201305884 -15,1.0,-1.079571205362615,-1.9324595507799076,-0.5201611742812009,-0.3924784593473962,0.14749583956586831,0.7808796064740751,-0.28104468779917324,-0.14803690813578552,-0.2088104722240931,0.2789619297181276 -16,1.0,-1.5255514503750653,-1.7245520919673556,0.562803219191695,0.3236760616369958,0.49073713641037187,-0.9547619269949603,0.028603504036769145,-0.2462952196505066,-0.29773629359773923,-0.6324738309576534 -17,1.0,-1.2914230950731502,-1.950696152621199,0.8596694610541045,0.4857728192540889,0.26888359882500934,0.253553321699552,-0.04574308756939516,0.6679340327824896,0.3085919639199468,0.7710843014640274 -18,1.0,-1.234047871210012,-1.6817351928784738,-0.11887110046241278,0.15900093776813162,0.29854288417560015,-0.20382920533253834,-0.6379456952513525,0.6580496908109092,0.9495295891340081,-0.577812553880056 -19,1.0,-1.1351305002955077,-1.3119036669604036,0.7403741109304496,0.558782660077505,-0.0960521263545826,0.5291198173605371,0.37219060477014443,0.6886564662536832,-0.8194331653155049,-0.12814415930810963 -20,1.0,-0.01,0.01,-10.0,-10.0,0.7852373497328908,-0.38721773049540054,-0.9424092188994484,0.16080657729767522,-0.7239699836619722,-0.4526501344158229 
-21,1.0,1.1507658618080976,1.726050539272399,-0.9229073425132097,0.46575138421963236,-0.81727500527083,-0.18247264092662796,0.8875260706203563,0.11159288397848788,0.3864350788801618,-0.44001721122127235 -22,1.0,1.9038976822470102,1.7188075931659075,0.2803397954645097,0.379365407838544,0.6348430081926235,0.37175391878083874,-0.6116533053698634,0.7325679278741848,0.85803611350317,-0.5779734417084115 -23,1.0,1.777519764523806,1.2869705037057755,0.22285089894507704,-0.4240570888282873,-0.27619426781835954,0.6169416676806943,-0.696779972923147,0.23612770730498034,0.7607058897805274,0.34004139732032956 -24,1.0,1.6531432749387367,1.162828102113115,-0.3395011973829445,0.8985295913658116,-0.8815382282315818,0.09072882666430049,-0.8583958707809345,0.9072723315158959,0.16053173561906742,0.6789118117688409 -25,1.0,1.309551805582044,1.3682775573764787,-0.4445767545785626,-0.7887117451257197,0.2466257730701833,-0.6634740188183126,-0.4463555520604636,0.7503127731346337,-0.9895952297013597,-0.1501201098407059 -26,1.0,1.4492443117189315,1.4032886458116898,-0.38867957733440184,-0.7081934507919516,0.8503100848003078,-0.7672563385316118,0.37050931732919423,0.38735492101575075,0.5331603211649865,0.14939021245513073 -27,1.0,1.613625013917531,1.054483144145669,-0.35964468015596895,0.6825554041477278,-0.5319540093654402,0.9341016895908625,-0.7360028401883201,-0.2996029145405299,0.3511255963558182,-0.1878428846692788 -28,1.0,1.0243392720597995,1.9105960212113278,-0.24940915747071712,-0.13707002438664384,-0.707128531134104,0.9449320492342947,0.43123336605298723,0.44954399095926245,0.9129019322800267,0.7739461096382698 -29,1.0,1.9944467859460666,1.6720498444130638,0.9353026424804634,0.8337209665238072,0.25416795671734294,-0.007922712021390721,-0.11477629537676681,-0.2760428960022421,-0.8130984031254187,0.3419220522124746 
-30,1.0,1.4011033028792645,1.109011516195995,0.4257167722550872,0.5441748037327634,0.492863854358204,-0.04758979171716571,-0.7438407906336721,0.5252894890604114,0.8296117155449363,0.01519322184552152 -31,1.0,1.9499509062547116,1.0572741079996884,0.12665368551441047,0.469705238170149,0.8744362482730081,-0.7595711754681347,0.31023073561926484,-0.8034208437448502,-0.4624310824864766,-0.40716588675912946 -32,1.0,1.472646250429945,1.1891364327906537,-0.7313930180310388,0.46110222460300854,-0.8845283918853222,-0.41989394484079834,0.6475182143890668,0.6881263264084854,0.7546563716916033,0.1168819230678162 -33,1.0,1.4590167720750702,1.1702436403729433,-0.5274334249471311,-0.5987746978086463,0.11319779160167642,-0.50528865259863,0.11757211428893855,0.590400320594326,-0.15515938676912566,0.3548279774131968 -34,1.0,1.3204274424304074,1.198019529303835,-0.8186266224051648,-0.0290085645105993,0.9523159683784683,0.8174957842139241,0.18222455484504252,-0.013773043646529981,-0.2627319529358798,-0.8595305628086736 -35,1.0,1.8813823797628926,1.0367008183967852,0.3052186880166259,-0.06088564766002724,-0.38790344660551357,-0.10806404273546488,-0.9629804050096822,-0.42428960420351114,-0.2534422930772855,0.30963736841129674 -36,1.0,1.9986688782460984,1.3690925712861786,0.5431284474003899,0.5913724730408367,-0.8353670866934573,0.19831525342250655,-0.18143473978380187,0.6364291057549478,0.42062864699233105,0.9901223646646209 -37,1.0,1.504558184990442,1.190949743496727,-0.6532636073327616,0.35935945086837595,0.30131719114182043,0.6495817943565889,0.9422689556330865,0.8846598944893771,-0.4731712393443981,0.039635066570717026 -38,1.0,1.008333615471538,1.981506300008274,-0.8123524571767606,0.2197661015909831,-0.6502106779028902,0.4236216902915564,-0.5886509927579104,0.061487886019890414,-0.2377374740160869,0.6412843473808252 
-39,1.0,1.6017918572461922,1.1250859962714062,-0.8298193869407411,-0.3451046875738015,0.48516607054511907,-0.2588397274480556,-0.9206152083268813,0.27549821587142675,-0.629075534110342,-0.6425278879606868 -40,0.0,0.01,0.01,10.0,-10.0,0.04151985651136059,0.2330346162909498,-0.49723324619118703,-0.023544587617094992,-0.41854083777000306,-0.5502339327925116 -41,0.0,-1.0914757437035456,1.704187017012845,-0.4803164747027948,-0.7537847103406319,-0.6132342356169982,0.16795557366247427,0.45563663131504173,-0.3802276359532064,0.48021383007368956,-0.45367492988510794 -42,0.0,-1.9425392252914977,1.5931139414465358,0.31009805091338705,-0.835007082906627,0.40758014085085303,0.5569242475965532,-0.38861660463934644,0.6021510475141199,-0.9843221980987535,-0.9963328889833365 -43,0.0,-1.403024210449149,1.0504137974303827,-0.8984564534469641,-0.7970299242453491,0.47491891024478017,0.48019322053841695,-0.7508561635586863,-0.043960372032017636,-0.24265139180566164,-0.10923906105400616 -44,0.0,-1.45810616907354,1.084683264970634,0.5713295229340183,-0.6773798263796229,0.09839698468883196,-0.9615991701040352,-0.7539225919221575,0.3614358912575588,-0.6380304554939824,0.40434902484390767 -45,0.0,-1.6042143290163837,1.5773097324751753,0.40243320555526796,-0.06801187450077983,-0.3730896611520318,0.23970878487105018,0.41645110664336094,-0.5059916627143299,-0.8866903480674095,0.3036452361644302 -46,0.0,-1.5486866135010242,1.3288318457670756,-0.17492524550976607,0.05033039145153584,-0.38867679574193215,-0.7230760497855304,0.6460761077249644,-0.0010558990958802195,0.4919287207437726,-0.647995101369186 -47,0.0,-1.8920756792534963,1.7657625846115277,0.28941076121752496,0.4856651089687205,0.33868477386080054,0.3743195814396485,-0.10547901462716669,0.004520417892417861,0.2228622619759395,0.23538363683763985 
-48,0.0,-1.5144292231365304,1.6984040931515498,-0.19904456301740736,-0.8558041127811826,0.9475720005649064,0.6549395628101518,0.802084131057488,0.010033694468233323,0.44976636625057376,0.11997413461843287 -49,0.0,-1.334691441718412,1.8012484689328736,-0.34672956898995055,-0.614828863660672,0.5781503720015266,-0.6973564899083871,-0.27249617742754695,-0.3266795053631859,0.40318590749462313,0.6598349869723568 -50,0.0,-1.392160865916832,1.96030807097305,-0.4709187754332349,-0.16596517376787534,-0.11837327580213919,-0.8046715422993092,-0.27309628387497664,0.9646762900347077,-0.2407860162851745,0.7810927507182175 -51,0.0,-1.1081877449652693,1.1321805921252017,-0.5463150777240524,-0.26339780806113056,0.0734161121606478,-0.5615845135833513,-0.003812545601593831,-0.06790170865979794,-0.7973376248924131,-0.5024942886762793 -52,0.0,-1.1273342237834545,1.222900933909083,-0.23961815168048672,0.2812826831120643,-0.12225333824316409,-0.4163409124224706,-0.3029448237633121,0.9506971678575753,0.08477434826975472,0.24564363747814055 -53,0.0,-1.5450458531844702,1.4646555655585867,0.6724652616073976,0.9636771128762993,-0.7328669447410141,0.26987900702231227,-0.7341217639847928,-0.1847500436486904,0.49478360423045675,-0.5634696889084065 -54,0.0,-1.6972898977881243,1.9342793806461098,0.916674666213795,0.7441006696135166,-0.5363256808793411,0.7453493138967058,-0.6084949711216283,-0.036147807131094334,0.7300972119817077,-0.9860206879212554 -55,0.0,-1.467166856883277,1.9195073363935855,-0.04001237513761069,0.2482575243891476,-0.795936343325832,-0.755933622220192,0.6649430625674231,-0.5608250699419657,-0.987328335835364,0.009181823833889657 -56,0.0,-1.507858084142104,1.11065681931139,-0.7565527152681395,-0.43396597947565385,-0.9258208007633866,0.6212043805382641,-0.7253554358023508,-0.08719504527829058,0.5000400077995837,-0.3510240708674768 
-57,0.0,-1.793339477832935,1.6461561157023556,0.5936703687181848,0.7412541556633099,-0.835056311664806,-0.12828334096535055,0.7957690701135833,0.3380628722493768,0.9616102822792876,-0.5197559610490992 -58,0.0,-1.685623286883061,1.7913664511633125,-0.9177920046292005,-0.2248076520670288,0.7511725309540487,0.7449254977655742,0.05482138754018062,-0.26814612271904337,-0.373795753322288,-0.0236199006955784 -59,0.0,-1.7032511687316396,1.561738983983668,0.937331444475048,-0.18914659666867584,0.7267575281390293,0.571196020214809,0.1504784966595285,0.7163709047538911,0.6459479363917942,-0.09651249984138066 -60,0.0,-0.01,-0.01,-10.0,10.0,0.3037482340767377,0.0946840691842421,0.8466519087621067,0.5057109910970319,-0.6648466204250763,-0.7229347856701709 -61,0.0,1.517475034607442,-1.5797683396912157,-0.31385345647165575,-0.6706416904370422,0.33748118903604074,-0.6950596675808771,0.38251266476664836,-0.7540512945658595,-0.5409128937716641,-0.15273659248128935 -62,0.0,1.3672941639996612,-1.5494260699524456,0.7462797650357975,0.3206679093982656,0.0757122783161257,0.5570890283268028,-0.31445996245727414,-0.09117939535299069,-0.7125726183527377,-0.8625237702649187 -63,0.0,1.8755185956540283,-1.0124502444775816,0.9616342423045714,0.9990251718017698,0.4285764726207524,0.7902542298430564,-0.16273214801418256,0.05710841557502144,0.09962536752119067,-0.4177957372666703 -64,0.0,1.8407338686869008,-1.5868070635995237,-0.29373799492321306,-0.6546037139247634,-0.15830470325220958,-0.45061718235930015,0.10621728605636549,-0.25016507950845557,-0.5988943508598357,-0.8603824760047425 -65,0.0,1.4799923864034554,-1.6886196544558572,0.6613747096357248,0.3354136960485343,0.29540846912662677,-0.3407250803665458,0.6119612274582391,0.5332770226092296,-0.960254363897463,0.9132513378340925 -66,0.0,1.0735581028251975,-1.060524245309374,-0.7902813350132356,0.37259465524782054,-0.9400757902613452,0.9721066172153674,-0.24687488719815498,-0.5015445240130325,-0.13494761193218818,0.1300908069763218 
-67,0.0,1.6376974303400835,-1.649460990932646,-0.6005900469726237,0.28162130970935295,0.8362440030881721,0.5625055617944303,-0.24424824400159317,0.2742731104136069,0.9882291644128922,-0.9034928924297636 -68,0.0,1.9226795203724978,-1.5881079200154482,0.23039784446724942,0.458000795025685,0.16053436480789807,0.10676023110363286,0.08437633629048147,-0.4102570968096315,-0.3889759130323822,0.23368493276044622 -69,0.0,1.428218106951723,-1.758329763791654,0.12289411290053698,-0.19374642536783515,0.6024111339994527,-0.8956945110997676,0.34728022344428666,0.045175117581032875,-0.2326617713895407,-0.3146487851555211 -70,0.0,1.426028756973607,-1.1608245105048438,0.9060271622161762,0.7364181822252924,-0.04128485443820251,0.30852412684049657,0.3692055404974064,0.33319303146616197,0.9854449773409701,-0.253876502721057 -71,0.0,1.7300201940414208,-1.8094742195380222,-0.6776614683334685,0.07388223501889013,-0.6821472673109052,0.02412639199219635,0.8489462496789089,-0.5162539947354388,0.2026274256350431,-0.8974772498432042 -72,0.0,1.1160580867858627,-1.0562234913753827,0.4924315133007724,-0.737330353527688,0.5948943274413478,0.8054360371547522,-0.9127166792458934,-0.39019932233826227,-0.7358052031844453,-0.058032643451690014 -73,0.0,1.5287830677917298,-1.5282207370489649,-0.8638215305852943,-0.9871259051181829,-0.6981909166452223,-0.17859271120363984,-0.9024979934000754,0.7774480505476058,0.03349780154212989,0.5698021932461961 -74,0.0,1.6960209130376898,-1.6879132950675153,-0.9196790361121787,-0.08379502301562369,0.49207875063490536,-0.10278600265499382,0.1680009845018644,-0.9849109111206711,-0.9010178860550528,0.6398135602683432 -75,0.0,1.8229209542705762,-1.7992151616780458,0.10745593717114521,-0.8547117567503333,0.3449692462697873,0.5190929861298248,0.41023065780507606,-0.9121646126915404,0.0339436116875278,-0.3066433169799614 
-76,0.0,1.153822450324946,-1.9125109596392957,-0.8084861601829396,-0.010443047871684152,-0.7062967902838859,0.8221182617361114,0.16332743077240175,0.25278629136411546,-0.501338527911191,-0.28349201031842997 -77,0.0,1.1952162783159461,-1.4347201247937995,-0.8144168383678148,-0.029402316469989476,-0.8414282024081439,-0.004586605289200518,-0.6064347305419278,0.7142773164379119,-0.4448189769242301,-0.7536984563026647 -78,0.0,1.9935896172064345,-1.5249947828194232,-0.8776374613798481,0.4144055355504066,-0.033655814948979845,0.6246920435596353,-0.8324026588913145,-0.7230280627324008,-0.8670990346040541,-0.18563237806149768 -79,0.0,1.623519204932399,-1.5204567735605674,0.9778286856360292,-0.5750238094139197,-0.4026176094620355,0.6319679592519518,-0.42650442043409664,0.4805794604963276,0.6863380782764676,-0.7938128517078891 -80,1.0,-1.9061964810894976,-1.2890845064683893,10.0,10.0,0.10110213628450881,-0.416199695149021,-0.49485098716478193,-0.5686984484832116,-0.18478238247187462,-0.5522304988566058 -81,1.0,-1.1233456870613596,-1.4319272868794908,-0.20267104500415667,-0.13891416360392483,-0.9371567107968573,-0.11679010938437773,-0.0942374319418513,-0.8967611185539714,-0.18342332063686673,0.4586246330654187 -82,1.0,-1.8593800902098794,-1.2014277824817987,-0.6994029020523282,0.8016069079080759,0.618074329335756,-0.17256870875707642,-0.07569344530437294,0.48881526808669196,-0.6122253862675848,0.5154748580158188 -83,1.0,-1.4459305927616168,-1.507381441431149,-0.14646706623716144,0.7737171788723411,0.4987969849603513,-0.01586272159205504,0.48716282764946706,-0.0020169225903672316,-0.4803954556572778,0.14066039485631854 -84,1.0,-1.5068337349461003,-1.3960574872196596,0.9352733360226106,-0.6584023257750704,0.2540286154963187,-0.2079493637863219,0.49423396418171595,0.3425440151560937,-0.2790717466048003,0.6817667819203079 
-85,1.0,1.2945952163736183,1.2595474551517882,-10.0,-10.0,-0.7729483005820612,-0.7555910808571309,-0.7956918977844933,0.1406538353922091,-0.16048348692278092,0.46092093570445214 -86,1.0,1.0468940151290935,1.4889992490615644,-0.10280602307649511,-0.2322567217543967,0.9824873120780633,0.22063948796997224,0.46610825105829923,-0.32823900060322386,0.9556882858690123,0.9840121424736405 -87,1.0,1.5883047440360363,1.7022605521341374,-0.40090994887229314,-0.8872400296917884,-0.7963665539711986,-0.18901134135900155,-0.9842642698324229,0.22853934832310796,0.6960450376429215,-0.7349411665560721 -88,1.0,1.0700121628460502,1.818456986404958,-0.9066754218923723,0.6435018002723063,0.29644429045149967,-0.21233982252142947,-0.6249473476636442,-0.07650553418511508,0.6900069458740186,0.6031788656970374 -89,1.0,1.478188533919311,1.1810797217515985,-0.7778783717821762,0.15870040018507803,0.7700838694175804,0.31820158149436617,-0.577373286340777,0.2079154087822559,0.16989820716894366,-0.13471834974110908 -90,0.0,-1.3979253633769553,1.8903759983708976,10.0,-10.0,-0.3815436230444891,-0.15060860491731232,0.9402009350589582,-0.26012695659385154,0.011178432296194751,-0.5526461887962022 -91,0.0,-1.3418191928050067,1.3777038429060606,-0.2738961073464674,0.9218628887177005,0.1833297141250405,0.7949957967753243,0.4703407862424096,0.5871591279939063,0.6560651905340187,0.7103783594351551 -92,0.0,-1.0853574965532813,1.2568456448317546,-0.09343868466017535,0.8673637319098968,0.5019793353377939,0.9291335314667162,0.8530385462334946,0.23164737184209572,-0.9213639337894683,0.9955206665909002 -93,0.0,-1.5078347061732043,1.755372973469426,-0.35304762896340125,0.6869964596284959,0.12650715249211952,-0.5841575512334931,0.6780119845973502,0.1301840756737609,-0.5413658827498185,0.804095414322346 -94,0.0,-1.6723266529177478,1.9139884218475265,-0.055989266428471796,0.08397268885628328,0.49540687896065805,-0.5318515111518416,-0.6829875503825202,-0.7627193412374218,0.044183568378214355,0.5694928604351057 
-95,0.0,1.5219674737320155,-1.8127243158447541,-10.0,10.0,-0.5924273489245648,-0.24521529180917545,0.45028680560933676,-0.6172008060217697,-0.07832380637663072,-0.13840019966409445 -96,0.0,1.3427761908932137,-1.0426461453585447,-0.8405236108806917,-0.5797680617663143,0.20708806522492362,-0.30689024242517027,-0.7073198325932093,0.06720948720809505,-0.21904144161504235,0.6516183145928414 -97,0.0,1.729966706858185,-1.2614818535634313,-0.07134725891047888,-0.5716479318807921,0.002484974059520084,0.4103461232511616,0.29425426224880424,0.6980183692479018,0.6525532678930528,-0.9606212198157282 -98,0.0,1.6367960859950461,-1.4048311726687266,0.13335534338270483,0.7851836236372127,0.10649410652264102,0.45700338475494173,-0.31447076807019614,-0.053371126918829725,0.8614734514136297,-0.7701671581075855 -99,0.0,1.2253193252857404,-1.3983212310825488,0.7518196805414694,0.8434776597312679,0.880714646905367,0.20665859661747032,-0.8505399954222603,0.7702440358432017,-0.790477429383416,-0.21937326040566685 +0,1,0.1,-0.1,10,10,-0.492825179098274,0.173025977694162,0.598942935224295,-0.298754475196825,-0.581254909010269,-0.110656494210556 +1,1,-1.89442810374214,-1.31996134398007,0.955713896876243,0.478117201427488,0.777586191100329,0.655369716778557,0.174914171427966,-0.288498877530604,-0.045316536149489,-0.606586193752411 +2,1,-1.47460150711424,-1.22614964523433,0.330140292484796,-0.389505498689912,0.138935265824808,-0.871869282167555,0.37472462048701,0.16418591189513,0.293560701443717,0.285608940220021 +3,1,-1.30213414336735,-1.82621262418812,-0.71381302228685,0.968769585007681,0.683148179202864,0.799125092538796,0.309479173526504,0.728052031003468,0.725495580994886,-0.676576302804248 +4,1,-1.73938632269334,-1.58349866505488,-0.067783417095816,0.229988549891323,0.582427598044647,0.261947149184825,-0.31573435079735,0.61178122144268,-0.187058216967238,0.871764347690334 
+5,1,-1.56660896632398,-1.05861814902183,-0.212800982302764,0.915459776146607,-0.263465552591813,-0.666126495988014,-0.195028996490007,-0.237170057680116,-0.933358858596883,-0.19726273171241 +6,1,-1.55340876153895,-1.25209231285838,0.972585787901787,-0.872502887185675,0.729110910814452,0.265244787210866,-0.726900973624432,0.248626170107939,0.809004396880265,-0.278494064780479 +7,1,-1.54625325136447,-1.81238888450819,0.431645876221063,-0.595223273279383,0.953557069758112,-0.222915219121563,0.257670939076174,0.728927879098318,-0.579783055417687,-0.179960319428095 +8,1,-1.12735554524035,-1.69261497444728,0.827091199052693,0.208781482910759,-0.573317187361529,-0.00488758921352,0.341918716034638,-0.292901671356202,-0.34531700628784,0.766920547630073 +9,1,-1.35367834815884,-1.38141056472962,0.023965203621816,-0.805412569418484,0.806810139718495,-0.806576263127819,-0.39279977856172,-0.463819363774079,0.16095248005356,-0.553419747131608 +10,1,-1.17853151888796,-1.27705829298504,0.916015229666356,0.0275946645157,-0.829507007977635,-0.700063689327201,-0.715601456588714,-0.439799165143527,-0.487241220494887,0.245279267056121 +11,1,-1.17547049766875,-1.05613281246665,0.404557718897757,0.175997361062361,0.227383730822478,0.774254652577977,-0.616080996917636,-0.948639007451084,0.954076433375225,-0.497102001172339 +12,1,-1.67277915033943,-1.86190239883588,0.906708844886064,-0.592737030373698,0.885229803890949,0.822069297241907,0.204367485562992,0.24610324883505,-0.079476866422163,-0.244006995583434 +13,1,-1.96326165438884,-1.31680458089693,-0.284570394188414,-0.198686061574238,-0.168897609541112,-0.92939259112691,0.265899059671913,-0.828727642584781,-0.427453097474168,-0.738368091608883 +14,1,-1.79497769808481,-1.13948217357082,0.011556817105957,0.499408314757229,0.253163424774478,0.017645446880421,0.401735167095264,-0.650287617298501,-0.262217482830833,-0.510102120130588 
+15,1,-1.07957120536262,-1.93245955077991,-0.520161174281201,-0.392478459347396,0.147495839565868,0.780879606474075,-0.281044687799173,-0.148036908135786,-0.208810472224093,0.278961929718128 +16,1,-1.52555145037507,-1.72455209196736,0.562803219191695,0.323676061636996,0.490737136410372,-0.95476192699496,0.028603504036769,-0.246295219650507,-0.297736293597739,-0.632473830957653 +17,1,-1.29142309507315,-1.9506961526212,0.859669461054104,0.485772819254089,0.268883598825009,0.253553321699552,-0.045743087569395,0.66793403278249,0.308591963919947,0.771084301464027 +18,1,-1.23404787121001,-1.68173519287847,-0.118871100462413,0.159000937768132,0.2985428841756,-0.203829205332538,-0.637945695251352,0.658049690810909,0.949529589134008,-0.577812553880056 +19,1,-1.13513050029551,-1.3119036669604,0.74037411093045,0.558782660077505,-0.096052126354583,0.529119817360537,0.372190604770144,0.688656466253683,-0.819433165315505,-0.12814415930811 +20,1,-0.1,0.1,-10,-10,0.785237349732891,-0.387217730495401,-0.942409218899448,0.160806577297675,-0.723969983661972,-0.452650134415823 +21,1,1.1507658618081,1.7260505392724,-0.92290734251321,0.465751384219632,-0.81727500527083,-0.182472640926628,0.887526070620356,0.111592883978488,0.386435078880162,-0.440017211221272 +22,1,1.90389768224701,1.71880759316591,0.28033979546451,0.379365407838544,0.634843008192624,0.371753918780839,-0.611653305369863,0.732567927874185,0.85803611350317,-0.577973441708411 +23,1,1.77751976452381,1.28697050370578,0.222850898945077,-0.424057088828287,-0.27619426781836,0.616941667680694,-0.696779972923147,0.23612770730498,0.760705889780527,0.34004139732033 +24,1,1.65314327493874,1.16282810211312,-0.339501197382944,0.898529591365812,-0.881538228231582,0.090728826664301,-0.858395870780934,0.907272331515896,0.160531735619067,0.678911811768841 
+25,1,1.30955180558204,1.36827755737648,-0.444576754578563,-0.78871174512572,0.246625773070183,-0.663474018818313,-0.446355552060464,0.750312773134634,-0.98959522970136,-0.150120109840706 +26,1,1.44924431171893,1.40328864581169,-0.388679577334402,-0.708193450791952,0.850310084800308,-0.767256338531612,0.370509317329194,0.387354921015751,0.533160321164986,0.149390212455131 +27,1,1.61362501391753,1.05448314414567,-0.359644680155969,0.682555404147728,-0.53195400936544,0.934101689590862,-0.73600284018832,-0.29960291454053,0.351125596355818,-0.187842884669279 +28,1,1.0243392720598,1.91059602121133,-0.249409157470717,-0.137070024386644,-0.707128531134104,0.944932049234295,0.431233366052987,0.449543990959262,0.912901932280027,0.77394610963827 +29,1,1.99444678594607,1.67204984441306,0.935302642480463,0.833720966523807,0.254167956717343,-0.007922712021391,-0.114776295376767,-0.276042896002242,-0.813098403125419,0.341922052212475 +30,1,1.40110330287926,1.109011516196,0.425716772255087,0.544174803732763,0.492863854358204,-0.047589791717166,-0.743840790633672,0.525289489060411,0.829611715544936,0.015193221845522 +31,1,1.94995090625471,1.05727410799969,0.12665368551441,0.469705238170149,0.874436248273008,-0.759571175468135,0.310230735619265,-0.80342084374485,-0.462431082486477,-0.407165886759129 +32,1,1.47264625042994,1.18913643279065,-0.731393018031039,0.461102224603009,-0.884528391885322,-0.419893944840798,0.647518214389067,0.688126326408485,0.754656371691603,0.116881923067816 +33,1,1.45901677207507,1.17024364037294,-0.527433424947131,-0.598774697808646,0.113197791601676,-0.50528865259863,0.117572114288939,0.590400320594326,-0.155159386769126,0.354827977413197 +34,1,1.32042744243041,1.19801952930384,-0.818626622405165,-0.029008564510599,0.952315968378468,0.817495784213924,0.182224554845043,-0.01377304364653,-0.26273195293588,-0.859530562808673 
+35,1,1.88138237976289,1.03670081839679,0.305218688016626,-0.060885647660027,-0.387903446605514,-0.108064042735465,-0.962980405009682,-0.424289604203511,-0.253442293077285,0.309637368411297 +36,1,1.9986688782461,1.36909257128618,0.54312844740039,0.591372473040837,-0.835367086693457,0.198315253422507,-0.181434739783802,0.636429105754948,0.420628646992331,0.990122364664621 +37,1,1.50455818499044,1.19094974349673,-0.653263607332762,0.359359450868376,0.30131719114182,0.649581794356589,0.942268955633086,0.884659894489377,-0.473171239344398,0.039635066570717 +38,1,1.00833361547154,1.98150630000827,-0.812352457176761,0.219766101590983,-0.65021067790289,0.423621690291556,-0.58865099275791,0.061487886019891,-0.237737474016087,0.641284347380825 +39,1,1.60179185724619,1.12508599627141,-0.829819386940741,-0.345104687573802,0.485166070545119,-0.258839727448056,-0.920615208326881,0.275498215871427,-0.629075534110342,-0.642527887960687 +40,0,0.1,0.1,10,-10,0.041519856511361,0.23303461629095,-0.497233246191187,-0.023544587617095,-0.418540837770003,-0.550233932792512 +41,0,-1.09147574370355,1.70418701701285,-0.480316474702795,-0.753784710340632,-0.613234235616998,0.167955573662474,0.455636631315042,-0.380227635953206,0.48021383007369,-0.453674929885108 +42,0,-1.9425392252915,1.59311394144654,0.310098050913387,-0.835007082906627,0.407580140850853,0.556924247596553,-0.388616604639346,0.60215104751412,-0.984322198098753,-0.996332888983337 +43,0,-1.40302421044915,1.05041379743038,-0.898456453446964,-0.797029924245349,0.47491891024478,0.480193220538417,-0.750856163558686,-0.043960372032018,-0.242651391805662,-0.109239061054006 +44,0,-1.45810616907354,1.08468326497063,0.571329522934018,-0.677379826379623,0.098396984688832,-0.961599170104035,-0.753922591922157,0.361435891257559,-0.638030455493982,0.404349024843908 
+45,0,-1.60421432901638,1.57730973247518,0.402433205555268,-0.06801187450078,-0.373089661152032,0.23970878487105,0.416451106643361,-0.50599166271433,-0.88669034806741,0.30364523616443 +46,0,-1.54868661350102,1.32883184576708,-0.174925245509766,0.050330391451536,-0.388676795741932,-0.72307604978553,0.646076107724964,-0.00105589909588,0.491928720743773,-0.647995101369186 +47,0,-1.8920756792535,1.76576258461153,0.289410761217525,0.48566510896872,0.338684773860801,0.374319581439648,-0.105479014627167,0.004520417892418,0.222862261975939,0.23538363683764 +48,0,-1.51442922313653,1.69840409315155,-0.199044563017407,-0.855804112781183,0.947572000564906,0.654939562810152,0.802084131057488,0.010033694468233,0.449766366250574,0.119974134618433 +49,0,-1.33469144171841,1.80124846893287,-0.346729568989951,-0.614828863660672,0.578150372001527,-0.697356489908387,-0.272496177427547,-0.326679505363186,0.403185907494623,0.659834986972357 +50,0,-1.39216086591683,1.96030807097305,-0.470918775433235,-0.165965173767875,-0.118373275802139,-0.804671542299309,-0.273096283874977,0.964676290034708,-0.240786016285174,0.781092750718218 +51,0,-1.10818774496527,1.1321805921252,-0.546315077724052,-0.263397808061131,0.073416112160648,-0.561584513583351,-0.003812545601594,-0.067901708659798,-0.797337624892413,-0.502494288676279 +52,0,-1.12733422378345,1.22290093390908,-0.239618151680487,0.281282683112064,-0.122253338243164,-0.416340912422471,-0.302944823763312,0.950697167857575,0.084774348269755,0.245643637478141 +53,0,-1.54504585318447,1.46465556555859,0.672465261607398,0.963677112876299,-0.732866944741014,0.269879007022312,-0.734121763984793,-0.18475004364869,0.494783604230457,-0.563469688908407 +54,0,-1.69728989778812,1.93427938064611,0.916674666213795,0.744100669613517,-0.536325680879341,0.745349313896706,-0.608494971121628,-0.036147807131094,0.730097211981708,-0.986020687921255 
+55,0,-1.46716685688328,1.91950733639359,-0.040012375137611,0.248257524389148,-0.795936343325832,-0.755933622220192,0.664943062567423,-0.560825069941966,-0.987328335835364,0.00918182383389 +56,0,-1.5078580841421,1.11065681931139,-0.75655271526814,-0.433965979475654,-0.925820800763387,0.621204380538264,-0.725355435802351,-0.087195045278291,0.500040007799584,-0.351024070867477 +57,0,-1.79333947783294,1.64615611570236,0.593670368718185,0.74125415566331,-0.835056311664806,-0.128283340965351,0.795769070113583,0.338062872249377,0.961610282279288,-0.519755961049099 +58,0,-1.68562328688306,1.79136645116331,-0.917792004629201,-0.224807652067029,0.751172530954049,0.744925497765574,0.054821387540181,-0.268146122719043,-0.373795753322288,-0.023619900695578 +59,0,-1.70325116873164,1.56173898398367,0.937331444475048,-0.189146596668676,0.726757528139029,0.571196020214809,0.150478496659529,0.716370904753891,0.645947936391794,-0.096512499841381 +60,0,-0.1,-0.1,-10,10,0.303748234076738,0.094684069184242,0.846651908762107,0.505710991097032,-0.664846620425076,-0.722934785670171 +61,0,1.51747503460744,-1.57976833969122,-0.313853456471656,-0.670641690437042,0.337481189036041,-0.695059667580877,0.382512664766648,-0.754051294565859,-0.540912893771664,-0.152736592481289 +62,0,1.36729416399966,-1.54942606995245,0.746279765035798,0.320667909398266,0.075712278316126,0.557089028326803,-0.314459962457274,-0.091179395352991,-0.712572618352738,-0.862523770264919 +63,0,1.87551859565403,-1.01245024447758,0.961634242304571,0.99902517180177,0.428576472620752,0.790254229843056,-0.162732148014183,0.057108415575022,0.099625367521191,-0.41779573726667 +64,0,1.8407338686869,-1.58680706359952,-0.293737994923213,-0.654603713924763,-0.15830470325221,-0.4506171823593,0.106217286056366,-0.250165079508456,-0.598894350859836,-0.860382476004742 
+65,0,1.47999238640346,-1.68861965445586,0.661374709635725,0.335413696048534,0.295408469126627,-0.340725080366546,0.611961227458239,0.53327702260923,-0.960254363897463,0.913251337834092 +66,0,1.0735581028252,-1.06052424530937,-0.790281335013236,0.372594655247821,-0.940075790261345,0.972106617215367,-0.246874887198155,-0.501544524013033,-0.134947611932188,0.130090806976322 +67,0,1.63769743034008,-1.64946099093265,-0.600590046972624,0.281621309709353,0.836244003088172,0.56250556179443,-0.244248244001593,0.274273110413607,0.988229164412892,-0.903492892429764 +68,0,1.9226795203725,-1.58810792001545,0.230397844467249,0.458000795025685,0.160534364807898,0.106760231103633,0.084376336290482,-0.410257096809632,-0.388975913032382,0.233684932760446 +69,0,1.42821810695172,-1.75832976379165,0.122894112900537,-0.193746425367835,0.602411133999453,-0.895694511099768,0.347280223444287,0.045175117581033,-0.232661771389541,-0.314648785155521 +70,0,1.42602875697361,-1.16082451050484,0.906027162216176,0.736418182225292,-0.041284854438203,0.308524126840497,0.369205540497406,0.333193031466162,0.98544497734097,-0.253876502721057 +71,0,1.73002019404142,-1.80947421953802,-0.677661468333469,0.07388223501889,-0.682147267310905,0.024126391992196,0.848946249678909,-0.516253994735439,0.202627425635043,-0.897477249843204 +72,0,1.11605808678586,-1.05622349137538,0.492431513300772,-0.737330353527688,0.594894327441348,0.805436037154752,-0.912716679245893,-0.390199322338262,-0.735805203184445,-0.05803264345169 +73,0,1.52878306779173,-1.52822073704896,-0.863821530585294,-0.987125905118183,-0.698190916645222,-0.17859271120364,-0.902497993400075,0.777448050547606,0.03349780154213,0.569802193246196 +74,0,1.69602091303769,-1.68791329506752,-0.919679036112179,-0.083795023015624,0.492078750634905,-0.102786002654994,0.168000984501864,-0.984910911120671,-0.901017886055053,0.639813560268343 
+75,0,1.82292095427058,-1.79921516167805,0.107455937171145,-0.854711756750333,0.344969246269787,0.519092986129825,0.410230657805076,-0.91216461269154,0.033943611687528,-0.306643316979961 +76,0,1.15382245032495,-1.9125109596393,-0.80848616018294,-0.010443047871684,-0.706296790283886,0.822118261736111,0.163327430772402,0.252786291364115,-0.501338527911191,-0.28349201031843 +77,0,1.19521627831595,-1.4347201247938,-0.814416838367815,-0.02940231646999,-0.841428202408144,-0.004586605289201,-0.606434730541928,0.714277316437912,-0.44481897692423,-0.753698456302665 +78,0,1.99358961720643,-1.52499478281942,-0.877637461379848,0.414405535550407,-0.03365581494898,0.624692043559635,-0.832402658891314,-0.723028062732401,-0.867099034604054,-0.185632378061498 +79,0,1.6235192049324,-1.52045677356057,0.977828685636029,-0.57502380941392,-0.402617609462035,0.631967959251952,-0.426504420434097,0.480579460496328,0.686338078276468,-0.793812851707889 +80,1,-1.9061964810895,-1.28908450646839,10,10,0.101102136284509,-0.416199695149021,-0.494850987164782,-0.568698448483212,-0.184782382471875,-0.552230498856606 +81,1,-1.12334568706136,-1.43192728687949,-0.202671045004157,-0.138914163603925,-0.937156710796857,-0.116790109384378,-0.094237431941851,-0.896761118553971,-0.183423320636867,0.458624633065419 +82,1,-1.85938009020988,-1.2014277824818,-0.699402902052328,0.801606907908076,0.618074329335756,-0.172568708757076,-0.075693445304373,0.488815268086692,-0.612225386267585,0.515474858015819 +83,1,-1.44593059276162,-1.50738144143115,-0.146467066237161,0.773717178872341,0.498796984960351,-0.015862721592055,0.487162827649467,-0.002016922590367,-0.480395455657278,0.140660394856319 +84,1,-1.5068337349461,-1.39605748721966,0.935273336022611,-0.65840232577507,0.254028615496319,-0.207949363786322,0.494233964181716,0.342544015156094,-0.2790717466048,0.681766781920308 
+85,1,1.29459521637362,1.25954745515179,-10,-10,-0.772948300582061,-0.755591080857131,-0.795691897784493,0.140653835392209,-0.160483486922781,0.460920935704452 +86,1,1.04689401512909,1.48899924906156,-0.102806023076495,-0.232256721754397,0.982487312078063,0.220639487969972,0.466108251058299,-0.328239000603224,0.955688285869012,0.98401214247364 +87,1,1.58830474403604,1.70226055213414,-0.400909948872293,-0.887240029691788,-0.796366553971199,-0.189011341359002,-0.984264269832423,0.228539348323108,0.696045037642922,-0.734941166556072 +88,1,1.07001216284605,1.81845698640496,-0.906675421892372,0.643501800272306,0.2964442904515,-0.212339822521429,-0.624947347663644,-0.076505534185115,0.690006945874019,0.603178865697037 +89,1,1.47818853391931,1.1810797217516,-0.777878371782176,0.158700400185078,0.77008386941758,0.318201581494366,-0.577373286340777,0.207915408782256,0.169898207168944,-0.134718349741109 +90,0,-1.39792536337696,1.8903759983709,10,-10,-0.381543623044489,-0.150608604917312,0.940200935058958,-0.260126956593852,0.011178432296195,-0.552646188796202 +91,0,-1.34181919280501,1.37770384290606,-0.273896107346467,0.9218628887177,0.183329714125041,0.794995796775324,0.47034078624241,0.587159127993906,0.656065190534019,0.710378359435155 +92,0,-1.08535749655328,1.25684564483175,-0.093438684660175,0.867363731909897,0.501979335337794,0.929133531466716,0.853038546233495,0.231647371842096,-0.921363933789468,0.9955206665909 +93,0,-1.5078347061732,1.75537297346943,-0.353047628963401,0.686996459628496,0.12650715249212,-0.584157551233493,0.67801198459735,0.130184075673761,-0.541365882749818,0.804095414322346 +94,0,-1.67232665291775,1.91398842184753,-0.055989266428472,0.083972688856283,0.495406878960658,-0.531851511151842,-0.68298755038252,-0.762719341237422,0.044183568378214,0.569492860435106 +95,0,1.52196747373202,-1.81272431584475,-10,10,-0.592427348924565,-0.245215291809175,0.450286805609337,-0.61720080602177,-0.078323806376631,-0.138400199664094 
+96,0,1.34277619089321,-1.04264614535854,-0.840523610880692,-0.579768061766314,0.207088065224924,-0.30689024242517,-0.707319832593209,0.067209487208095,-0.219041441615042,0.651618314592841 +97,0,1.72996670685819,-1.26148185356343,-0.071347258910479,-0.571647931880792,0.00248497405952,0.410346123251162,0.294254262248804,0.698018369247902,0.652553267893053,-0.960621219815728 +98,0,1.63679608599505,-1.40483117266873,0.133355343382705,0.785183623637213,0.106494106522641,0.457003384754942,-0.314470768070196,-0.05337112691883,0.86147345141363,-0.770167158107586 +99,0,1.22531932528574,-1.39832123108255,0.751819680541469,0.843477659731268,0.880714646905367,0.20665859661747,-0.85053999542226,0.770244035843202,-0.790477429383416,-0.219373260405667 diff --git a/tests/googletest/descriptor_identification/sisso_regressor/test_sisso_classifier.cc b/tests/googletest/descriptor_identification/sisso_regressor/test_sisso_classifier.cc index c1ce780eea46d81d00a7fe9fc23fcc1298513293..1e4ac804e2c89e0cc719cefa4c7d1b8a4154e9ed 100644 --- a/tests/googletest/descriptor_identification/sisso_regressor/test_sisso_classifier.cc +++ b/tests/googletest/descriptor_identification/sisso_regressor/test_sisso_classifier.cc @@ -52,11 +52,15 @@ namespace value_2[ii] = distribution_12_neg(generator); } - value_1[0] = 0.40; - value_1[40] = 0.40; + value_1[0] = 0.45; + value_1[1] = 0.40; + value_1[40] = 0.45; + value_1[41] = 0.40; - value_2[0] = -0.40; - value_2[40] = 0.40; + value_2[0] = -0.45; + value_2[1] = -0.40; + value_2[40] = 0.45; + value_2[41] = 0.40; for(int ii = 0; ii < 5; ++ii) { @@ -82,11 +86,11 @@ namespace test_value_2[ii] = distribution_12_neg(generator); } - test_value_1[0] = 0.40; - test_value_1[10] = 0.40; + test_value_1[0] = 0.45; + test_value_1[10] = 0.45; - test_value_2[0] = -0.40; - test_value_2[10] = 0.40; + test_value_2[0] = -0.45; + test_value_2[10] = 0.45; node_ptr feat_1 = std::make_shared<FeatureNode>(0, "A", value_1, test_value_1, Unit("m")); node_ptr feat_2 = 
std::make_shared<FeatureNode>(1, "B", value_2, test_value_2, Unit("m")); @@ -163,13 +167,13 @@ namespace EXPECT_EQ(sisso.models().size(), 2); EXPECT_EQ(sisso.models()[0].size(), 3); - EXPECT_EQ(sisso.models()[0][0].n_convex_overlap_train(), 2); - EXPECT_EQ(sisso.models().back()[0].n_convex_overlap_train(), 0); - + EXPECT_EQ(sisso.models()[0][0].n_convex_overlap_train(), 4); EXPECT_EQ(sisso.models()[0][0].n_convex_overlap_test(), 2); + + EXPECT_EQ(sisso.models().back()[0].n_convex_overlap_train(), 0); EXPECT_EQ(sisso.models().back()[0].n_convex_overlap_test(), 0); - EXPECT_EQ(sisso.models()[0][0].n_svm_misclassified_train(), 1); + EXPECT_EQ(sisso.models()[0][0].n_svm_misclassified_train(), 2); EXPECT_EQ(sisso.models()[0][0].n_svm_misclassified_test(), 1); EXPECT_EQ(sisso.models().back()[0].n_svm_misclassified_train(), 0); diff --git a/tests/googletest/utils/test_compare_features.cc b/tests/googletest/utils/test_compare_features.cc index 22d92c2eaeefd0effd82b3e464c2dd20ccd978e2..831c4a156ac5119ec7143abea957eeb3d4aba403 100644 --- a/tests/googletest/utils/test_compare_features.cc +++ b/tests/googletest/utils/test_compare_features.cc @@ -21,14 +21,14 @@ namespace { std::copy_n(val_3.data(), val_3.size(), node_value_arrs::get_d_matrix_ptr(0)); - EXPECT_FALSE(comp_feats::valid_feature_against_selected_max_corr_1(val_1.data(), 4, 1.0, scores, 0.9897782665572893, 1, 0)); - EXPECT_FALSE(comp_feats::valid_feature_against_selected_max_corr_1_feat_list(val_1.data(), 4, 1.0, selected, scores, 0.9897782665572893)); - EXPECT_FALSE(comp_feats::valid_feature_against_selected(val_1.data(), 4, 1.0, scores, 0.9897782665572893, 1, 0)); - EXPECT_FALSE(comp_feats::valid_feature_against_selected_feat_list(val_1.data(), 4, 1.0, selected, scores, 0.9897782665572893)); + EXPECT_FALSE(comp_feats::valid_feature_against_selected_max_corr_1_pearson(val_1.data(), 4, 1.0, scores, 0.9897782665572893, 1, 0)); + 
EXPECT_FALSE(comp_feats::valid_feature_against_selected_max_corr_1_feat_list_pearson(val_1.data(), 4, 1.0, selected, scores, 0.9897782665572893)); + EXPECT_FALSE(comp_feats::valid_feature_against_selected_pearson(val_1.data(), 4, 1.0, scores, 0.9897782665572893, 1, 0)); + EXPECT_FALSE(comp_feats::valid_feature_against_selected_feat_list_pearson(val_1.data(), 4, 1.0, selected, scores, 0.9897782665572893)); - EXPECT_TRUE(comp_feats::valid_feature_against_selected_max_corr_1(val_2.data(), 4, 1.0, scores, 0.9028289727756884, 1, 0)); - EXPECT_TRUE(comp_feats::valid_feature_against_selected_max_corr_1_feat_list(val_2.data(), 4, 1.0, selected, scores, 0.9028289727756884)); - EXPECT_TRUE(comp_feats::valid_feature_against_selected(val_2.data(), 4, 1.0, scores, 0.9028289727756884, 1, 0)); - EXPECT_TRUE(comp_feats::valid_feature_against_selected_feat_list(val_2.data(), 4, 1.0, selected, scores, 0.9028289727756884)); + EXPECT_TRUE(comp_feats::valid_feature_against_selected_max_corr_1_pearson(val_2.data(), 4, 1.0, scores, 0.9028289727756884, 1, 0)); + EXPECT_TRUE(comp_feats::valid_feature_against_selected_max_corr_1_feat_list_pearson(val_2.data(), 4, 1.0, selected, scores, 0.9028289727756884)); + EXPECT_TRUE(comp_feats::valid_feature_against_selected_pearson(val_2.data(), 4, 1.0, scores, 0.9028289727756884, 1, 0)); + EXPECT_TRUE(comp_feats::valid_feature_against_selected_feat_list_pearson(val_2.data(), 4, 1.0, selected, scores, 0.9028289727756884)); } }