diff --git a/src/feature_creation/feature_space/FeatureSpace.cpp b/src/feature_creation/feature_space/FeatureSpace.cpp
index fc34c6357e84911150ffdf79af5a24c9e38782f1..d2d70d0997ae34b6df84b8efa4075352bae80a86 100644
--- a/src/feature_creation/feature_space/FeatureSpace.cpp
+++ b/src/feature_creation/feature_space/FeatureSpace.cpp
@@ -800,7 +800,6 @@ void FeatureSpace::generate_and_project(std::shared_ptr<LossFunction> loss, std:
     #pragma omp parallel firstprivate(worst_score, worst_score_ind, scores_sel_all)
     {
         std::shared_ptr<LossFunction> loss_copy = loss_function_util::copy(loss);
-        std::cout << prop_sorted_d_mat::N_FEATURES << '\t' << prop_sorted_d_mat::SORTED_D_MATRIX.size() << '\t' << node_value_arrs::D_MATRIX.size() << std::endl;
         std::vector<node_ptr> phi_sel_private(phi_sel);
         std::vector<double> scores_sel_private(scores_sel);
         int index_base = _phi.size() + _n_sis_select * (omp_get_thread_num() + _mpi_comm->size());
diff --git a/src/loss_function/LossFunctionConvexHull.cpp b/src/loss_function/LossFunctionConvexHull.cpp
index 61ba0716742b124ff042bc9450803f088640401f..9ffd4addc994a9404f079c834078ba9d9970a3ae 100644
--- a/src/loss_function/LossFunctionConvexHull.cpp
+++ b/src/loss_function/LossFunctionConvexHull.cpp
@@ -228,12 +228,11 @@ void LossFunctionConvexHull::setup_lp(bool initialize_sorted_d_mat)
     if((omp_get_thread_num() == 0) && initialize_sorted_d_mat)
     {
         prop_sorted_d_mat::initialize_sorted_d_matrix_arr(
-            prop_sorted_d_mat::N_FEATURES,
-            _n_task,
-            _n_class,
-            n_samp_per_class
-        );
-        std::cout << prop_sorted_d_mat::N_TASK << '\t' << prop_sorted_d_mat::N_CLASS << '\t' << prop_sorted_d_mat::N_FEATURES << '\t' << prop_sorted_d_mat::N_SAMPLES << '\t' << prop_sorted_d_mat::SORTED_D_MATRIX.size() << std::endl;
+            std::max(prop_sorted_d_mat::N_FEATURES, _n_feat),
+            _n_task,
+            _n_class,
+            n_samp_per_class
+        );
     }
 }
 
diff --git a/tests/googletest/loss_function/test_convex_hull_loss.cc b/tests/googletest/loss_function/test_convex_hull_loss.cc
index 40b61e2bf17bbe7dc7de0bb0aa4e96c25486d36c..c4fc86c97dbfc5946d90241f0541d46e7e3ac7bf 100644
--- a/tests/googletest/loss_function/test_convex_hull_loss.cc
+++ b/tests/googletest/loss_function/test_convex_hull_loss.cc
@@ -124,11 +124,6 @@ namespace
         std::fill_n(_prop_test.begin() + 5, 5, 1.0);
         std::fill_n(_prop_test.begin() + 10, 5, 2.0);
         std::fill_n(_prop_test.begin() + 15, 5, 3.0);
-
-        prop_sorted_d_mat::initialize_sorted_d_matrix_arr(2, _task_sizes_train.size(), 4, std::vector<int>(4, 20));
-        std::copy_n(value_1.data(), _task_sizes_train[0], prop_sorted_d_mat::access_sorted_d_matrix(0));
-        std::copy_n(value_2.data(), _task_sizes_train[0], prop_sorted_d_mat::access_sorted_d_matrix(1));
-        std::cout << prop_sorted_d_mat::access_sorted_d_matrix(1)[0] <<'\t' << prop_sorted_d_mat::access_sorted_d_matrix(0)[0] << std::endl;
     }
 
     void TearDown() override
@@ -156,6 +151,8 @@
             _task_sizes_test,
             2
         );
+        std::copy_n(_phi[0]->value_ptr(), _task_sizes_train[0], prop_sorted_d_mat::access_sorted_d_matrix(0));
+        std::copy_n(_phi[1]->value_ptr(), _task_sizes_train[0], prop_sorted_d_mat::access_sorted_d_matrix(1));
 
         EXPECT_EQ(std::floor(loss.project(_phi[0])), 80);
         EXPECT_EQ(std::floor(loss.project(_phi[1])), 80);
@@ -176,6 +173,8 @@
             _task_sizes_test,
             2
         );
+        std::copy_n(_phi[0]->value_ptr(), _task_sizes_train[0], prop_sorted_d_mat::access_sorted_d_matrix(0));
+        std::copy_n(_phi[1]->value_ptr(), _task_sizes_train[0], prop_sorted_d_mat::access_sorted_d_matrix(1));
 
         LossFunctionConvexHull loss_copy(std::make_shared<LossFunctionConvexHull>(loss));