From 43e5b9502bbdd5229f1d28788079771103bee455 Mon Sep 17 00:00:00 2001
From: Thomas <purcell@fhi-berlin.mpg.de>
Date: Wed, 1 Sep 2021 07:19:22 +0200
Subject: [PATCH] LPWrapper Convex Hull test fixed

Reinitialization removed existing data, leading to the problem
---
 src/feature_creation/feature_space/FeatureSpace.cpp   |  1 -
 src/loss_function/LossFunctionConvexHull.cpp          | 11 +++++------
 .../googletest/loss_function/test_convex_hull_loss.cc |  9 ++++-----
 3 files changed, 9 insertions(+), 12 deletions(-)

diff --git a/src/feature_creation/feature_space/FeatureSpace.cpp b/src/feature_creation/feature_space/FeatureSpace.cpp
index fc34c635..d2d70d09 100644
--- a/src/feature_creation/feature_space/FeatureSpace.cpp
+++ b/src/feature_creation/feature_space/FeatureSpace.cpp
@@ -800,7 +800,6 @@ void FeatureSpace::generate_and_project(std::shared_ptr<LossFunction> loss, std:
     #pragma omp parallel firstprivate(worst_score, worst_score_ind, scores_sel_all)
     {
         std::shared_ptr<LossFunction> loss_copy = loss_function_util::copy(loss);
-        std::cout << prop_sorted_d_mat::N_FEATURES << '\t' << prop_sorted_d_mat::SORTED_D_MATRIX.size() << '\t' << node_value_arrs::D_MATRIX.size() << std::endl;
         std::vector<node_ptr> phi_sel_private(phi_sel);
         std::vector<double> scores_sel_private(scores_sel);
         int index_base = _phi.size() + _n_sis_select * (omp_get_thread_num() + _mpi_comm->size());
diff --git a/src/loss_function/LossFunctionConvexHull.cpp b/src/loss_function/LossFunctionConvexHull.cpp
index 61ba0716..9ffd4add 100644
--- a/src/loss_function/LossFunctionConvexHull.cpp
+++ b/src/loss_function/LossFunctionConvexHull.cpp
@@ -228,12 +228,11 @@ void LossFunctionConvexHull::setup_lp(bool initialize_sorted_d_mat)
     if((omp_get_thread_num() == 0) && initialize_sorted_d_mat)
     {
         prop_sorted_d_mat::initialize_sorted_d_matrix_arr(
-            prop_sorted_d_mat::N_FEATURES,
-            _n_task,
-            _n_class,
-            n_samp_per_class
-        );
-        std::cout << prop_sorted_d_mat::N_TASK << '\t' << prop_sorted_d_mat::N_CLASS << '\t' << prop_sorted_d_mat::N_FEATURES << '\t' << prop_sorted_d_mat::N_SAMPLES << '\t' << prop_sorted_d_mat::SORTED_D_MATRIX.size() << std::endl;
+            std::max(prop_sorted_d_mat::N_FEATURES, _n_feat),
+            _n_task,
+            _n_class,
+            n_samp_per_class
+        );
     }
 }
 
diff --git a/tests/googletest/loss_function/test_convex_hull_loss.cc b/tests/googletest/loss_function/test_convex_hull_loss.cc
index 40b61e2b..c4fc86c9 100644
--- a/tests/googletest/loss_function/test_convex_hull_loss.cc
+++ b/tests/googletest/loss_function/test_convex_hull_loss.cc
@@ -124,11 +124,6 @@ namespace
         std::fill_n(_prop_test.begin() + 5, 5, 1.0);
         std::fill_n(_prop_test.begin() + 10, 5, 2.0);
         std::fill_n(_prop_test.begin() + 15, 5, 3.0);
-
-        prop_sorted_d_mat::initialize_sorted_d_matrix_arr(2, _task_sizes_train.size(), 4, std::vector<int>(4, 20));
-        std::copy_n(value_1.data(), _task_sizes_train[0], prop_sorted_d_mat::access_sorted_d_matrix(0));
-        std::copy_n(value_2.data(), _task_sizes_train[0], prop_sorted_d_mat::access_sorted_d_matrix(1));
-        std::cout << prop_sorted_d_mat::access_sorted_d_matrix(1)[0] << '\t' << prop_sorted_d_mat::access_sorted_d_matrix(0)[0] << std::endl;
     }
 
     void TearDown() override
@@ -156,6 +151,8 @@ namespace
             _task_sizes_test,
             2
         );
+        std::copy_n(_phi[0]->value_ptr(), _task_sizes_train[0], prop_sorted_d_mat::access_sorted_d_matrix(0));
+        std::copy_n(_phi[1]->value_ptr(), _task_sizes_train[0], prop_sorted_d_mat::access_sorted_d_matrix(1));
 
         EXPECT_EQ(std::floor(loss.project(_phi[0])), 80);
         EXPECT_EQ(std::floor(loss.project(_phi[1])), 80);
@@ -176,6 +173,8 @@ namespace
             _task_sizes_test,
             2
         );
+        std::copy_n(_phi[0]->value_ptr(), _task_sizes_train[0], prop_sorted_d_mat::access_sorted_d_matrix(0));
+        std::copy_n(_phi[1]->value_ptr(), _task_sizes_train[0], prop_sorted_d_mat::access_sorted_d_matrix(1));
 
         LossFunctionConvexHull loss_copy(std::make_shared<LossFunctionConvexHull>(loss));
 
-- 
GitLab
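
Note on the fix: the guard added in LossFunctionConvexHull::setup_lp can be illustrated with a small standalone analogue. This is only a sketch under stated assumptions, not the SISSO++ prop_sorted_d_mat API; the namespace sorted_mat_sketch and its initialize()/row() helpers are invented for illustration.

    // Standalone analogue (not the SISSO++ API): a shared matrix whose
    // initializer resizes the backing store. Taking std::max of the current
    // and requested feature counts keeps a second, smaller initialization
    // from discarding data that earlier callers already copied in.
    #include <algorithm>
    #include <cassert>
    #include <cstddef>
    #include <vector>

    namespace sorted_mat_sketch  // hypothetical names, for illustration only
    {
        int N_FEATURES = 0;
        int N_SAMPLES = 0;
        std::vector<double> MATRIX;

        // Re-initialize the shared matrix without ever shrinking it.
        void initialize(int n_feat, int n_samp)
        {
            N_FEATURES = std::max(N_FEATURES, n_feat);  // the guard added by the patch
            N_SAMPLES = n_samp;
            MATRIX.resize(static_cast<std::size_t>(N_FEATURES) * N_SAMPLES, 0.0);
        }

        // Pointer to the row holding feature feat_ind (cf. access_sorted_d_matrix).
        double* row(int feat_ind)
        {
            return MATRIX.data() + static_cast<std::size_t>(feat_ind) * N_SAMPLES;
        }
    }

    int main()
    {
        using namespace sorted_mat_sketch;

        initialize(2, 20);             // first caller sizes the matrix for 2 features
        std::fill_n(row(1), 20, 1.0);  // and copies feature values into the second row

        initialize(1, 20);             // a later setup call requests fewer features...
        assert(N_FEATURES == 2);       // ...but the max() keeps the larger allocation,
        assert(row(1)[0] == 1.0);      // so the previously copied values survive
        return 0;
    }

The test change follows the same reasoning: the std::copy_n calls now run only after the LossFunctionConvexHull object is constructed, so the values they write can no longer be wiped by the reinitialization performed in setup_lp.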