diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index fa63b0fbad4c84724214153edd45fa8730f3030a..8fb53deee3d411d083a642027747aed855a28855 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -415,6 +415,8 @@ build-gnu-gcov:
     name: ${CI_JOB_NAME}-${CI_COMMIT_REF_NAME}-${CI_COMMIT_SHA}
     reports:
       cobertura: build_gcov/coverage_xml.xml
+    paths:
+      - build_gcov/Testing
 
 pages:
   stage: doc_builds
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index e090afedb7cbcee216922f69ed5144e551e3394b..793780f2b261d527d4b32590ad2c321c84b57069 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -51,6 +51,18 @@ add_test(
     NAME Classification
     COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} 2 ${MPIEXEC_PREFLAGS} "${CMAKE_BINARY_DIR}/bin/sisso++" "${CMAKE_SOURCE_DIR}/tests/exec_test/classification/sisso.json" ${MPIEXEC_POSTFLAGS}
 )
+add_test(
+    NAME Classification_Max_Correlation_NE_One
+    COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} 2 ${MPIEXEC_PREFLAGS} "${CMAKE_BINARY_DIR}/bin/sisso++" "${CMAKE_SOURCE_DIR}/tests/exec_test/classification_max_corr/sisso.json" ${MPIEXEC_POSTFLAGS}
+)
+add_test(
+    NAME Classification_Generate_Project
+    COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} 2 ${MPIEXEC_PREFLAGS} "${CMAKE_BINARY_DIR}/bin/sisso++" "${CMAKE_SOURCE_DIR}/tests/exec_test/classification_gen_proj/sisso.json" ${MPIEXEC_POSTFLAGS}
+)
+add_test(
+    NAME Classification_Max_Correlation_NE_One_Generate_Project
+    COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} 2 ${MPIEXEC_PREFLAGS} "${CMAKE_BINARY_DIR}/bin/sisso++" "${CMAKE_SOURCE_DIR}/tests/exec_test/classification_max_corr_gen_proj/sisso.json" ${MPIEXEC_POSTFLAGS}
+)
 add_test(
     NAME Regression
     COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} 2 ${MPIEXEC_PREFLAGS} "${CMAKE_BINARY_DIR}/bin/sisso++" "${CMAKE_SOURCE_DIR}/tests/exec_test/default/sisso.json" ${MPIEXEC_POSTFLAGS}
@@ -59,14 +71,18 @@ add_test(
     NAME Generate_Project
     COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} 2 ${MPIEXEC_PREFLAGS} "${CMAKE_BINARY_DIR}/bin/sisso++" "${CMAKE_SOURCE_DIR}/tests/exec_test/gen_proj/sisso.json" ${MPIEXEC_POSTFLAGS}
 )
-add_test(
-    NAME Log_Regression
-    COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} 2 ${MPIEXEC_PREFLAGS} "${CMAKE_BINARY_DIR}/bin/sisso++" "${CMAKE_SOURCE_DIR}/tests/exec_test/log_reg/sisso.json" ${MPIEXEC_POSTFLAGS}
-)
 add_test(
     NAME Maximum_Correlation_NE_One
     COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} 2 ${MPIEXEC_PREFLAGS} "${CMAKE_BINARY_DIR}/bin/sisso++" "${CMAKE_SOURCE_DIR}/tests/exec_test/max_corr/sisso.json" ${MPIEXEC_POSTFLAGS}
 )
+add_test(
+    NAME Maximum_Correlation_NE_One_Generate_Project
+    COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} 2 ${MPIEXEC_PREFLAGS} "${CMAKE_BINARY_DIR}/bin/sisso++" "${CMAKE_SOURCE_DIR}/tests/exec_test/max_corr_gen_proj/sisso.json" ${MPIEXEC_POSTFLAGS}
+)
+add_test(
+    NAME Log_Regression
+    COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} 2 ${MPIEXEC_PREFLAGS} "${CMAKE_BINARY_DIR}/bin/sisso++" "${CMAKE_SOURCE_DIR}/tests/exec_test/log_reg/sisso.json" ${MPIEXEC_POSTFLAGS}
+)
 add_test(
     NAME Train_Only
     COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} 2 ${MPIEXEC_PREFLAGS} "${CMAKE_BINARY_DIR}/bin/sisso++" "${CMAKE_SOURCE_DIR}/tests/exec_test/no_test_data/sisso.json" ${MPIEXEC_POSTFLAGS}
diff --git a/src/descriptor_identifier/model/Model.cpp b/src/descriptor_identifier/model/Model.cpp
index 15f36fa1aa2850f306818a9a439db9c1b65a4b4f..cce0459e260e6dd452c6aa1ca37218f7b41d30de 100644
--- a/src/descriptor_identifier/model/Model.cpp
+++ b/src/descriptor_identifier/model/Model.cpp
@@ -531,6 +531,7 @@ void Model::populate_model(const std::string train_filename, const std::string t
             feat_vals[nf][ns] = std::stod(split_line[2 + nf + with_samp_id]);
         }
     }
+
     for(int ns = 0; ns < _n_samp_test; ++ns)
     {
         std::getline(test_file_stream, test_line);
diff --git a/src/descriptor_identifier/model/ModelRegressor.hpp b/src/descriptor_identifier/model/ModelRegressor.hpp
index 73c047c589bd67dac340718dcdbc231bbfb0df3f..d2e25bc272709da60db2f56c5b6b766e9e22d6e9 100644
--- a/src/descriptor_identifier/model/ModelRegressor.hpp
+++ b/src/descriptor_identifier/model/ModelRegressor.hpp
@@ -243,7 +243,7 @@ public:
     {
         std::vector<double> p = prop_train();
         std::vector<double> p_est = prop_train_est();
-        return util_funcs::r2(p.data(), p_est.data(), _n_samp_test);
+        return util_funcs::r2(p.data(), p_est.data(), _n_samp_train);
     }
 
     // DocString: model_reg_test_r2
diff --git a/src/feature_creation/feature_space/FeatureSpace.cpp b/src/feature_creation/feature_space/FeatureSpace.cpp
index 720e9156f7759034862e476d00b3b6d65666a782..6d58b86ebfb7eb49c602d9361104a7b52fa61b5e 100644
--- a/src/feature_creation/feature_space/FeatureSpace.cpp
+++ b/src/feature_creation/feature_space/FeatureSpace.cpp
@@ -152,7 +152,7 @@ FeatureSpace::FeatureSpace(InputParser inputs):
     set_op_lists();
 
     double start = omp_get_wtime();
-    generate_feature_space();
+    generate_feature_space(_phi, _start_rung, _prop_train);
     _mpi_comm->barrier();
     double duration = omp_get_wtime() - start;
     if(_mpi_comm->rank() == 0)
@@ -310,347 +310,6 @@ void FeatureSpace::generate_reparam_feats(
         }
     }
 }
-
-void FeatureSpace::generate_reparam_feature_set(const std::vector<double>& prop)
-{
-    double u_bound = 1e50;
-    double l_bound = 1e-50;
-    std::vector<int> inds;
-
-    for(int nn = 1; nn <= _max_rung - _n_rung_generate; ++nn)
-    {
-        node_value_arrs::clear_temp_reg();
-        if(nn == _max_rung)
-        {
-            u_bound = _u_bound;
-            l_bound = _l_bound;
-        }
-        std::vector<node_ptr> next_phi;
-        _n_feat = _phi.size();
-
-        unsigned long int feat_ind = _phi.size() + _phi_reparam.size();
-
-        node_value_arrs::clear_temp_reg();
-        double start = omp_get_wtime();
-        #pragma omp parallel firstprivate(feat_ind, l_bound, u_bound)
-        {
-            std::vector<node_ptr> next_phi_private;
-            std::shared_ptr<NLOptimizer> optimizer_param = nlopt_wrapper::get_optimizer(_project_type, _task_sizes_train, _prop_train, _max_rung, _max_param_depth);
-            std::shared_ptr<NLOptimizer> optimizer_reparam = nlopt_wrapper::get_optimizer(_project_type, _task_sizes_train, prop, _max_rung, _max_param_depth);
-
-            #ifdef OMP45
-            #pragma omp for schedule(monotonic: dynamic)
-            #else
-            #pragma omp for schedule(dynamic)
-            #endif
-            for(auto feat_1 = _phi_reparam.begin() + _start_rung_reparam.back() + _mpi_comm->rank(); feat_1 < _phi_reparam.end(); feat_1 += _mpi_comm->size())
-            {
-                generate_non_param_feats(feat_1, next_phi_private, _phi_reparam.begin(), feat_ind, l_bound, u_bound);
-                generate_param_feats(feat_1, next_phi_private, _phi_reparam.begin(), feat_ind, optimizer_param, l_bound, u_bound);
-            }
-
-            #ifdef OMP45
-            #pragma omp for schedule(monotonic: dynamic)
-            #else
-            #pragma omp for schedule(dynamic)
-            #endif
-            for(auto feat_1 = _phi.begin() + _start_rung[nn-1] + _mpi_comm->rank(); feat_1 < (_phi.begin() + _end_no_params[nn-1]); feat_1 += _mpi_comm->size())
-            {
-                generate_reparam_feats(feat_1, next_phi_private, feat_ind, optimizer_reparam, l_bound, u_bound);
-            }
-
-            #pragma omp critical
-            next_phi.insert(next_phi.end(), next_phi_private.begin(), next_phi_private.end());
-        }
-        _start_rung_reparam.push_back(_phi_reparam.size());
-        node_value_arrs::clear_temp_reg();
-        if((nn < _max_rung) || (nn <= _n_rung_store) || (_mpi_comm->size() == 1))
-        {
-            int new_phi_size;
-            int phi_size_start = _phi_reparam.size();
-            if(_mpi_comm->rank() == 0)
-            {
-                std::vector<std::vector<node_ptr>> next_phi_gathered;
-                mpi::gather(*_mpi_comm, next_phi, next_phi_gathered, 0);
-                feat_ind = _phi_reparam.size();
-                for(auto& next_phi_vec : next_phi_gathered)
-                {
-                    _phi_reparam.insert(_phi_reparam.end(), next_phi_vec.begin(), next_phi_vec.end());
-                }
-                new_phi_size = _phi_reparam.size();
-
-                // Sort the features to ensure consistent feature spaces for all MPI/OpenMP configurations
-                std::sort(
-                    _phi_reparam.begin() + _start_rung_reparam.back(),
-                    _phi_reparam.end(),
-                    [feat_ind](node_ptr n1, node_ptr n2){return n1->sort_score(feat_ind) < n2->sort_score(feat_ind);}
-                );
-
-                // Reindex sorted features
-                std::for_each(
-                    _phi_reparam.begin() + _start_rung_reparam.back(),
-                    _phi_reparam.end(),
-                    [&feat_ind](node_ptr n){n->reindex(feat_ind); ++feat_ind;}
-                );
-
-                mpi::broadcast(*_mpi_comm, new_phi_size, 0);
-
-                for(int bb = 0; bb <= (new_phi_size - phi_size_start) / 10000; ++bb)
-                {
-                    mpi::broadcast(*_mpi_comm, &_phi_reparam[phi_size_start + bb * 10000], std::min(10000, new_phi_size - phi_size_start - bb * 10000), 0);
-                }
-            }
-            else
-            {
-                mpi::gather(*_mpi_comm, next_phi, 0);
-                mpi::broadcast(*_mpi_comm, new_phi_size, 0);
-                _phi_reparam.resize(new_phi_size);
-
-                for(int bb = 0; bb <= (new_phi_size - phi_size_start) / 10000; ++bb)
-                {
-                    mpi::broadcast(*_mpi_comm, &_phi_reparam[phi_size_start + bb * 10000], std::min(10000, new_phi_size - phi_size_start - bb * 10000), 0);
-                }
-            }
-
-            if(phi_size_start == new_phi_size)
-            {
-                throw std::logic_error("No features created during this rung (" + std::to_string(nn) + ")");
-            }
-
-            node_value_arrs::clear_temp_reg();
-            if(nn < _max_rung)
-            {
-                // Remove identical features
-                std::vector<double> scores(_phi_reparam.size());
-                _mpi_comm->barrier();
-                project_funcs::project_r(_prop_train.data(), scores.data(), _phi_reparam, _task_sizes_train, 1);
-                scores.erase(scores.begin(), scores.begin() + _start_rung_reparam.back());
-                inds = util_funcs::argsort<double>(scores);
-
-                std::vector<int> del_inds;
-
-                _mpi_comm->barrier();
-                node_value_arrs::clear_temp_reg();
-                for(int sc = 0; sc < scores.size() - 1; ++sc)
-                {
-                    #ifdef PARAMETERIZE
-                    if(_phi_reparam[inds[sc] + _start_rung_reparam.back()]->n_params() > 0)
-                    {
-                        continue;
-                    }
-                    #endif
-
-                    if(scores[inds[sc]] > -1e-10)
-                    {
-                        double base_val = std::abs(
-                            util_funcs::r(
-                                _phi_reparam[_start_rung_reparam.back() + inds[sc]]->value_ptr(),
-                                _phi_reparam[_start_rung_reparam.back() + inds[sc]]->value_ptr(),
-                                _n_samp_train
-                            )
-                        );
-                        for(int sc2 = sc + 1; sc2 < scores.size(); ++sc2)
-                        {
-                            double comp = std::abs(
-                                base_val - std::abs(
-                                    util_funcs::r(
-                                        _phi_reparam[_start_rung_reparam.back() + inds[sc]]->value_ptr(),
-                                        _phi_reparam[_start_rung_reparam.back() + inds[sc2]]->value_ptr(0, true),
-                                        _n_samp_train
-                                    )
-                                )
-                            );
-                            if(comp < 1e-10)
-                            {
-                                del_inds.push_back(-1 * (inds[sc] + _start_rung_reparam.back()));
-                                break;
-                            }
-                        }
-                    }
-                    else if(scores[inds[sc + 1]] - scores[inds[sc]] < 1e-10)
-                    {
-                        double base_val = std::abs(
-                            util_funcs::r(
-                                _phi_reparam[_start_rung_reparam.back() + inds[sc]]->value_ptr(),
-                                _phi_reparam[_start_rung_reparam.back() + inds[sc]]->value_ptr(),
-                                _n_samp_train
-                            )
-                        );
-                        double comp = std::abs(
-                            base_val - std::abs(
-                                util_funcs::r(
-                                    _phi_reparam[_start_rung_reparam.back() + inds[sc]]->value_ptr(),
-                                    _phi_reparam[_start_rung_reparam.back() + inds[sc + 1]]->value_ptr(0, true),
-                                    _n_samp_train
-                                )
-                            )
-                        );
-                        if(comp < 1e-10)
-                        {
-                            del_inds.push_back(-1 * (inds[sc] + _start_rung.back()));
-                        }
-                    }
-                }
-
-                inds = util_funcs::argsort<int>(del_inds);
-                for(int ii = 0; ii < inds.size(); ++ii)
-                {
-                    _phi_reparam.erase(_phi_reparam.begin() - del_inds[inds[ii]]);
-                }
-
-                // Reindex
-                for(int ff = _start_rung_reparam.back(); ff < _phi_reparam.size(); ++ff)
-                {
-                    _phi_reparam[ff]->reindex(ff + _n_feat);
-                }
-            }
-        }
-        else
-        {
-            std::vector<size_t> next_phi_sizes(_mpi_comm->size());
-            if(_mpi_comm->rank() == 0)
-            {
-                mpi::gather(*_mpi_comm, next_phi.size(), next_phi_sizes.data(), 0);
-                mpi::broadcast(*_mpi_comm, next_phi_sizes.data(), next_phi_sizes.size(), 0);
-            }
-            else
-            {
-                mpi::gather(*_mpi_comm, next_phi.size(), 0);
-                mpi::broadcast(*_mpi_comm, next_phi_sizes.data(), next_phi_sizes.size(), 0);
-            }
-
-            size_t n_feat = std::accumulate(next_phi_sizes.begin(), next_phi_sizes.end(), 0);
-            if(n_feat == 0)
-            {
-                throw std::logic_error("No features created during this rung (" + std::to_string(nn) + ")");
-            }
-
-            size_t n_feat_rank = n_feat / _mpi_comm->size();
-            size_t n_feat_below_rank = _mpi_comm->rank() * n_feat_rank;
-            size_t n_feat_added = 0;
-            if(_mpi_comm->rank() < n_feat % _mpi_comm->size())
-            {
-                ++n_feat_rank;
-                n_feat_below_rank += _mpi_comm->rank();
-            }
-            else
-            {
-                n_feat_below_rank += n_feat % _mpi_comm->size();
-            }
-
-            while((n_feat_added < n_feat_rank) && (next_phi.size() > 0))
-            {
-                _phi_reparam.push_back(next_phi.back());
-                next_phi.pop_back();
-                ++n_feat_added;
-            }
-
-            // This can be calculated without an all_gather, using it to not introduce too many things at one time
-            std::vector<size_t> next_phi_needed(_mpi_comm->size());
-            std::vector<size_t> next_phi_excess(_mpi_comm->size());
-            if(_mpi_comm->rank() == 0)
-            {
-                mpi::gather(*_mpi_comm, next_phi.size(), next_phi_excess.data(), 0);
-                mpi::gather(*_mpi_comm, n_feat_rank - n_feat_added, next_phi_needed.data(), 0);
-
-                mpi::broadcast(*_mpi_comm, next_phi_excess.data(), next_phi_excess.size(), 0);
-                mpi::broadcast(*_mpi_comm, next_phi_needed.data(), next_phi_needed.size(), 0);
-            }
-            else
-            {
-                mpi::gather(*_mpi_comm, next_phi.size(), 0);
-                mpi::gather(*_mpi_comm, n_feat_rank - n_feat_added, 0);
-
-                mpi::broadcast(*_mpi_comm, next_phi_excess.data(), next_phi_excess.size(), 0);
-                mpi::broadcast(*_mpi_comm, next_phi_needed.data(), next_phi_needed.size(), 0);
-            }
-
-            std::vector<size_t> send_sizes(next_phi_sizes.size(), 0);
-            std::vector<size_t> recv_sizes(next_phi_sizes.size(), 0);
-            // Is this rank sending or receiving?
-            if(next_phi_excess[_mpi_comm->rank()] > 0)
-            {
-                size_t total_sent = std::accumulate(next_phi_excess.begin(), next_phi_excess.begin() + _mpi_comm->rank(), 0);
-                size_t prev_sent_recv = 0;
-                size_t send_size = 0;
-                int ind = 0;
-                while((prev_sent_recv <= total_sent) && (ind < _mpi_comm->size()))
-                {
-                    prev_sent_recv += next_phi_needed[ind];
-                    ++ind;
-                }
-                send_size = std::min(next_phi.size(), prev_sent_recv - total_sent);
-                send_sizes[ind-1] = send_size;
-                total_sent = send_size;
-                while((total_sent < next_phi.size()) && (ind < _mpi_comm->size()))
-                {
-                    send_size = std::min(next_phi.size() - total_sent, next_phi_needed[ind]);
-                    send_sizes[ind] = send_size;
-                    total_sent += send_size;
-                    ++ind;
-                }
-
-                total_sent = 0;
-                for(int pp = 0; pp < send_sizes.size(); ++pp)
-                {
-                    if(send_sizes[pp] == 0)
-                        continue;
-
-                    std::vector<node_ptr> to_send(send_sizes[pp]);
-                    std::copy_n(next_phi.begin() + total_sent, send_sizes[pp], to_send.begin());
-                    _mpi_comm->send(pp, _mpi_comm->cantor_tag_gen(_mpi_comm->rank(), pp, 1, 0), to_send);
-                    total_sent += send_sizes[pp];
-                }
-            }
-            else
-            {
-                size_t total_recv = std::accumulate(next_phi_needed.begin(), next_phi_needed.begin() + _mpi_comm->rank(), 0);
-                size_t prev_recv_sent = 0;
-                size_t recv_size = 0;
-                int ind = 0;
-                while((prev_recv_sent <= total_recv) && (ind < _mpi_comm->size()))
-                {
-                    prev_recv_sent += next_phi_excess[ind];
-                    ++ind;
-                }
-                recv_size = std::min(n_feat_rank - n_feat_added, prev_recv_sent - total_recv);
-                recv_sizes[ind-1] = recv_size;
-                total_recv = recv_size;
-                while((total_recv < n_feat_rank - n_feat_added) && (ind < _mpi_comm->size()))
-                {
-                    recv_size = std::min(n_feat_rank - n_feat_added - total_recv, next_phi_excess[ind]);
-                    recv_sizes[ind] = recv_size;
-                    total_recv += recv_size;
-                    ++ind;
-                }
-
-                total_recv = 0;
-                for(int pp = 0; pp < recv_sizes.size(); ++pp)
-                {
-                    if(recv_sizes[pp] == 0)
-                    {
-                        continue;
-                    }
-
-                    std::vector<node_ptr> to_recv;
-                    _mpi_comm->recv(pp, _mpi_comm->cantor_tag_gen(pp, _mpi_comm->rank(), 1, 0), to_recv);
-                    for(auto& feat : to_recv)
-                    {
-                        _phi_reparam.push_back(feat);
-                    }
-                }
-            }
-
-            #pragma omp parallel for
-            for(int ff = _start_rung.back(); ff < _phi_reparam.size(); ++ff)
-            {
-                _phi_reparam[ff]->reindex(ff + n_feat_below_rank, ff);
-                _phi_reparam[ff]->set_value();
-                _phi_reparam[ff]->set_test_value();
-            }
-        }
-    }
-}
 #endif
 
 void FeatureSpace::generate_non_param_feats(
@@ -688,7 +347,12 @@ void FeatureSpace::generate_non_param_feats(
     }
 }
 
-void FeatureSpace::generate_feature_space()
+void FeatureSpace::generate_feature_space(
+    std::vector<node_ptr>& feat_set,
+    std::vector<int>& start_rung,
+    const std::vector<double>& prop,
+    bool reparam
+)
 {
     double u_bound = 1e50;
     double l_bound = 1e-50;
@@ -703,9 +367,16 @@ void FeatureSpace::generate_feature_space()
             l_bound = _l_bound;
         }
         std::vector<node_ptr> next_phi;
-        _n_feat = _phi.size();
-
-        unsigned long int feat_ind = _phi.back()->feat_ind();
+        unsigned long int feat_ind;
+        if(!reparam)
+        {
+            feat_ind = feat_set.back()->feat_ind() + 1;
+            _n_feat = feat_set.size();
+        }
+        else
+        {
+            feat_ind = _phi.size() + _phi_reparam.size();
+        }
 
         node_value_arrs::clear_temp_reg();
         double start = omp_get_wtime();
@@ -720,10 +391,26 @@ void FeatureSpace::generate_feature_space()
             #else
             #pragma omp for schedule(dynamic)
             #endif
-            for(auto feat_1 = _phi.begin() + _start_rung.back() + _mpi_comm->rank(); feat_1 < _phi.end(); feat_1 += _mpi_comm->size())
+            for(auto feat_1 = feat_set.begin() + start_rung.back() + _mpi_comm->rank(); feat_1 < feat_set.end(); feat_1 += _mpi_comm->size())
             {
-                generate_non_param_feats(feat_1, next_phi_private, _phi.begin(), feat_ind, l_bound, u_bound);
-                generate_param_feats(feat_1, next_phi_private, _phi.begin(), feat_ind, optimizer, l_bound, u_bound);
+                generate_non_param_feats(feat_1, next_phi_private, feat_set.begin(), feat_ind, l_bound, u_bound);
+                generate_param_feats(feat_1, next_phi_private, feat_set.begin(), feat_ind, optimizer, l_bound, u_bound);
+            }
+
+            if(reparam)
+            {
+                std::shared_ptr<NLOptimizer> optimizer_reparam = nlopt_wrapper::get_optimizer(
+                    _project_type, _task_sizes_train, prop, _max_rung, _max_param_depth
+                );
+                #ifdef OMP45
+                #pragma omp for schedule(monotonic: dynamic)
+                #else
+                #pragma omp for schedule(dynamic)
+                #endif
+                for(auto feat_1 = _phi.begin() + _start_rung[nn-1] + _mpi_comm->rank(); feat_1 < (_phi.begin() + _end_no_params[nn-1]); feat_1 += _mpi_comm->size())
+                {
+                    generate_reparam_feats(feat_1, next_phi_private, feat_ind, optimizer_reparam, l_bound, u_bound);
+                }
             }
 
             #pragma omp critical
@@ -738,43 +425,43 @@ void FeatureSpace::generate_feature_space()
             #else
             #pragma omp for schedule(dynamic)
             #endif
-            for(auto feat_1 = _phi.begin() + _start_rung.back() + _mpi_comm->rank(); feat_1 < _phi.end(); feat_1 += _mpi_comm->size())
+            for(auto feat_1 = feat_set.begin() + start_rung.back() + _mpi_comm->rank(); feat_1 < feat_set.end(); feat_1 += _mpi_comm->size())
             {
-                generate_non_param_feats(feat_1, next_phi_private, _phi.begin(), feat_ind, l_bound, u_bound);
+                generate_non_param_feats(feat_1, next_phi_private, feat_set.begin(), feat_ind, l_bound, u_bound);
             }
 
             #pragma omp critical
             next_phi.insert(next_phi.end(), next_phi_private.begin(), next_phi_private.end());
         }
         #endif
-        _start_rung.push_back(_phi.size());
+        start_rung.push_back(feat_set.size());
         node_value_arrs::clear_temp_reg();
         if((nn < _max_rung) || (nn <= _n_rung_store) || (_mpi_comm->size() == 1))
         {
             int new_phi_size;
-            int phi_size_start = _phi.size();
+            int phi_size_start = feat_set.size();
             if(_mpi_comm->rank() == 0)
             {
                 std::vector<std::vector<node_ptr>> next_phi_gathered;
                 mpi::gather(*_mpi_comm, next_phi, next_phi_gathered, 0);
-                feat_ind = _phi.size();
+                feat_ind = feat_set.size();
                 for(auto& next_phi_vec : next_phi_gathered)
                 {
-                    _phi.insert(_phi.end(), next_phi_vec.begin(), next_phi_vec.end());
+                    feat_set.insert(feat_set.end(), next_phi_vec.begin(), next_phi_vec.end());
                 }
-                new_phi_size = _phi.size();
+                new_phi_size = feat_set.size();
 
                 // Sort the features to ensure consistent feature spaces for all MPI/OpenMP configurations
                 std::sort(
-                    _phi.begin() + _start_rung.back(),
-                    _phi.end(),
+                    feat_set.begin() + start_rung.back(),
+                    feat_set.end(),
                     [feat_ind](node_ptr n1, node_ptr n2){return n1->sort_score(feat_ind) < n2->sort_score(feat_ind);}
                 );
 
                 // Reindex sorted features
                 std::for_each(
-                    _phi.begin() + _start_rung.back(),
-                    _phi.end(),
+                    feat_set.begin() + start_rung.back(),
+                    feat_set.end(),
                     [&feat_ind](node_ptr n){n->reindex(feat_ind); ++feat_ind;}
                 );
 
@@ -782,18 +469,18 @@ void FeatureSpace::generate_feature_space()
 
                 for(int bb = 0; bb <= (new_phi_size - phi_size_start) / 10000; ++bb)
                 {
-                    mpi::broadcast(*_mpi_comm, &_phi[phi_size_start + bb * 10000], std::min(10000, new_phi_size - phi_size_start - bb * 10000), 0);
+                    mpi::broadcast(*_mpi_comm, &feat_set[phi_size_start + bb * 10000], std::min(10000, new_phi_size - phi_size_start - bb * 10000), 0);
                 }
             }
             else
             {
                 mpi::gather(*_mpi_comm, next_phi, 0);
                 mpi::broadcast(*_mpi_comm, new_phi_size, 0);
-                _phi.resize(new_phi_size);
+                feat_set.resize(new_phi_size);
 
                 for(int bb = 0; bb <= (new_phi_size - phi_size_start) / 10000; ++bb)
                 {
-                    mpi::broadcast(*_mpi_comm, &_phi[phi_size_start + bb * 10000], std::min(10000, new_phi_size - phi_size_start - bb * 10000), 0);
+                    mpi::broadcast(*_mpi_comm, &feat_set[phi_size_start + bb * 10000], std::min(10000, new_phi_size - phi_size_start - bb * 10000), 0);
                 }
             }
 
@@ -806,10 +493,10 @@ void FeatureSpace::generate_feature_space()
             if(nn < _max_rung)
             {
                 // Remove identical features
-                _scores.resize(_phi.size());
+                _scores.resize(feat_set.size());
                 _mpi_comm->barrier();
-                project_funcs::project_r(_prop_train.data(), _scores.data(), _phi, _task_sizes_train, 1);
-                _scores.erase(_scores.begin(), _scores.begin() + _start_rung[_start_rung.size() - 1]);
+                project_funcs::project_r(_prop_train.data(), _scores.data(), feat_set, _task_sizes_train, 1);
+                _scores.erase(_scores.begin(), _scores.begin() + start_rung[start_rung.size() - 1]);
                 inds = util_funcs::argsort<double>(_scores);
 
                 std::vector<int> del_inds;
@@ -819,7 +506,7 @@ void FeatureSpace::generate_feature_space()
                 for(int sc = 0; sc < _scores.size() - 1; ++sc)
                 {
                     #ifdef PARAMETERIZE
-                    if(_phi[inds[sc] + _start_rung.back()]->n_params() > 0)
+                    if(feat_set[inds[sc] + start_rung.back()]->n_params() > 0)
                     {
                         continue;
                     }
@@ -829,8 +516,8 @@ void FeatureSpace::generate_feature_space()
                     {
                         double base_val = std::abs(
                             util_funcs::r(
-                                _phi[_start_rung.back() + inds[sc]]->value_ptr(),
-                                _phi[_start_rung.back() + inds[sc]]->value_ptr(),
+                                feat_set[start_rung.back() + inds[sc]]->value_ptr(),
+                                feat_set[start_rung.back() + inds[sc]]->value_ptr(),
                                 _n_samp_train
                             )
                         );
@@ -839,15 +526,15 @@ void FeatureSpace::generate_feature_space()
                             double comp = std::abs(
                                 base_val - std::abs(
                                     util_funcs::r(
-                                        _phi[_start_rung.back() + inds[sc]]->value_ptr(),
-                                        _phi[_start_rung.back() + inds[sc2]]->value_ptr(0, true),
+                                        feat_set[start_rung.back() + inds[sc]]->value_ptr(),
+                                        feat_set[start_rung.back() + inds[sc2]]->value_ptr(0, true),
                                         _n_samp_train
                                     )
                                 )
                             );
                             if(comp < 1e-10)
                             {
-                                del_inds.push_back(-1 * (inds[sc] + _start_rung.back()));
+                                del_inds.push_back(-1 * (inds[sc] + start_rung.back()));
                                 break;
                             }
                         }
@@ -856,23 +543,23 @@ void FeatureSpace::generate_feature_space()
                     {
                         double base_val = std::abs(
                             util_funcs::r(
-                                _phi[_start_rung.back() + inds[sc]]->value_ptr(),
-                                _phi[_start_rung.back() + inds[sc]]->value_ptr(),
+                                feat_set[start_rung.back() + inds[sc]]->value_ptr(),
+                                feat_set[start_rung.back() + inds[sc]]->value_ptr(),
                                 _n_samp_train
                             )
                         );
                         double comp = std::abs(
                             base_val - std::abs(
                                 util_funcs::r(
-                                    _phi[_start_rung.back() + inds[sc]]->value_ptr(),
-                                    _phi[_start_rung.back() + inds[sc + 1]]->value_ptr(0, true),
+                                    feat_set[start_rung.back() + inds[sc]]->value_ptr(),
+                                    feat_set[start_rung.back() + inds[sc + 1]]->value_ptr(0, true),
                                     _n_samp_train
                                 )
                             )
                         );
                         if(comp < 1e-10)
                         {
-                            del_inds.push_back(-1 * (inds[sc] + _start_rung.back()));
+                            del_inds.push_back(-1 * (inds[sc] + start_rung.back()));
                         }
                     }
                 }
@@ -880,29 +567,31 @@ void FeatureSpace::generate_feature_space()
                 inds = util_funcs::argsort<int>(del_inds);
                 for(int ii = 0; ii < inds.size(); ++ii)
                 {
-                    _phi.erase(_phi.begin() - del_inds[inds[ii]]);
+                    feat_set.erase(feat_set.begin() - del_inds[inds[ii]]);
                 }
 
                 // Reindex
-                for(int ff = _start_rung.back(); ff < _phi.size(); ++ff)
+                for(int ff = start_rung.back(); ff < feat_set.size(); ++ff)
                 {
-                    _phi[ff]->reindex(ff);
+                    feat_set[ff]->reindex(ff);
                 }
             }
             node_value_arrs::clear_temp_reg();
-            for(int ff = _start_rung.back(); ff < _phi.size(); ++ff)
+            if(!reparam)
             {
-                _phi[ff]->reset_feats(_phi);
-            }
-
-            if(nn <= _n_rung_store)
-            {
-                node_value_arrs::resize_values_arr(nn, _phi.size());
-
-                for(int ff = _start_rung.back(); ff < _phi.size(); ++ff)
+                for(int ff = start_rung.back(); ff < feat_set.size(); ++ff)
                 {
-                    _phi[ff]->set_value();
-                    _phi[ff]->set_test_value();
+                    feat_set[ff]->reset_feats(feat_set);
+                }
+                if(nn <= _n_rung_store)
+                {
+                    node_value_arrs::resize_values_arr(nn, feat_set.size());
+
+                    for(int ff = start_rung.back(); ff < feat_set.size(); ++ff)
+                    {
+                        feat_set[ff]->set_value();
+                        feat_set[ff]->set_test_value();
+                    }
                 }
             }
         }
@@ -941,7 +630,7 @@ void FeatureSpace::generate_feature_space()
 
             while((n_feat_added < n_feat_rank) && (next_phi.size() > 0))
             {
-                _phi.push_back(next_phi.back());
+                feat_set.push_back(next_phi.back());
                 next_phi.pop_back();
                 ++n_feat_added;
             }
@@ -1037,56 +726,60 @@ void FeatureSpace::generate_feature_space()
                     _mpi_comm->recv(pp, _mpi_comm->cantor_tag_gen(pp, _mpi_comm->rank(), 1, 0), to_recv);
                     for(auto& feat : to_recv)
                     {
-                        _phi.push_back(feat);
+                        feat_set.push_back(feat);
                     }
                 }
             }
 
             #pragma omp parallel for
-            for(int ff = _start_rung.back(); ff < _phi.size(); ++ff)
+            for(int ff = start_rung.back(); ff < feat_set.size(); ++ff)
             {
-                _phi[ff]->reindex(ff + n_feat_below_rank, ff);
-                _phi[ff]->set_value();
-                _phi[ff]->set_test_value();
+                feat_set[ff]->reindex(ff + n_feat_below_rank, ff);
+                feat_set[ff]->set_value();
+                feat_set[ff]->set_test_value();
             }
         }
 
         #ifdef PARAMETERIZE
-        // Reorder features based on the number of parameters they have (none goes first)
-        std::vector<int> feat_n_params(_phi.size() - _start_rung.back());
-        std::transform(
-            _phi.begin() + _start_rung.back(),
-            _phi.end(),
-            feat_n_params.begin(),
-            [](node_ptr feat){return feat->n_params();}
-        );
-        inds = util_funcs::argsort<int>(feat_n_params);
-        next_phi.resize(feat_n_params.size());
-        std::copy_n(_phi.begin() + _start_rung.back(), feat_n_params.size(), next_phi.begin());
-        std::transform(
-            inds.begin(),
-            inds.end(),
-            _phi.begin() + _start_rung.back(),
-            [&next_phi](int ind){return next_phi[ind];}
-        );
-        for(int ff = _start_rung.back(); ff < _phi.size(); ++ff)
+        if(!reparam)
         {
-            _phi[ff]->reindex(ff);
-            _phi[ff]->set_value();
-        }
-
-        // Set how many features have no parameters
-        _end_no_params.push_back(
-            _start_rung.back() +
-            std::count_if(feat_n_params.begin(), feat_n_params.end(), [](int n_param){return n_param == 0;})
-        );
+            // Reorder features based on the number of parameters they have (none goes first)
+            std::vector<int> feat_n_params(feat_set.size() - start_rung.back());
+            std::transform(
+                feat_set.begin() + start_rung.back(),
+                feat_set.end(),
+                feat_n_params.begin(),
+                [](node_ptr feat){return feat->n_params();}
+            );
+            inds = util_funcs::argsort<int>(feat_n_params);
+            next_phi.resize(feat_n_params.size());
+            std::copy_n(feat_set.begin() + start_rung.back(), feat_n_params.size(), next_phi.begin());
+            std::transform(
+                inds.begin(),
+                inds.end(),
+                feat_set.begin() + start_rung.back(),
+                [&next_phi](int ind){return next_phi[ind];}
+            );
+            for(int ff = start_rung.back(); ff < feat_set.size(); ++ff)
+            {
+                feat_set[ff]->reindex(ff);
+                feat_set[ff]->set_value();
+            }
 
+            // Set how many features have no parameters
+            _end_no_params.push_back(
+                start_rung.back() +
+                std::count_if(feat_n_params.begin(), feat_n_params.end(), [](int n_param){return n_param == 0;})
+            );
+        }
         #endif
     }
-    _n_feat = _phi.size();
+    if(!reparam)
+    {
+        _n_feat = feat_set.size();
+    }
 }
 
-// void FeatureSpace::generate_and_project(const double* prop, const int size, std::vector<node_ptr>& phi_sel, std::vector<double>& scores_sel)
 void FeatureSpace::generate_and_project(std::shared_ptr<LossFunction> loss, std::vector<node_ptr>& phi_sel, std::vector<double>& scores_sel)
 {
     std::vector<double> scores_sel_all(node_value_arrs::N_SELECTED);
@@ -1245,7 +938,6 @@ void FeatureSpace::generate_and_project(std::shared_ptr<LossFunction> loss, std:
 
 void FeatureSpace::sis(const std::vector<double>& prop)
 {
-    std::cout << _project_type << std::endl;
     sis(
         loss_function_util::get_loss_function(
             _project_type,
@@ -1268,7 +960,12 @@ void FeatureSpace::sis(std::shared_ptr<LossFunction> loss)
         _phi.resize(_n_feat);
         _phi_reparam.resize(0);
         _start_rung_reparam.resize(0);
-        generate_reparam_feature_set(loss->prop_project());
+        generate_feature_space(
+            _phi_reparam,
+            _start_rung_reparam,
+            loss->prop_project(),
+            true
+        );
         _phi.insert(_phi.end(), _phi_reparam.begin(), _phi_reparam.end());
         _scores.resize(_phi.size());
     }
diff --git a/src/feature_creation/feature_space/FeatureSpace.hpp b/src/feature_creation/feature_space/FeatureSpace.hpp
index 6805a49bff0371073fafc4fc3d34281c1d9d6f83..e99e136c18adf7254786a058391d19a61bcad728 100644
--- a/src/feature_creation/feature_space/FeatureSpace.hpp
+++ b/src/feature_creation/feature_space/FeatureSpace.hpp
@@ -121,7 +121,12 @@ public:
     /**
      * @brief Populate _phi using _phi_0 and the allowed operators up to (_max_rung - _n_rung_generate)^th rung
      */
-    void generate_feature_space();
+    void generate_feature_space(
+        std::vector<node_ptr>& feat_set,
+        std::vector<int>& start_rung,
+        const std::vector<double>& prop,
+        bool reparam = false
+    );
 
     /**
      * @brief A vector containing all of the selected features
@@ -274,13 +279,6 @@ public:
         const double l_bound=1e-50,
         const double u_bound=1e50
     );
-
-    /**
-     * @brief Generate reparameterized feature set
-     *
-     * @param prop The property to optimize against
-     */
-    void generate_reparam_feature_set(const std::vector<double>& prop);
 #endif
 
     /**
diff --git a/src/feature_creation/node/FeatureNode.cpp b/src/feature_creation/node/FeatureNode.cpp
index 72b62298d60d2aaf1c8e5f1f71f9cbef5d7378b4..e2ffed499c1fe4cdf1b6c34e6a1d22d274bbc4da 100644
--- a/src/feature_creation/node/FeatureNode.cpp
+++ b/src/feature_creation/node/FeatureNode.cpp
@@ -108,22 +108,15 @@ void FeatureNode::update_div_mult_leaves(std::map<std::string, double>& div_mult
     expected_abs_tot += std::abs(fact);
 }
 
-std::map<int, int> FeatureNode::primary_feature_decomp() const
+void FeatureNode::update_primary_feature_decomp(std::map<std::string, int>& pf_decomp) const
 {
-    std::map<int, int> pf_decomp;
-    pf_decomp[_arr_ind] = 1;
-    return pf_decomp;
-}
-
-void FeatureNode::update_primary_feature_decomp(std::map<int, int>& pf_decomp) const
-{
-    if(pf_decomp.count(_arr_ind) > 0)
+    if(pf_decomp.count(_expr) > 0)
     {
-        pf_decomp[_arr_ind] += 1;
+        pf_decomp[_expr] += 1;
     }
     else
     {
-        pf_decomp[_arr_ind] = 1;
+        pf_decomp[_expr] = 1;
     }
 }
 
diff --git a/src/feature_creation/node/FeatureNode.hpp b/src/feature_creation/node/FeatureNode.hpp
index ea9a015fc5a1f877698037aef16a94a6e22a8d27..88e1cb6a935a76186a650bcd173fb2cb1d518263 100644
--- a/src/feature_creation/node/FeatureNode.hpp
+++ b/src/feature_creation/node/FeatureNode.hpp
@@ -170,7 +170,7 @@ public:
     /**
      * @brief Get the number of primary features (non-unique) this feature contains (The number of leaves of the Binary Expression Tree)
      */
-    virtual inline int n_leaves(int cur_n_leaves = 0) const {return cur_n_leaves + 1;}
+    inline int n_leaves(int cur_n_leaves) const {return cur_n_leaves + 1;}
 
     /**
      * @brief Get the score used to sort the features in the feature space (Based on the type of node and _feat_ind)
@@ -288,19 +288,12 @@ public:
      */
     inline int rung(int cur_rung = 0) const {return cur_rung;}
 
-    /**
-     * @brief Get the primary feature decomposition of a feature
-     *
-     * @return A map representing the primary feature comprising a feature
-     */
-    std::map<int, int> primary_feature_decomp() const;
-
     /**
      * @brief Update the primary feature decomposition of a feature
      *
      * @param pf_decomp The primary feature decomposition of the feature calling this function.
      */
-    void update_primary_feature_decomp(std::map<int, int>& pf_decomp) const;
+    virtual void update_primary_feature_decomp(std::map<std::string, int>& pf_decomp) const;
 
     /**
      * @brief Converts a feature into a postfix expression (reverse polish notation)
@@ -347,9 +340,9 @@ public:
      */
     inline node_ptr feat(const int ind) const
     {
-        if(ind > 0)
+        if(ind > -1)
         {
-            throw std::logic_error("Index not found in _feats");
+            throw std::logic_error("Can't get a child node from a FeatureNode.");
         }
         return nullptr;
     }
diff --git a/src/feature_creation/node/ModelNode.cpp b/src/feature_creation/node/ModelNode.cpp
index 0bd2f9defa16124f8063dd9b457e5bfbf4081402..d0c3ad66472ad8ef98b0d151d82cd0fedb83bb82 100644
--- a/src/feature_creation/node/ModelNode.cpp
+++ b/src/feature_creation/node/ModelNode.cpp
@@ -1010,79 +1010,22 @@ ModelNode::~ModelNode()
 {}
 
 void ModelNode::update_add_sub_leaves(std::map<std::string, int>& add_sub_leaves, const int pl_mn, int& expected_abs_tot) const
-{
-    if(add_sub_leaves.count(_expr) > 0)
-    {
-        add_sub_leaves[_expr] += pl_mn;
-    }
-    else
-    {
-        add_sub_leaves[_expr] = pl_mn;
-    }
-
-    ++expected_abs_tot;
-}
+{}
 
 void ModelNode::update_div_mult_leaves(std::map<std::string, double>& div_mult_leaves, const double fact, double& expected_abs_tot) const
-{
-    if(div_mult_leaves.count(_expr) > 0)
-    {
-        div_mult_leaves[_expr] += fact;
-    }
-    else
-    {
-        div_mult_leaves[_expr] = fact;
-    }
-
-    expected_abs_tot += std::abs(fact);
-}
-
-std::map<int, int> ModelNode::primary_feature_decomp() const
-{
-    std::map<int, int> pf_decomp;
-    std::vector<std::string> split_postfix = str_utils::split_string_trim(_postfix_expr, "|");
-    for(auto& part : split_postfix)
-    {
-        try
-        {
-            if(pf_decomp.count(std::stoi(part)))
-            {
-                ++pf_decomp[std::stoi(part)];
-            }
-            else
-            {
-                pf_decomp[std::stoi(part)] = 1;
-            }
-        }
-        catch(const std::invalid_argument e)
-        {
-            // Do Nothing
-        }
-    }
-
-    return pf_decomp;
-}
+{}
 
-void ModelNode::update_primary_feature_decomp(std::map<int, int>& pf_decomp) const
+void ModelNode::update_primary_feature_decomp(std::map<std::string, int>& pf_decomp) const
 {
-    pf_decomp.clear();
-    std::vector<std::string> split_postfix = str_utils::split_string_trim(_postfix_expr, "|");
-    for(auto& part : split_postfix)
+    for(auto& leaf : _x_in_expr_list)
     {
-        try
+        if(pf_decomp.count(leaf))
         {
-            if(pf_decomp.count(std::stoi(part)))
-            {
-                ++pf_decomp[std::stoi(part)];
-            }
-            else
-            {
-                pf_decomp[std::stoi(part)] = 1;
-            }
+            ++pf_decomp[leaf];
         }
-        catch(const std::invalid_argument e)
+        else
         {
-            // Do Nothing
+            pf_decomp[leaf] = 1;
         }
     }
 }
diff --git a/src/feature_creation/node/ModelNode.hpp b/src/feature_creation/node/ModelNode.hpp
index a5c03120f09878d8f9d3f6c78095e56181b18d64..ec307c8d694b6417676775b0c7839b16252e70c4 100644
--- a/src/feature_creation/node/ModelNode.hpp
+++ b/src/feature_creation/node/ModelNode.hpp
@@ -76,6 +76,7 @@ protected:
     int _rung; //!< The rung of the feature (Height of the binary expression tree - 1)
     int _n_leaves; //!< The number of primary features (non-unique) this feature contains (The number of leaves of the Binary Expression Tree)
 public:
+    using Node::n_leaves;
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -219,7 +220,7 @@ public:
      * @brief The number of primary features (non-unique) this feature contains (The number of leaves of the Binary Expression Tree)
      *
      */
-    inline int n_leaves(int n_cur_leaves = 0) const {return _n_leaves;}
+    inline int n_leaves(int n_cur_leaves) const {return _n_leaves;}
 
     /**
      * @return Value of the feature converted to a range of -1.0 to 1.0
@@ -341,14 +342,7 @@ public:
      *
      * @param pf_decomp The primary feature decomposition of the feature calling this function.
      */
-    void update_primary_feature_decomp(std::map<int, int>& pf_decomp) const;
-
-    /**
-     * @brief Get the primary feature decomposition of a feature
-     *
-     * @return A map representing the primary feature comprising a feature
-     */
-    std::map<int, int> primary_feature_decomp() const;
+    void update_primary_feature_decomp(std::map<std::string, int>& pf_decomp) const;
 
     /**
      * @brief Get the term used in the postfix expression for this Node
diff --git a/src/feature_creation/node/Node.cpp b/src/feature_creation/node/Node.cpp
index 7b4749e214019a9002b3966fe6de7de49622b608..92c85b669238b946de9ed8ad172cb2e93fb66da1 100644
--- a/src/feature_creation/node/Node.cpp
+++ b/src/feature_creation/node/Node.cpp
@@ -40,5 +40,11 @@ Node::Node(const unsigned long int feat_ind, const int n_samp, const int n_samp_
 Node::~Node()
 {}
 
+std::map<std::string, int> Node::primary_feature_decomp() const
+{
+    std::map<std::string, int> pf_decomp;
+    update_primary_feature_decomp(pf_decomp);
+    return pf_decomp;
+}
 BOOST_SERIALIZATION_ASSUME_ABSTRACT(Node)
 
diff --git a/src/feature_creation/node/Node.hpp b/src/feature_creation/node/Node.hpp
index ee4a43706a977e69e8408c7b4c5f7182fb22544c..8b65134b8bee1362820ba24db3e3cae54f3f614e 100644
--- a/src/feature_creation/node/Node.hpp
+++ b/src/feature_creation/node/Node.hpp
@@ -144,14 +144,23 @@ public:
      */
     virtual std::vector<std::string> get_x_in_expr_list() const = 0;
 
-    // DocString: node_n_leaves
     /**
      * @brief The number of primary features (non-unique) this feature contains (The number of leaves of the Binary Expression Tree)
      *
      * @param cur_n_leaves (int) A recurisve counting variable
      * @return The number of leaves of the Binary Expression Tree
      */
-    virtual int n_leaves(const int cur_n_leaves = 0) const = 0;
+    virtual int n_leaves(const int cur_n_leaves) const = 0;
+
+    // DocString: node_n_leaves
+    /**
+     * @brief The number of primary features (non-unique) this feature contains (The number of leaves of the Binary Expression Tree)
+     *
+     */
+    inline int n_leaves() const
+    {
+        return n_leaves(0);
+    }
 
     // DocString: node_reindex_1
     /**
@@ -333,14 +342,14 @@ public:
      *
      * @return A map representing the primary feature comprising a feature
      */
-    virtual std::map<int, int> primary_feature_decomp() const = 0;
+    std::map<std::string, int> primary_feature_decomp() const;
 
     /**
      * @brief Update the primary feature decomposition of a feature
      *
      * @param pf_decomp The primary feature decomposition of the feature calling this function.
      */
-    virtual void update_primary_feature_decomp(std::map<int, int>& pf_decomp) const = 0;
+    virtual void update_primary_feature_decomp(std::map<std::string, int>& pf_decomp) const = 0;
 
     /**
      * @brief Converts a feature into a postfix expression
@@ -396,6 +405,15 @@ public:
      */
     virtual std::vector<double> parameters() const = 0;
 
+    /**
+     * @brief The pointer to the head of the parameters used for including individual scale and bias terms to each operator in the Node
+     */
+    virtual inline const double* param_pointer() const
+    {
+        throw std::logic_error("Trying to access the parameter pointer to a node with no parameters.");
+        return nullptr;
+    }
+
     /**
      * @brief Set the non-linear parameters
      * @param params The new scale and bias terms of this node
@@ -591,7 +609,7 @@ public:
     /**
      * @brief The decomposition of the primary features and how often they appear in the feature
      */
-    inline py::dict primary_feature_decomp_py(){return python_conv_utils::to_dict<int, int>(primary_feature_decomp());}
+    inline py::dict primary_feature_decomp_py(){return python_conv_utils::to_dict<std::string, int>(primary_feature_decomp());}
 
     #endif
 };
diff --git a/src/feature_creation/node/operator_nodes/OperatorNode.hpp b/src/feature_creation/node/operator_nodes/OperatorNode.hpp
index d54821c486f872cf6317b8d21a44716e0e718fcb..9cbb8243d6597273b5becc27aca55ff21028d78b 100644
--- a/src/feature_creation/node/operator_nodes/OperatorNode.hpp
+++ b/src/feature_creation/node/operator_nodes/OperatorNode.hpp
@@ -71,6 +71,7 @@ protected:
     std::array<node_ptr, N> _feats; //!< The features for the operator nodes to act on (This Node's children)
 
 public:
+    using Node::n_leaves;
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -95,27 +96,27 @@ public:
      *
      * @param o OperatorNode to be copied
      */
-    OperatorNode(const OperatorNode&) = default;
+    // OperatorNode(const OperatorNode&) = default;
 
     /**
      * @brief Move Constructor
      *
      * @param o OperatorNode to be moved
      */
-    OperatorNode(OperatorNode&&) = default;
+    // OperatorNode(OperatorNode&&) = default;
 
     /**
      * @brief Copy Assignment operator
      *
      * @param o OperatorNode to be copied
      */
-    OperatorNode& operator=(const OperatorNode&) = default;
+    // OperatorNode& operator=(const OperatorNode&) = default;
     /**
      * @brief Move Assignment operator
      *
      * @param o OperatorNode to be moved
      */
-    OperatorNode& operator=(OperatorNode&&) = default;
+    // OperatorNode& operator=(OperatorNode&&) = default;
 
     /**
      * @brief Destructor
@@ -156,12 +157,12 @@ public:
         return x_in_expr;
     }
 
-    // DocString: op_node_n_leaves
     /**
      * @brief The number of primary features (non-unique) this feature contains (The number of leaves of the Binary Expression Tree)
      *
+     * @param cur_n_leaves The current count of the number of leaves
      */
-    int n_leaves(int cur_n_leaves = 0) const
+    int n_leaves(int cur_n_leaves) const
     {
         return std::accumulate(_feats.begin(), _feats.end(), cur_n_leaves, [](int tot, node_ptr feat){return tot + feat->n_leaves();});
     }
@@ -337,24 +338,12 @@ public:
      */
     virtual NODE_TYPE type() const = 0;
 
-     /**
-     * @brief Get the primary feature decomposition of a feature
-     *
-     * @return A map representing the primary feature comprising a feature
-     */
-    std::map<int, int> primary_feature_decomp() const
-    {
-        std::map<int, int> pf_decomp;
-        update_primary_feature_decomp(pf_decomp);
-        return pf_decomp;
-    }
-
     /**
      * @brief Update the primary feature decomposition of a feature
      *
      * @param pf_decomp The primary feature decomposition of the feature calling this function.
      */
-    void update_primary_feature_decomp(std::map<int, int>& pf_decomp) const
+    void update_primary_feature_decomp(std::map<std::string, int>& pf_decomp) const
     {
         for(auto& feat : _feats)
         {
@@ -410,7 +399,7 @@ public:
      */
     inline node_ptr feat(const int ind) const
     {
-        if(ind > N)
+        if(ind >= N)
         {
             throw std::logic_error("Index not found in _feats");
         }
@@ -603,7 +592,8 @@ public:
         {
             throw std::logic_error("Asking for the gradient of non-parameterized feature");
         }
-        gradient(grad, dfdp, parameters().data());
+
+        gradient(grad, dfdp, param_pointer());
     }
 
     /**
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.hpp
index e75ab4318d2430c163fec5f6567bf8e9f0234f87..e61c18863340a53a98e22694531f5227b623a822 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.hpp
@@ -47,6 +47,7 @@ class AbsNode: public OperatorNode<1>
         ar & boost::serialization::base_object<OperatorNode>(*this);
     }
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/parameterized_absolute_value.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/parameterized_absolute_value.hpp
index 7bec971860b086e248e2ee41d6d5a78648667098..3caa351132be7f0dcabcdac5e30178b00dc1de21 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/parameterized_absolute_value.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/parameterized_absolute_value.hpp
@@ -60,6 +60,7 @@ protected:
     std::vector<double> _params; //!< The vector containing the scale and bias terms for the Node
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -179,6 +180,11 @@ public:
      */
     inline std::vector<double> parameters() const {return _params;}
 
+    /**
+     * @brief The pointer to the head of the parameters used for including individual scale and bias terms to each operator in the Node
+     */
+    virtual const double* param_pointer() const {return _params.data();}
+
     /**
      * @brief Optimize the scale and bias terms for each operation in the Node.
      * @details Use optimizer to find the scale and bias terms that minimizes the associated loss function
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.hpp
index bc951454460bc34f6fc26c0389ac77e8de834316..b75a2c43d9c381c572c30ebfe5d2d54a0e100059 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.hpp
@@ -48,6 +48,7 @@ class AbsDiffNode: public OperatorNode<2>
     }
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/parameterized_absolute_difference.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/parameterized_absolute_difference.hpp
index ba2ce9e615bf81cb05a261aff87440bb841d62f4..41ace0a0d4b7983934a3e31626358e1fbebb5da9 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/parameterized_absolute_difference.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/parameterized_absolute_difference.hpp
@@ -58,6 +58,7 @@ protected:
 
     std::vector<double> _params;
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -181,6 +182,11 @@ public:
      */
     inline std::vector<double> parameters() const {return _params;}
 
+    /**
+     * @brief The pointer to the head of the parameters used for including individual scale and bias terms to each operator in the Node
+     */
+    virtual const double* param_pointer() const {return _params.data();}
+
     /**
      * @brief Optimize the scale and bias terms for each operation in the Node.
      * @details Use optimizer to find the scale and bias terms that minimizes the associated loss function
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.hpp
index 1d2ad7922bf20d4c102329e32396018bf1960347..6d200d004b56607d4194155e4308d8cc328b2b6c 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.hpp
@@ -47,6 +47,7 @@ class AddNode: public OperatorNode<2>
         ar & boost::serialization::base_object<OperatorNode>(*this);
     }
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/parameterized_add.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/parameterized_add.hpp
index c0f620851a18bd901b045301c0eb3f5c65449471..fb977897b3e85d8e18f54060e34117135b93b894 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/parameterized_add.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/parameterized_add.hpp
@@ -60,6 +60,7 @@ protected:
     std::vector<double> _params;
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -182,6 +183,11 @@ public:
      */
     inline std::vector<double> parameters() const {return _params;}
 
+    /**
+     * @brief The pointer to the head of the parameters used for including individual scale and bias terms to each operator in the Node
+     */
+    virtual const double* param_pointer() const {return _params.data();}
+
     /**
      * @brief Optimize the scale and bias terms for each operation in the Node.
      * @details Use optimizer to find the scale and bias terms that minimizes the associated loss function
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.hpp
index 651a8a280728ebc68c66d8792afdb405b0f5d378..72d1a34e1f675085baca8f75a16c025d1d94a06b 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.hpp
@@ -47,6 +47,7 @@ class CbNode: public OperatorNode<1>
         ar & boost::serialization::base_object<OperatorNode>(*this);
     }
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/parameterized_cube.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/parameterized_cube.hpp
index 498207f44e79a9f9bb9a437f7ccf7d696bffccc1..73742779a4ae8411e6755d2741b07ae6fe2a17dd 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/parameterized_cube.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/parameterized_cube.hpp
@@ -60,6 +60,7 @@ protected:
     std::vector<double> _params; //!< The vector containing the scale and bias terms for the Node
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -179,6 +180,11 @@ public:
      */
     inline std::vector<double> parameters() const {return _params;}
 
+    /**
+     * @brief The pointer to the head of the parameters used for including individual scale and bias terms to each operator in the Node
+     */
+    virtual const double* param_pointer() const {return _params.data();}
+
     /**
      * @brief Optimize the scale and bias terms for each operation in the Node.
      * @details Use optimizer to find the scale and bias terms that minimizes the associated loss function
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.hpp
index 48d6d1eb4e89e819144ce1fcc0646c2e8151dca8..86f6ef3087be59b925dfcf367e5df5b8be7c9599 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.hpp
@@ -47,6 +47,7 @@ class CbrtNode: public OperatorNode<1>
         ar & boost::serialization::base_object<OperatorNode>(*this);
     }
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/parameterized_cube_root.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/parameterized_cube_root.hpp
index 8430d38962385e6b6b502f4d3cacb9d773a063f7..708310c87304666a0a473d1f55aa7871630b950e 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/parameterized_cube_root.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/parameterized_cube_root.hpp
@@ -59,6 +59,7 @@ protected:
 
     std::vector<double> _params; //!< The vector containing the scale and bias terms for the Node
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -178,6 +179,11 @@ public:
      */
     inline std::vector<double> parameters() const {return _params;}
 
+    /**
+     * @brief The pointer to the head of the parameters used for including individual scale and bias terms to each operator in the Node
+     */
+    virtual const double* param_pointer() const {return _params.data();}
+
     /**
      * @brief Optimize the scale and bias terms for each operation in the Node.
      * @details Use optimizer to find the scale and bias terms that minimizes the associated loss function
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.hpp
index 954248f0f6a1f17a671d558bfc92fdae1042b409..84566154e03b4c5d049abdbceb1ccafc3fffffb8 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.hpp
@@ -47,6 +47,7 @@ class CosNode: public OperatorNode<1>
         ar & boost::serialization::base_object<OperatorNode>(*this);
     }
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/parameterized_cos.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/parameterized_cos.hpp
index 90bc33fcbb23ab73baccf92701ad163218a15adc..d0ac78d491d8d7633fecf38a49be5c382a2c4317 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/parameterized_cos.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/parameterized_cos.hpp
@@ -60,6 +60,7 @@ protected:
     std::vector<double> _params; //!< The vector containing the scale and bias terms for the Node
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -179,6 +180,11 @@ public:
      */
     inline std::vector<double> parameters() const {return _params;}
 
+    /**
+     * @brief Return a pointer to the head of the parameter vector holding the individual scale and bias terms for each operator in the Node
+     */
+    virtual const double* param_pointer() const {return _params.data();}
+
     /**
      * @brief Optimize the scale and bias terms for each operation in the Node.
      * @details Use optimizer to find the scale and bias terms that minimizes the associated loss function
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.hpp
index 19c336770310ffefe95ce3753bbff1fba8f12d8f..e8684dee1ffbedc4712ffe4f870076e35d81e383 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.hpp
@@ -47,6 +47,7 @@ class DivNode: public OperatorNode<2>
         ar & boost::serialization::base_object<OperatorNode>(*this);
     }
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/parameterized_divide.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/parameterized_divide.hpp
index e58fe78a2b50df569abb89d9f18e69d900a4b719..52302872e46916d09a941d58a31a1c4e5235eaa2 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/parameterized_divide.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/parameterized_divide.hpp
@@ -60,6 +60,7 @@ protected:
     std::vector<double> _params;
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -182,6 +183,11 @@ public:
      */
     inline std::vector<double> parameters() const {return _params;}
 
+    /**
+     * @brief Return a pointer to the head of the parameter vector holding the individual scale and bias terms for each operator in the Node
+     */
+    virtual const double* param_pointer() const {return _params.data();}
+
     /**
      * @brief Optimize the scale and bias terms for each operation in the Node.
      * @details Use optimizer to find the scale and bias terms that minimizes the associated loss function
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.hpp
index ffe0eb25a75e77628ac77e1f6b4646f587e34d80..e3bee5399ae22da76685f5140967e19de2b0a2c5 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.hpp
@@ -47,6 +47,7 @@ class ExpNode: public OperatorNode<1>
         ar & boost::serialization::base_object<OperatorNode>(*this);
     }
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/parameterized_exponential.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/parameterized_exponential.hpp
index f20dfd883312b3f51a16fdaebd3774bc96b2cc79..38912cedf6842d9464178178538a9ef63065332c 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/parameterized_exponential.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/parameterized_exponential.hpp
@@ -60,6 +60,7 @@ protected:
     std::vector<double> _params; //!< The vector containing the scale and bias terms for the Node
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -178,6 +179,11 @@ public:
      */
     inline std::vector<double> parameters() const {return _params;}
 
+    /**
+     * @brief Return a pointer to the head of the parameter vector holding the individual scale and bias terms for each operator in the Node
+     */
+    virtual const double* param_pointer() const {return _params.data();}
+
     /**
      * @brief Optimize the scale and bias terms for each operation in the Node.
      * @details Use optimizer to find the scale and bias terms that minimizes the associated loss function
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.hpp
index 67008e79c97684811e3db3348c0449113ac687fd..5b04ee5284d710b5c9f1edce983d35ed4470c565 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.hpp
@@ -43,6 +43,7 @@ class InvNode: public OperatorNode<1>
     }
 
 public:
+
      /**
      * @brief Base Constructor
      * @details This is only used for serialization
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/parameterized_inverse.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/parameterized_inverse.hpp
index 569965cc324e4a2ae70f09b1aa0885729ce691e2..750ee5906f86aaeb78021cbc2eb5317b3d8c3272 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/parameterized_inverse.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/parameterized_inverse.hpp
@@ -60,6 +60,7 @@ protected:
     std::vector<double> _params; //!< The vector containing the scale and bias terms for the Node
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -179,6 +180,11 @@ public:
      */
     inline std::vector<double> parameters() const {return _params;}
 
+    /**
+     * @brief Return a pointer to the head of the parameter vector holding the individual scale and bias terms for each operator in the Node
+     */
+    virtual const double* param_pointer() const {return _params.data();}
+
     /**
      * @brief Optimize the scale and bias terms for each operation in the Node.
      * @details Use optimizer to find the scale and bias terms that minimizes the associated loss function
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.hpp
index 7419571fe59d1c76c213411c9f6808c94410c7d0..233e3dbe0f861387e7db6f857d49e3196d91734f 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.hpp
@@ -47,6 +47,7 @@ class LogNode: public OperatorNode<1>
         ar & boost::serialization::base_object<OperatorNode>(*this);
     }
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/parameterized_log.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/parameterized_log.hpp
index b3ade124466b2b37f6edc76b5776826278db0549..456715baaaeaeb2b6b0a20072f0b3f22cdf53d00 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/parameterized_log.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/parameterized_log.hpp
@@ -62,6 +62,7 @@ protected:
     double _sign_alpha; //!< Used to alternate between +/- 1
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -198,6 +199,11 @@ public:
      */
     inline std::vector<double> parameters() const {return _params;}
 
+    /**
+     * @brief Return a pointer to the head of the parameter vector holding the individual scale and bias terms for each operator in the Node
+     */
+    virtual const double* param_pointer() const {return _params.data();}
+
     /**
      * @brief Optimize the scale and bias terms for each operation in the Node.
      * @details Use optimizer to find the scale and bias terms that minimizes the associated loss function
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.hpp
index aecf7a7f0336ac480a69c939115b2b7e7ed2a045..284aacea16f5674643f0d3d6775884c1d07b234f 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.hpp
@@ -48,6 +48,7 @@ class MultNode: public OperatorNode<2>
     }
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -308,7 +309,7 @@ public:
      * @param params A pointer to the bias and scale terms for this Node and its children
      * @param dfdp pointer to where the feature derivative pointers are located
      */
-    inline void param_derivative(const double* params, double* dfdp) const {std::copy_n(_feats[0]->value_ptr(params, 2),  _n_samp, dfdp);}
+    inline void param_derivative(const double* params, double* dfdp) const {std::copy_n(_feats[0]->value_ptr(params, -1, true),  _n_samp, dfdp);}
     #endif
 };
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/parameterized_multiply.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/parameterized_multiply.hpp
index 67d64cbe870dbe99027f412d560a5ca7b5e698fe..8c3ab95bb47b3f4288f0f415d0b3cbbae0074622 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/parameterized_multiply.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/parameterized_multiply.hpp
@@ -60,6 +60,7 @@ protected:
     std::vector<double> _params;
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -182,6 +183,11 @@ public:
      */
     inline std::vector<double> parameters() const {return _params;}
 
+    /**
+     * @brief Return a pointer to the head of the parameter vector holding the individual scale and bias terms for each operator in the Node
+     */
+    virtual const double* param_pointer() const {return _params.data();}
+
     /**
      * @brief Optimize the scale and bias terms for each operation in the Node.
      * @details Use optimizer to find the scale and bias terms that minimizes the associated loss function
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.hpp
index a61f3a3b2c1272c6738b62984cfd61ee159efa97..420c8fe33c35d36f009a48f42cab1a18e72cb0ec 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.hpp
@@ -48,6 +48,7 @@ class NegExpNode: public OperatorNode<1>
     }
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/parameterized_negative_exponential.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/parameterized_negative_exponential.hpp
index 982b49b5fda05c688af88dc9182e0c89bb9a6915..b932099b8e06cf6cb7efcf3378db6a761427e5e3 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/parameterized_negative_exponential.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/parameterized_negative_exponential.hpp
@@ -60,6 +60,7 @@ protected:
     std::vector<double> _params; //!< The vector containing the scale and bias terms for the Node
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -179,6 +180,11 @@ public:
      */
     inline std::vector<double> parameters() const {return _params;}
 
+    /**
+     * @brief Return a pointer to the head of the parameter vector holding the individual scale and bias terms for each operator in the Node
+     */
+    virtual const double* param_pointer() const {return _params.data();}
+
     /**
      * @brief Optimize the scale and bias terms for each operation in the Node.
      * @details Use optimizer to find the scale and bias terms that minimizes the associated loss function
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/parameterized_sin.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/parameterized_sin.hpp
index ec97225afa2047053d07479a482ddd3dd5bc8d0c..905c6a04e8b9fe236221047342c647d9a769d3d6 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/parameterized_sin.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/parameterized_sin.hpp
@@ -60,6 +60,7 @@ protected:
     std::vector<double> _params; //!< The vector containing the scale and bias terms for the Node
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -179,6 +180,11 @@ public:
      */
     inline std::vector<double> parameters() const {return _params;}
 
+    /**
+     * @brief Return a pointer to the head of the parameter vector holding the individual scale and bias terms for each operator in the Node
+     */
+    virtual const double* param_pointer() const {return _params.data();}
+
     /**
      * @brief Optimize the scale and bias terms for each operation in the Node.
      * @details Use optimizer to find the scale and bias terms that minimizes the associated loss function
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.hpp
index bfa5b82316c670b49ba43387b1f619e23a2dbfef..5dd50188313024da63952043e5ae8011fa6c91b4 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.hpp
@@ -48,6 +48,7 @@ class SinNode: public OperatorNode<1>
     }
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/parameterized_sixth_power.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/parameterized_sixth_power.hpp
index 26ce17c9abbb9638683f4c970c001c5f7aa2d383..db6b082ee75f36101600a42ed4de1791947e885a 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/parameterized_sixth_power.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/parameterized_sixth_power.hpp
@@ -60,6 +60,7 @@ protected:
     std::vector<double> _params; //!< The vector containing the scale and bias terms for the Node
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -179,6 +180,11 @@ public:
      */
     inline std::vector<double> parameters() const {return _params;}
 
+    /**
+     * @brief Return a pointer to the head of the parameter vector holding the individual scale and bias terms for each operator in the Node
+     */
+    virtual const double* param_pointer() const {return _params.data();}
+
     /**
      * @brief Optimize the scale and bias terms for each operation in the Node.
      * @details Use optimizer to find the scale and bias terms that minimizes the associated loss function
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/sixth_power.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/sixth_power.hpp
index 174fb760208c46b0f479abc94d29a42145eede01..6a295650157711b67b405f7858c0e4ca331bd487 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/sixth_power.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/sixth_power.hpp
@@ -48,6 +48,7 @@ class SixPowNode: public OperatorNode<1>
     }
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/parameterized_square.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/parameterized_square.hpp
index 35d5ed5c21ba6031304413c0df05a58a676d5969..51fb0a08c3081544ecc04f4d360f5377605746de 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/parameterized_square.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/parameterized_square.hpp
@@ -60,6 +60,7 @@ protected:
     std::vector<double> _params; //!< The vector containing the scale and bias terms for the Node
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -179,6 +180,11 @@ public:
      */
     inline std::vector<double> parameters() const {return _params;}
 
+    /**
+     * @brief Return a pointer to the head of the parameter vector holding the individual scale and bias terms for each operator in the Node
+     */
+    virtual const double* param_pointer() const {return _params.data();}
+
     /**
      * @brief Optimize the scale and bias terms for each operation in the Node.
      * @details Use optimizer to find the scale and bias terms that minimizes the associated loss function
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.hpp
index a1ef0cbf5836278aeb3bc1db13bae12400591945..39eb3bfd8e7e6b785450391d60b734d0efc0725f 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.hpp
@@ -47,6 +47,7 @@ class SqNode: public OperatorNode<1>
         ar & boost::serialization::base_object<OperatorNode>(*this);
     }
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/parameterized_square_root.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/parameterized_square_root.hpp
index 994c397b0ede7093f7f73fa0c36ed693d03f8143..a02fe0f924b814e3f1a32d4fcebc59aa42e8568e 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/parameterized_square_root.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/parameterized_square_root.hpp
@@ -62,6 +62,7 @@ protected:
     double _sign_alpha; //!< 1 if alpha is positive, -1 if alpha is negative
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -181,6 +182,11 @@ public:
      */
     inline std::vector<double> parameters() const {return _params;}
 
+    /**
+     * @brief Return a pointer to the head of the parameter vector holding the individual scale and bias terms for each operator in the Node
+     */
+    virtual const double* param_pointer() const {return _params.data();}
+
     /**
      * @brief Optimize the scale and bias terms for each operation in the Node.
      * @details Use optimizer to find the scale and bias terms that minimizes the associated loss function
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.hpp
index e39d82a065abaf883d7021723a46d4681e527566..07b2d1da7f07092ddb60d15152453056dc58cda0 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.hpp
@@ -48,6 +48,7 @@ class SqrtNode: public OperatorNode<1>
     }
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/parameterized_subtract.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/parameterized_subtract.hpp
index dbf8acccdd9d7d41e4d7e90ac7d2486ca8d9bd55..c85cfc703d44345b8c78ed90892e958610673584 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/parameterized_subtract.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/parameterized_subtract.hpp
@@ -60,6 +60,7 @@ protected:
     std::vector<double> _params;
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
@@ -183,6 +184,11 @@ public:
      */
     inline std::vector<double> parameters() const {return _params;}
 
+    /**
+     * @brief Return a pointer to the head of the parameter vector holding the individual scale and bias terms for each operator in the Node
+     */
+    virtual const double* param_pointer() const {return _params.data();}
+
     /**
      * @brief Optimize the scale and bias terms for each operation in the Node.
      * @details Use optimizer to find the scale and bias terms that minimizes the associated loss function
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.hpp
index 85e85f9980a7fe0acf64dc82eac4de4f363e1e97..ea2b3d0d6324093809a00e2cd6dee716339affb7 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.hpp
@@ -49,6 +49,7 @@ class SubNode: public OperatorNode<2>
     }
 
 public:
+
     /**
      * @brief Base Constructor
      * @details This is only used for serialization
diff --git a/src/feature_creation/node/utils.cpp b/src/feature_creation/node/utils.cpp
index 35044efc7088e781069e92cb005715a342ef42ef..13904a12e685062ae9da6e6055c674cab585721a 100644
--- a/src/feature_creation/node/utils.cpp
+++ b/src/feature_creation/node/utils.cpp
@@ -504,5 +504,73 @@ std::string node_identifier::feature_type_to_string(const NODE_TYPE nt)
     {
         return "cos";
     }
+    if(nt == NODE_TYPE::PARAM_ADD)
+    {
+        return "p:add";
+    }
+    if(nt == NODE_TYPE::PARAM_SUB)
+    {
+        return "p:sub";
+    }
+    if(nt == NODE_TYPE::PARAM_ABS_DIFF)
+    {
+        return "p:abs_diff";
+    }
+    if(nt == NODE_TYPE::PARAM_MULT)
+    {
+        return "p:mult";
+    }
+    if(nt == NODE_TYPE::PARAM_DIV)
+    {
+        return "p:div";
+    }
+    if(nt == NODE_TYPE::PARAM_EXP)
+    {
+        return "p:exp";
+    }
+    if(nt == NODE_TYPE::PARAM_NEG_EXP)
+    {
+        return "p:neg_exp";
+    }
+    if(nt == NODE_TYPE::PARAM_INV)
+    {
+        return "p:inv";
+    }
+    if(nt == NODE_TYPE::PARAM_SQ)
+    {
+        return "p:sq";
+    }
+    if(nt == NODE_TYPE::PARAM_CB)
+    {
+        return "p:cb";
+    }
+    if(nt == NODE_TYPE::PARAM_SIX_POW)
+    {
+        return "p:six_pow";
+    }
+    if(nt == NODE_TYPE::PARAM_SQRT)
+    {
+        return "p:sqrt";
+    }
+    if(nt == NODE_TYPE::PARAM_CBRT)
+    {
+        return "p:cbrt";
+    }
+    if(nt == NODE_TYPE::PARAM_LOG)
+    {
+        return "p:log";
+    }
+    if(nt == NODE_TYPE::PARAM_ABS)
+    {
+        return "p:abs";
+    }
+    if(nt == NODE_TYPE::PARAM_SIN)
+    {
+        return "p:sin";
+    }
+    if(nt == NODE_TYPE::PARAM_COS)
+    {
+        return "p:cos";
+    }
     throw std::logic_error("Invalid feature type");
 }
diff --git a/src/python/postprocess/classification.py b/src/python/postprocess/classification.py
index 69fe7d91ed5e0087bb115ef05e79db0887a7bf72..87cd7e5b65b605a2d97215af98c1e1da52060bce 100644
--- a/src/python/postprocess/classification.py
+++ b/src/python/postprocess/classification.py
@@ -54,7 +54,7 @@ def update_model_svm(model, c=1.0, max_iter=-1, tol=0.0001, filename=None):
     updated_prop_train_est = []
     updated_prop_test_est = []
 
-    for ts_train, ts_test in zip(model.task_size_train, model.task_size_test):
+    for ts_train, ts_test in zip(model.task_sizes_train, model.task_sizes_test):
         X = np.column_stack(
             [feat.value[start_train : start_train + ts_train] for feat in model.feats]
         )
diff --git a/src/python/postprocess/plot/classification.py b/src/python/postprocess/plot/classification.py
index d90695b71096cb06eec3ab58a6680ed8425a1fbe..63aa128001384c8e3821f5d9d3242f2a1430b961 100644
--- a/src/python/postprocess/plot/classification.py
+++ b/src/python/postprocess/plot/classification.py
@@ -35,8 +35,8 @@ def plot_classification(model, task=0, filename=None, fig_settings=None):
     verts = []
     points = []
 
-    task_start = np.sum(model.task_size_train[:task], dtype=int)
-    task_end = task_start + model.task_size_train[task]
+    task_start = np.sum(model.task_sizes_train[:task], dtype=int)
+    task_end = task_start + model.task_sizes_train[task]
     for c in classes:
         inds = np.where(model.prop_train[task_start:task_end] == c)[0] + task_start
         pts = np.zeros((len(inds), len(model.feats)))
diff --git a/src/python/py_binding_cpp_def/bindings_docstring_keyed.cpp b/src/python/py_binding_cpp_def/bindings_docstring_keyed.cpp
index 03ba8fce8582c4cc61b61665f7d2dd4ad67c56c3..0b7f48f3e90794fb832a452fc43d6d1b3d9cad06 100644
--- a/src/python/py_binding_cpp_def/bindings_docstring_keyed.cpp
+++ b/src/python/py_binding_cpp_def/bindings_docstring_keyed.cpp
@@ -340,6 +340,7 @@ void sisso::feature_creation::registerUnit()
     {
         void (Node::*reindex_1)(unsigned long int) = &Node::reindex;
         void (Node::*reindex_2)(unsigned long int, unsigned long int) = &Node::reindex;
+        int (Node::*n_leaves_prop)() const = &Node::n_leaves;
 
         class_<sisso::feature_creation::node::NodeWrap, boost::noncopyable>("Node", "@DocString_cls_node@", no_init)
             .add_property("n_samp", &Node::n_samp, "@DocString_node_n_samp@")
@@ -356,6 +357,7 @@ void sisso::feature_creation::registerUnit()
             .add_property("postfix_expr", &Node::postfix_expr, "@DocString_node_postfix_expr@")
             .add_property("latex_expr", &Node::latex_expr, "@DocString_node_latex_expr@")
             .add_property("parameters", &Node::parameters_py, "@DocString_node_parameters_py@")
+            .add_property("n_leaves", n_leaves_prop, "@DocString_node_n_leaves@")
             .def("reindex", reindex_1, (arg("self"), arg("feat_ind")), "@DocString_node_reindex_1@")
             .def("reindex", reindex_2, (arg("self"), arg("feat_ind"), arg("arr_ind")), "@DocString_node_reindex_2@")
             .def("unit", pure_virtual(&Node::unit), (arg("self")), "@DocString_node_unit@")
@@ -364,7 +366,6 @@ void sisso::feature_creation::registerUnit()
             .def("rung", pure_virtual(&Node::rung), (arg("self"), arg("cur_rung")), "@DocString_node_rung@")
             .def("n_feats", pure_virtual(&Node::n_feats), (arg("self")), "@DocString_node_n_feats@")
             .def("feat", pure_virtual(&Node::feat), (arg("self"), arg("feat_ind")), "@DocString_node_feat@")
-            .def("n_leaves", pure_virtual(&Node::n_leaves), (arg("self"), arg("cur_n_leaves")), "@DocString_node_n_leaves@")
             .def("x_in_expr_list", pure_virtual(&Node::get_x_in_expr_list), (arg("self")), "@DocString_node_x_in_expr@")
         ;
     }
@@ -395,6 +396,7 @@ void sisso::feature_creation::registerUnit()
             .def("feat", pure_virtual(&Node::feat), (arg("self"), arg("feat_ind")), "@DocString_node_feat@")
             .def("n_leaves", pure_virtual(&Node::n_leaves), (arg("self"), arg("cur_n_leaves")), "@DocString_node_n_leaves@")
             .def("x_in_expr_list", pure_virtual(&Node::get_x_in_expr_list), (arg("self")), "@DocString_node_x_in_expr@")
+            .def("matlab_fxn_expr", pure_virtual(&Node::matlab_fxn_expr), (arg("self")), "@DocString_node_matlab_expr@")
         ;
     }
 #endif
@@ -403,6 +405,7 @@ void sisso::feature_creation::node::registerFeatureNode()
 {
     void (FeatureNode::*set_value_no_param)(int, bool) const = &FeatureNode::set_value;
     void (FeatureNode::*set_test_value_no_param)(int, bool) const = &FeatureNode::set_test_value;
+    std::string (FeatureNode::*matlab_expr)() const = &FeatureNode::matlab_fxn_expr;
     std::string (FeatureNode::*expr_no_param)() const = &FeatureNode::expr;
 
     using namespace boost::python;
@@ -427,8 +430,8 @@ void sisso::feature_creation::node::registerFeatureNode()
         .add_property("expr", expr_no_param, "@DocString_feat_node_expr_const@")
         .add_property("unit", &FeatureNode::unit, "@DocString_feat_node_unit@")
         .add_property("rung", &FeatureNode::rung, "@DocString_feat_node_rung@")
-        .add_property("n_leaves", &FeatureNode::n_leaves, "@DocString_feat_node_n_leaves@")
-        .add_property("x_in_expr_list", &FeatureNode::get_x_in_expr_list, "@DocString_node_x_in_expr@")
+        .add_property("x_in_expr_list", &FeatureNode::get_x_in_expr_list, "@DocString_feat_node_x_in_expr@")
+        .add_property("matlab_fxn_expr", matlab_expr, "@DocString_feat_node_matlab_expr@")
     ;
 }
 
@@ -440,6 +443,7 @@ void sisso::feature_creation::node::registerModelNode()
     double (ModelNode::*eval_ndarr)(np::ndarray) = &ModelNode::eval_py;
     double (ModelNode::*eval_list)(py::list) = &ModelNode::eval_py;
     double (ModelNode::*eval_dict)(py::dict) = &ModelNode::eval_py;
+    std::string (ModelNode::*matlab_expr)() const = &ModelNode::matlab_fxn_expr;
 
     using namespace boost::python;
     class_<ModelNode, bases<FeatureNode>>(
@@ -467,8 +471,8 @@ void sisso::feature_creation::node::registerModelNode()
         .def("eval", eval_list, (arg("self"), arg("x_in")), "@DocString_model_node_eval_list@")
         .def("eval", eval_dict, (arg("self"), arg("x_in")), "@DocString_model_node_eval_dict@")
         .add_property("rung", &ModelNode::rung, "@DocString_model_node_rung@")
-        .add_property("n_leaves", &ModelNode::n_leaves, "@DocString_node_n_leaves@")
-        .add_property("x_in_expr_list", &ModelNode::x_in_expr_list_py, "@DocString_node_x_in_expr@")
+        .add_property("x_in_expr_list", &ModelNode::x_in_expr_list_py, "@DocString_model_node_x_in_expr@")
+        .add_property("matlab_fxn_expr", matlab_expr, "@DocString_model_node_matlab_expr@")
     ;
 }
 
@@ -476,6 +480,7 @@ void sisso::feature_creation::node::registerAddNode()
 {
     void (AddNode::*set_value_no_param)(int, bool) const = &AddNode::set_value;
     void (AddNode::*set_test_value_no_param)(int, bool) const = &AddNode::set_test_value;
+    std::string (AddNode::*matlab_expr)() const = &AddNode::matlab_fxn_expr;
     std::string (AddNode::*expr_no_param)() const = &AddNode::expr;
 
     class_<AddNode, bases<OperatorNode<2>>>(
@@ -491,7 +496,7 @@ void sisso::feature_creation::node::registerAddNode()
         .add_property("expr", expr_no_param, "@DocString_add_node_expr@")
         .add_property("unit", &AddNode::unit, "@DocString_add_node_unit@")
         .add_property("rung", &AddNode::rung, "@DocString_add_node_rung@")
-
+        .add_property("matlab_fxn_expr", matlab_expr, "@DocString_add_node_matlab_expr@")
     ;
 }
 
@@ -499,6 +504,7 @@ void sisso::feature_creation::node::registerSubNode()
 {
     void (SubNode::*set_value_no_param)(int, bool) const = &SubNode::set_value;
     void (SubNode::*set_test_value_no_param)(int, bool) const = &SubNode::set_test_value;
+    std::string (SubNode::*matlab_expr)() const = &SubNode::matlab_fxn_expr;
     std::string (SubNode::*expr_no_param)() const = &SubNode::expr;
 
     class_<SubNode, bases<OperatorNode<2>>>(
@@ -514,6 +520,7 @@ void sisso::feature_creation::node::registerSubNode()
         .add_property("expr", expr_no_param, "@DocString_sub_node_expr@")
         .add_property("unit", &SubNode::unit, "@DocString_sub_node_unit@")
         .add_property("rung", &SubNode::rung, "@DocString_sub_node_rung@")
+        .add_property("matlab_fxn_expr", matlab_expr, "@DocString_sub_node_matlab_expr@")
     ;
 }
 
@@ -521,6 +528,7 @@ void sisso::feature_creation::node::registerDivNode()
 {
     void (DivNode::*set_value_no_param)(int, bool) const = &DivNode::set_value;
     void (DivNode::*set_test_value_no_param)(int, bool) const = &DivNode::set_test_value;
+    std::string (DivNode::*matlab_expr)() const = &DivNode::matlab_fxn_expr;
     std::string (DivNode::*expr_no_param)() const = &DivNode::expr;
 
     class_<DivNode, bases<OperatorNode<2>>>(
@@ -536,6 +544,7 @@ void sisso::feature_creation::node::registerDivNode()
         .add_property("expr", expr_no_param, "@DocString_div_node_expr@")
         .add_property("unit", &DivNode::unit, "@DocString_div_node_unit@")
         .add_property("rung", &DivNode::rung, "@DocString_div_node_rung@")
+        .add_property("matlab_fxn_expr", matlab_expr, "@DocString_div_node_matlab_expr@")
     ;
 }
 
@@ -543,6 +552,7 @@ void sisso::feature_creation::node::registerMultNode()
 {
     void (MultNode::*set_value_no_param)(int, bool) const = &MultNode::set_value;
     void (MultNode::*set_test_value_no_param)(int, bool) const = &MultNode::set_test_value;
+    std::string (MultNode::*matlab_expr)() const = &MultNode::matlab_fxn_expr;
     std::string (MultNode::*expr_no_param)() const = &MultNode::expr;
 
     class_<MultNode, bases<OperatorNode<2>>>(
@@ -558,6 +568,7 @@ void sisso::feature_creation::node::registerMultNode()
         .add_property("expr", expr_no_param, "@DocString_mult_node_expr@")
         .add_property("unit", &MultNode::unit, "@DocString_mult_node_unit@")
         .add_property("rung", &MultNode::rung, "@DocString_mult_node_rung@")
+        .add_property("matlab_fxn_expr", matlab_expr, "@DocString_mult_node_matlab_expr@")
     ;
 }
 
@@ -565,6 +576,7 @@ void sisso::feature_creation::node::registerAbsDiffNode()
 {
     void (AbsDiffNode::*set_value_no_param)(int, bool) const = &AbsDiffNode::set_value;
     void (AbsDiffNode::*set_test_value_no_param)(int, bool) const = &AbsDiffNode::set_test_value;
+    std::string (AbsDiffNode::*matlab_expr)() const = &AbsDiffNode::matlab_fxn_expr;
     std::string (AbsDiffNode::*expr_no_param)() const = &AbsDiffNode::expr;
 
     class_<AbsDiffNode, bases<OperatorNode<2>>>(
@@ -580,6 +592,7 @@ void sisso::feature_creation::node::registerAbsDiffNode()
         .add_property("expr", expr_no_param, "@DocString_abs_diff_node_expr@")
         .add_property("unit", &AbsDiffNode::unit, "@DocString_abs_diff_node_unit@")
         .add_property("rung", &AbsDiffNode::rung, "@DocString_abs_diff_node_rung@")
+        .add_property("matlab_fxn_expr", matlab_expr, "@DocString_abs_diff_node_matlab_expr@")
     ;
 }
 
@@ -587,6 +600,7 @@ void sisso::feature_creation::node::registerAbsNode()
 {
     void (AbsNode::*set_value_no_param)(int, bool) const = &AbsNode::set_value;
     void (AbsNode::*set_test_value_no_param)(int, bool) const = &AbsNode::set_test_value;
+    std::string (AbsNode::*matlab_expr)() const = &AbsNode::matlab_fxn_expr;
     std::string (AbsNode::*expr_no_param)() const = &AbsNode::expr;
 
     class_<AbsNode, bases<OperatorNode<1>>>(
@@ -602,6 +616,7 @@ void sisso::feature_creation::node::registerAbsNode()
         .add_property("expr", expr_no_param, "@DocString_abs_node_expr@")
         .add_property("unit", &AbsNode::unit, "@DocString_abs_node_unit@")
         .add_property("rung", &AbsNode::rung, "@DocString_abs_node_rung@")
+        .add_property("matlab_fxn_expr", matlab_expr, "@DocString_abs_node_matlab_expr@")
     ;
 }
 
@@ -609,6 +624,7 @@ void sisso::feature_creation::node::registerInvNode()
 {
     void (InvNode::*set_value_no_param)(int, bool) const = &InvNode::set_value;
     void (InvNode::*set_test_value_no_param)(int, bool) const = &InvNode::set_test_value;
+    std::string (InvNode::*matlab_expr)() const = &InvNode::matlab_fxn_expr;
     std::string (InvNode::*expr_no_param)() const = &InvNode::expr;
 
     class_<InvNode, bases<OperatorNode<1>>>(
@@ -624,6 +640,7 @@ void sisso::feature_creation::node::registerInvNode()
         .add_property("expr", expr_no_param, "@DocString_inv_node_expr@")
         .add_property("unit", &InvNode::unit, "@DocString_inv_node_unit@")
         .add_property("rung", &InvNode::rung, "@DocString_inv_node_rung@")
+        .add_property("matlab_fxn_expr", matlab_expr, "@DocString_inv_node_matlab_expr@")
     ;
 }
 
@@ -631,6 +648,7 @@ void sisso::feature_creation::node::registerLogNode()
 {
     void (LogNode::*set_value_no_param)(int, bool) const = &LogNode::set_value;
     void (LogNode::*set_test_value_no_param)(int, bool) const = &LogNode::set_test_value;
+    std::string (LogNode::*matlab_expr)() const = &LogNode::matlab_fxn_expr;
     std::string (LogNode::*expr_no_param)() const = &LogNode::expr;
 
     class_<LogNode, bases<OperatorNode<1>>>(
@@ -646,6 +664,7 @@ void sisso::feature_creation::node::registerLogNode()
         .add_property("expr", expr_no_param, "@DocString_log_node_expr@")
         .add_property("unit", &LogNode::unit, "@DocString_log_node_unit@")
         .add_property("rung", &LogNode::rung, "@DocString_log_node_rung@")
+        .add_property("matlab_fxn_expr", matlab_expr, "@DocString_log_node_matlab_expr@")
     ;
 }
 
@@ -653,6 +672,7 @@ void sisso::feature_creation::node::registerExpNode()
 {
     void (ExpNode::*set_value_no_param)(int, bool) const = &ExpNode::set_value;
     void (ExpNode::*set_test_value_no_param)(int, bool) const = &ExpNode::set_test_value;
+    std::string (ExpNode::*matlab_expr)() const = &ExpNode::matlab_fxn_expr;
     std::string (ExpNode::*expr_no_param)() const = &ExpNode::expr;
 
     class_<ExpNode, bases<OperatorNode<1>>>(
@@ -668,6 +688,7 @@ void sisso::feature_creation::node::registerExpNode()
         .add_property("expr", expr_no_param, "@DocString_exp_node_expr@")
         .add_property("unit", &ExpNode::unit, "@DocString_exp_node_unit@")
         .add_property("rung", &ExpNode::rung, "@DocString_exp_node_rung@")
+        .add_property("matlab_fxn_expr", matlab_expr, "@DocString_exp_node_matlab_expr@")
     ;
 }
 
@@ -675,6 +696,7 @@ void sisso::feature_creation::node::registerNegExpNode()
 {
     void (NegExpNode::*set_value_no_param)(int, bool) const = &NegExpNode::set_value;
     void (NegExpNode::*set_test_value_no_param)(int, bool) const = &NegExpNode::set_test_value;
+    std::string (NegExpNode::*matlab_expr)() const = &NegExpNode::matlab_fxn_expr;
     std::string (NegExpNode::*expr_no_param)() const = &NegExpNode::expr;
 
     class_<NegExpNode, bases<OperatorNode<1>>>(
@@ -690,6 +712,7 @@ void sisso::feature_creation::node::registerNegExpNode()
         .add_property("expr", expr_no_param, "@DocString_neg_exp_node_expr@")
         .add_property("unit", &NegExpNode::unit, "@DocString_neg_exp_node_unit@")
         .add_property("rung", &NegExpNode::rung, "@DocString_neg_exp_node_rung@")
+        .add_property("matlab_fxn_expr", matlab_expr, "@DocString_neg_exp_node_matlab_expr@")
     ;
 }
 
@@ -697,6 +720,7 @@ void sisso::feature_creation::node::registerSinNode()
 {
     void (SinNode::*set_value_no_param)(int, bool) const = &SinNode::set_value;
     void (SinNode::*set_test_value_no_param)(int, bool) const = &SinNode::set_test_value;
+    std::string (SinNode::*matlab_expr)() const = &SinNode::matlab_fxn_expr;
     std::string (SinNode::*expr_no_param)() const = &SinNode::expr;
 
     class_<SinNode, bases<OperatorNode<1>>>(
@@ -712,6 +736,7 @@ void sisso::feature_creation::node::registerSinNode()
         .add_property("expr", expr_no_param, "@DocString_sin_node_expr@")
         .add_property("unit", &SinNode::unit, "@DocString_sin_node_unit@")
         .add_property("rung", &SinNode::rung, "@DocString_sin_node_rung@")
+        .add_property("matlab_fxn_expr", matlab_expr, "@DocString_sin_node_matlab_expr@")
     ;
 }
 
@@ -719,6 +744,7 @@ void sisso::feature_creation::node::registerCosNode()
 {
     void (CosNode::*set_value_no_param)(int, bool) const = &CosNode::set_value;
     void (CosNode::*set_test_value_no_param)(int, bool) const = &CosNode::set_test_value;
+    std::string (CosNode::*matlab_expr)() const = &CosNode::matlab_fxn_expr;
     std::string (CosNode::*expr_no_param)() const = &CosNode::expr;
 
     class_<CosNode, bases<OperatorNode<1>>>(
@@ -734,6 +760,7 @@ void sisso::feature_creation::node::registerCosNode()
         .add_property("expr", expr_no_param, "@DocString_cos_node_expr@")
         .add_property("unit", &CosNode::unit, "@DocString_cos_node_unit@")
         .add_property("rung", &CosNode::rung, "@DocString_cos_node_rung@")
+        .add_property("matlab_fxn_expr", matlab_expr, "@DocString_cos_node_matlab_expr@")
     ;
 }
 
@@ -741,6 +768,7 @@ void sisso::feature_creation::node::registerCbNode()
 {
     void (CbNode::*set_value_no_param)(int, bool) const = &CbNode::set_value;
     void (CbNode::*set_test_value_no_param)(int, bool) const = &CbNode::set_test_value;
+    std::string (CbNode::*matlab_expr)() const = &CbNode::matlab_fxn_expr;
     std::string (CbNode::*expr_no_param)() const = &CbNode::expr;
 
     class_<CbNode, bases<OperatorNode<1>>>(
@@ -756,6 +784,7 @@ void sisso::feature_creation::node::registerCbNode()
         .add_property("expr", expr_no_param, "@DocString_cb_node_expr@")
         .add_property("unit", &CbNode::unit, "@DocString_cb_node_unit@")
         .add_property("rung", &CbNode::rung, "@DocString_cb_node_rung@")
+        .add_property("matlab_fxn_expr", matlab_expr, "@DocString_cb_node_matlab_expr@")
     ;
 }
 
@@ -763,6 +792,7 @@ void sisso::feature_creation::node::registerCbrtNode()
 {
     void (CbrtNode::*set_value_no_param)(int, bool) const = &CbrtNode::set_value;
     void (CbrtNode::*set_test_value_no_param)(int, bool) const = &CbrtNode::set_test_value;
+    std::string (CbrtNode::*matlab_expr)() const = &CbrtNode::matlab_fxn_expr;
     std::string (CbrtNode::*expr_no_param)() const = &CbrtNode::expr;
 
     class_<CbrtNode, bases<OperatorNode<1>>>(
@@ -778,6 +808,7 @@ void sisso::feature_creation::node::registerCbrtNode()
         .add_property("expr", expr_no_param, "@DocString_cbrt_node_expr@")
         .add_property("unit", &CbrtNode::unit, "@DocString_cbrt_node_unit@")
         .add_property("rung", &CbrtNode::rung, "@DocString_cbrt_node_rung@")
+        .add_property("matlab_fxn_expr", matlab_expr, "@DocString_cbrt_node_matlab_expr@")
     ;
 }
 
@@ -785,6 +816,7 @@ void sisso::feature_creation::node::registerSqNode()
 {
     void (SqNode::*set_value_no_param)(int, bool) const = &SqNode::set_value;
     void (SqNode::*set_test_value_no_param)(int, bool) const = &SqNode::set_test_value;
+    std::string (SqNode::*matlab_expr)() const = &SqNode::matlab_fxn_expr;
     std::string (SqNode::*expr_no_param)() const = &SqNode::expr;
 
     class_<SqNode, bases<OperatorNode<1>>>(
@@ -800,6 +832,7 @@ void sisso::feature_creation::node::registerSqNode()
         .add_property("expr", expr_no_param, "@DocString_sq_node_expr@")
         .add_property("unit", &SqNode::unit, "@DocString_sq_node_unit@")
         .add_property("rung", &SqNode::rung, "@DocString_sq_node_rung@")
+        .add_property("matlab_fxn_expr", matlab_expr, "@DocString_sq_node_matlab_expr@")
     ;
 }
 
@@ -807,6 +840,7 @@ void sisso::feature_creation::node::registerSqrtNode()
 {
     void (SqrtNode::*set_value_no_param)(int, bool) const = &SqrtNode::set_value;
     void (SqrtNode::*set_test_value_no_param)(int, bool) const = &SqrtNode::set_test_value;
+    std::string (SqrtNode::*matlab_expr)() const = &SqrtNode::matlab_fxn_expr;
     std::string (SqrtNode::*expr_no_param)() const = &SqrtNode::expr;
 
     class_<SqrtNode, bases<OperatorNode<1>>>(
@@ -822,6 +856,7 @@ void sisso::feature_creation::node::registerSqrtNode()
         .add_property("expr", expr_no_param, "@DocString_sqrt_node_expr@")
         .add_property("unit", &SqrtNode::unit, "@DocString_sqrt_node_unit@")
         .add_property("rung", &SqrtNode::rung, "@DocString_sqrt_node_rung@")
+        .add_property("matlab_fxn_expr", matlab_expr, "@DocString_sqrt_node_matlab_expr@")
     ;
 }
 
@@ -829,6 +864,7 @@ void sisso::feature_creation::node::registerSixPowNode()
 {
     void (SixPowNode::*set_value_no_param)(int, bool) const = &SixPowNode::set_value;
     void (SixPowNode::*set_test_value_no_param)(int, bool) const = &SixPowNode::set_test_value;
+    std::string (SixPowNode::*matlab_expr)() const = &SixPowNode::matlab_fxn_expr;
     std::string (SixPowNode::*expr_no_param)() const = &SixPowNode::expr;
 
     class_<SixPowNode, bases<OperatorNode<1>>>(
@@ -844,6 +880,7 @@ void sisso::feature_creation::node::registerSixPowNode()
         .add_property("expr", expr_no_param, "@DocString_six_pow_node_expr@")
         .add_property("unit", &SixPowNode::unit, "@DocString_six_pow_node_unit@")
         .add_property("rung", &SixPowNode::rung, "@DocString_six_pow_node_rung@")
+        .add_property("matlab_fxn_expr", matlab_expr, "@DocString_six_pow_node_matlab_expr@")
     ;
 }
 #ifdef PARAMETERIZE
@@ -851,6 +888,7 @@ void sisso::feature_creation::node::registerSixPowNode()
     {
         void (AddParamNode::*set_value_no_param)(int, bool) const = &AddParamNode::set_value;
         void (AddParamNode::*set_test_value_no_param)(int, bool) const = &AddParamNode::set_test_value;
+        std::string (AddParamNode::*matlab_expr)() const = &AddParamNode::matlab_fxn_expr;
         std::string (AddParamNode::*expr_no_param)() const = &AddParamNode::expr;
 
         class_<AddParamNode, bases<AddNode>>(
@@ -865,6 +903,7 @@ void sisso::feature_creation::node::registerSixPowNode()
             .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_add_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_add_param_node_expr@")
             .add_property("n_params", &AddParamNode::n_params, "@DocString_add_param_node_n_params@")
+            .add_property("matlab_fxn_expr", matlab_expr, "@DocString_add_param_node_matlab_expr@")
         ;
     }
 
@@ -872,6 +911,7 @@ void sisso::feature_creation::node::registerSixPowNode()
     {
         void (SubParamNode::*set_value_no_param)(int, bool) const = &SubParamNode::set_value;
         void (SubParamNode::*set_test_value_no_param)(int, bool) const = &SubParamNode::set_test_value;
+        std::string (SubParamNode::*matlab_expr)() const = &SubParamNode::matlab_fxn_expr;
         std::string (SubParamNode::*expr_no_param)() const = &SubParamNode::expr;
 
         class_<SubParamNode, bases<SubNode>>(
@@ -886,6 +926,7 @@ void sisso::feature_creation::node::registerSixPowNode()
             .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sub_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_sub_param_node_expr@")
             .add_property("n_params", &SubParamNode::n_params, "@DocString_sub_param_node_n_params@")
+            .add_property("matlab_fxn_expr", matlab_expr, "@DocString_sub_param_node_matlab_expr@")
         ;
     }
 
@@ -893,6 +934,7 @@ void sisso::feature_creation::node::registerSixPowNode()
     {
         void (DivParamNode::*set_value_no_param)(int, bool) const = &DivParamNode::set_value;
         void (DivParamNode::*set_test_value_no_param)(int, bool) const = &DivParamNode::set_test_value;
+        std::string (DivParamNode::*matlab_expr)() const = &DivParamNode::matlab_fxn_expr;
         std::string (DivParamNode::*expr_no_param)() const = &DivParamNode::expr;
 
         class_<DivParamNode, bases<DivNode>>(
@@ -907,6 +949,7 @@ void sisso::feature_creation::node::registerSixPowNode()
             .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_div_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_div_param_node_expr@")
             .add_property("n_params", &DivParamNode::n_params, "@DocString_div_param_node_n_params@")
+            .add_property("matlab_fxn_expr", matlab_expr, "@DocString_div_param_node_matlab_expr@")
         ;
     }
 
@@ -914,6 +957,7 @@ void sisso::feature_creation::node::registerSixPowNode()
     {
         void (MultParamNode::*set_value_no_param)(int, bool) const = &MultParamNode::set_value;
         void (MultParamNode::*set_test_value_no_param)(int, bool) const = &MultParamNode::set_test_value;
+        std::string (MultParamNode::*matlab_expr)() const = &MultParamNode::matlab_fxn_expr;
         std::string (MultParamNode::*expr_no_param)() const = &MultParamNode::expr;
 
         class_<MultParamNode, bases<MultNode>>(
@@ -928,6 +972,7 @@ void sisso::feature_creation::node::registerSixPowNode()
             .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_mult_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_mult_param_node_expr@")
             .add_property("n_params", &MultParamNode::n_params, "@DocString_mult_param_node_n_params@")
+            .add_property("matlab_fxn_expr", matlab_expr, "@DocString_mult_param_node_matlab_expr@")
         ;
     }
 
@@ -935,6 +980,7 @@ void sisso::feature_creation::node::registerSixPowNode()
     {
         void (AbsDiffParamNode::*set_value_no_param)(int, bool) const = &AbsDiffParamNode::set_value;
         void (AbsDiffParamNode::*set_test_value_no_param)(int, bool) const = &AbsDiffParamNode::set_test_value;
+        std::string (AbsDiffParamNode::*matlab_expr)() const = &AbsDiffParamNode::matlab_fxn_expr;
         std::string (AbsDiffParamNode::*expr_no_param)() const = &AbsDiffParamNode::expr;
 
         class_<AbsDiffParamNode, bases<AbsDiffNode>>(
@@ -949,6 +995,7 @@ void sisso::feature_creation::node::registerSixPowNode()
             .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_abs_diff_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_abs_diff_param_node_expr@")
             .add_property("n_params", &AbsDiffParamNode::n_params, "@DocString_abs_diff_param_node_n_params@")
+            .add_property("matlab_fxn_expr", matlab_expr, "@DocString_abs_diff_param_node_matlab_expr@")
         ;
     }
 
@@ -956,6 +1003,7 @@ void sisso::feature_creation::node::registerSixPowNode()
     {
         void (AbsParamNode::*set_value_no_param)(int, bool) const = &AbsParamNode::set_value;
         void (AbsParamNode::*set_test_value_no_param)(int, bool) const = &AbsParamNode::set_test_value;
+        std::string (AbsParamNode::*matlab_expr)() const = &AbsParamNode::matlab_fxn_expr;
         std::string (AbsParamNode::*expr_no_param)() const = &AbsParamNode::expr;
 
         class_<AbsParamNode, bases<AbsNode>>(
@@ -970,6 +1018,7 @@ void sisso::feature_creation::node::registerSixPowNode()
             .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_abs_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_abs_param_node_expr@")
             .add_property("n_params", &AbsParamNode::n_params, "@DocString_abs_param_node_n_params@")
+            .add_property("matlab_fxn_expr", matlab_expr, "@DocString_abs_param_node_matlab_expr@")
         ;
     }
 
@@ -977,6 +1026,7 @@ void sisso::feature_creation::node::registerSixPowNode()
     {
         void (InvParamNode::*set_value_no_param)(int, bool) const = &InvParamNode::set_value;
         void (InvParamNode::*set_test_value_no_param)(int, bool) const = &InvParamNode::set_test_value;
+        std::string (InvParamNode::*matlab_expr)() const = &InvParamNode::matlab_fxn_expr;
         std::string (InvParamNode::*expr_no_param)() const = &InvParamNode::expr;
 
         class_<InvParamNode, bases<InvNode>>(
@@ -991,6 +1041,7 @@ void sisso::feature_creation::node::registerSixPowNode()
             .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_inv_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_inv_param_node_expr@")
             .add_property("n_params", &InvParamNode::n_params, "@DocString_inv_param_node_n_params@")
+            .add_property("matlab_fxn_expr", matlab_expr, "@DocString_inv_param_node_matlab_expr@")
         ;
     }
 
@@ -998,6 +1049,7 @@ void sisso::feature_creation::node::registerSixPowNode()
     {
         void (LogParamNode::*set_value_no_param)(int, bool) const = &LogParamNode::set_value;
         void (LogParamNode::*set_test_value_no_param)(int, bool) const = &LogParamNode::set_test_value;
+        std::string (LogParamNode::*matlab_expr)() const = &LogParamNode::matlab_fxn_expr;
         std::string (LogParamNode::*expr_no_param)() const = &LogParamNode::expr;
 
         class_<LogParamNode, bases<LogNode>>(
@@ -1012,6 +1064,7 @@ void sisso::feature_creation::node::registerSixPowNode()
             .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_log_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_log_param_node_expr@")
             .add_property("n_params", &LogParamNode::n_params, "@DocString_log_param_node_n_params@")
+            .add_property("matlab_fxn_expr", matlab_expr, "@DocString_log_param_node_matlab_expr@")
         ;
     }
 
@@ -1019,6 +1072,7 @@ void sisso::feature_creation::node::registerSixPowNode()
     {
         void (ExpParamNode::*set_value_no_param)(int, bool) const = &ExpParamNode::set_value;
         void (ExpParamNode::*set_test_value_no_param)(int, bool) const = &ExpParamNode::set_test_value;
+        std::string (ExpParamNode::*matlab_expr)() const = &ExpParamNode::matlab_fxn_expr;
         std::string (ExpParamNode::*expr_no_param)() const = &ExpParamNode::expr;
 
         class_<ExpParamNode, bases<ExpNode>>(
@@ -1033,6 +1087,7 @@ void sisso::feature_creation::node::registerSixPowNode()
             .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_exp_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_exp_param_node_expr@")
             .add_property("n_params", &ExpParamNode::n_params, "@DocString_exp_param_node_n_params@")
+            .add_property("matlab_fxn_expr", matlab_expr, "@DocString_exp_param_node_matlab_expr@")
         ;
     }
 
@@ -1040,6 +1095,7 @@ void sisso::feature_creation::node::registerSixPowNode()
     {
         void (NegExpParamNode::*set_value_no_param)(int, bool) const = &NegExpParamNode::set_value;
         void (NegExpParamNode::*set_test_value_no_param)(int, bool) const = &NegExpParamNode::set_test_value;
+        std::string (NegExpParamNode::*matlab_expr)() const = &NegExpParamNode::matlab_fxn_expr;
         std::string (NegExpParamNode::*expr_no_param)() const = &NegExpParamNode::expr;
 
         class_<NegExpParamNode, bases<NegExpNode>>(
@@ -1054,6 +1110,7 @@ void sisso::feature_creation::node::registerSixPowNode()
             .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_neg_exp_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_neg_exp_param_node_expr@")
             .add_property("n_params", &NegExpParamNode::n_params, "@DocString_neg_exp_param_node_n_params@")
+            .add_property("matlab_fxn_expr", matlab_expr, "@DocString_neg_exp_param_node_matlab_expr@")
         ;
     }
 
@@ -1061,6 +1118,7 @@ void sisso::feature_creation::node::registerSixPowNode()
     {
         void (SinParamNode::*set_value_no_param)(int, bool) const = &SinParamNode::set_value;
         void (SinParamNode::*set_test_value_no_param)(int, bool) const = &SinParamNode::set_test_value;
+        std::string (SinParamNode::*matlab_expr)() const = &SinParamNode::matlab_fxn_expr;
         std::string (SinParamNode::*expr_no_param)() const = &SinParamNode::expr;
 
         class_<SinParamNode, bases<SinNode>>(
@@ -1074,13 +1132,16 @@ void sisso::feature_creation::node::registerSixPowNode()
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sin_param_node_set_value@")
             .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sin_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_sin_param_node_expr@")
-            .add_property("n_params", &SinParamNode::n_params, "@DocString_sin_param_node_n_params@")        ;
+            .add_property("n_params", &SinParamNode::n_params, "@DocString_sin_param_node_n_params@")
+            .add_property("matlab_fxn_expr", matlab_expr, "@DocString_sin_param_node_matlab_expr@")
+        ;
     }
 
     void sisso::feature_creation::node::registerCosParamNode()
     {
         void (CosParamNode::*set_value_no_param)(int, bool) const = &CosParamNode::set_value;
         void (CosParamNode::*set_test_value_no_param)(int, bool) const = &CosParamNode::set_test_value;
+        std::string (CosParamNode::*matlab_expr)() const = &CosParamNode::matlab_fxn_expr;
         std::string (CosParamNode::*expr_no_param)() const = &CosParamNode::expr;
 
         class_<CosParamNode, bases<CosNode>>(
@@ -1094,7 +1155,8 @@ void sisso::feature_creation::node::registerSixPowNode()
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_cos_param_node_set_value@")
             .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_cos_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_cos_param_node_expr@")
-            .add_property("n_params", &CosParamNode::n_params, "@DocString_cos_param_node_n_params@")        ;
+            .add_property("n_params", &CosParamNode::n_params, "@DocString_cos_param_node_n_params@")
+            .add_property("matlab_fxn_expr", matlab_expr, "@DocString_cos_param_node_matlab_expr@")
         ;
     }
 
@@ -1102,6 +1164,7 @@ void sisso::feature_creation::node::registerSixPowNode()
     {
         void (CbParamNode::*set_value_no_param)(int, bool) const = &CbParamNode::set_value;
         void (CbParamNode::*set_test_value_no_param)(int, bool) const = &CbParamNode::set_test_value;
+        std::string (CbParamNode::*matlab_expr)() const = &CbParamNode::matlab_fxn_expr;
         std::string (CbParamNode::*expr_no_param)() const = &CbParamNode::expr;
 
         class_<CbParamNode, bases<CbNode>>(
@@ -1115,7 +1178,8 @@ void sisso::feature_creation::node::registerSixPowNode()
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_cb_param_node_set_value@")
             .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_cb_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_cb_param_node_expr@")
-            .add_property("n_params", &CbParamNode::n_params, "@DocString_cb_param_node_n_params@")        ;
+            .add_property("n_params", &CbParamNode::n_params, "@DocString_cb_param_node_n_params@")
+            .add_property("matlab_fxn_expr", matlab_expr, "@DocString_cb_param_node_matlab_expr@")
         ;
     }
 
@@ -1123,6 +1187,7 @@ void sisso::feature_creation::node::registerSixPowNode()
     {
         void (CbrtParamNode::*set_value_no_param)(int, bool) const = &CbrtParamNode::set_value;
         void (CbrtParamNode::*set_test_value_no_param)(int, bool) const = &CbrtParamNode::set_test_value;
+        std::string (CbrtParamNode::*matlab_expr)() const = &CbrtParamNode::matlab_fxn_expr;
         std::string (CbrtParamNode::*expr_no_param)() const = &CbrtParamNode::expr;
 
         class_<CbrtParamNode, bases<CbrtNode>>(
@@ -1136,7 +1201,8 @@ void sisso::feature_creation::node::registerSixPowNode()
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_cbrt_param_node_set_value@")
             .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_cbrt_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_cbrt_param_node_expr@")
-            .add_property("n_params", &CbrtParamNode::n_params, "@DocString_cbrt_param_node_n_params@")        ;
+            .add_property("n_params", &CbrtParamNode::n_params, "@DocString_cbrt_param_node_n_params@")
+            .add_property("matlab_fxn_expr", matlab_expr, "@DocString_cbrt_param_node_matlab_expr@")
         ;
     }
 
@@ -1144,6 +1210,7 @@ void sisso::feature_creation::node::registerSixPowNode()
     {
         void (SqParamNode::*set_value_no_param)(int, bool) const = &SqParamNode::set_value;
         void (SqParamNode::*set_test_value_no_param)(int, bool) const = &SqParamNode::set_test_value;
+        std::string (SqParamNode::*matlab_expr)() const = &SqParamNode::matlab_fxn_expr;
         std::string (SqParamNode::*expr_no_param)() const = &SqParamNode::expr;
 
         class_<SqParamNode, bases<SqNode>>(
@@ -1157,7 +1224,8 @@ void sisso::feature_creation::node::registerSixPowNode()
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sq_param_node_set_value@")
             .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sq_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_sq_param_node_expr@")
-            .add_property("n_params", &SqParamNode::n_params, "@DocString_sq_param_node_n_params@")        ;
+            .add_property("n_params", &SqParamNode::n_params, "@DocString_sq_param_node_n_params@")
+            .add_property("matlab_fxn_expr", matlab_expr, "@DocString_sq_param_node_matlab_expr@")
         ;
     }
 
@@ -1165,6 +1233,7 @@ void sisso::feature_creation::node::registerSixPowNode()
     {
         void (SqrtParamNode::*set_value_no_param)(int, bool) const = &SqrtParamNode::set_value;
         void (SqrtParamNode::*set_test_value_no_param)(int, bool) const = &SqrtParamNode::set_test_value;
+        std::string (SqrtParamNode::*matlab_expr)() const = &SqrtParamNode::matlab_fxn_expr;
         std::string (SqrtParamNode::*expr_no_param)() const = &SqrtParamNode::expr;
 
         class_<SqrtParamNode, bases<SqrtNode>>(
@@ -1178,7 +1247,8 @@ void sisso::feature_creation::node::registerSixPowNode()
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sqrt_param_node_set_value@")
             .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sqrt_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_sqrt_param_node_expr@")
-            .add_property("n_params", &SqrtParamNode::n_params, "@DocString_sqrt_param_node_n_params@")        ;
+            .add_property("n_params", &SqrtParamNode::n_params, "@DocString_sqrt_param_node_n_params@")
+            .add_property("matlab_fxn_expr", matlab_expr, "@DocString_sqrt_param_node_matlab_expr@")
         ;
     }
 
@@ -1186,6 +1256,7 @@ void sisso::feature_creation::node::registerSixPowNode()
     {
         void (SixPowParamNode::*set_value_no_param)(int, bool) const = &SixPowParamNode::set_value;
         void (SixPowParamNode::*set_test_value_no_param)(int, bool) const = &SixPowParamNode::set_test_value;
+        std::string (SixPowParamNode::*matlab_expr)() const = &SixPowParamNode::matlab_fxn_expr;
         std::string (SixPowParamNode::*expr_no_param)() const = &SixPowParamNode::expr;
 
         class_<SixPowParamNode, bases<SixPowNode>>(
@@ -1199,7 +1270,8 @@ void sisso::feature_creation::node::registerSixPowNode()
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_six_pow_param_node_set_value@")
             .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_six_pow_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_six_pow_param_node_expr@")
-            .add_property("n_params", &SixPowParamNode::n_params, "@DocString_six_pow_param_node_n_params@")        ;
+            .add_property("n_params", &SixPowParamNode::n_params, "@DocString_six_pow_param_node_n_params@")
+            .add_property("matlab_fxn_expr", matlab_expr, "@DocString_six_pow_param_node_matlab_expr@")
         ;
     }
 #endif
diff --git a/src/python/py_binding_cpp_def/bindings_docstring_keyed.hpp b/src/python/py_binding_cpp_def/bindings_docstring_keyed.hpp
index e3cf33612d58bef6e8767398d13ca48eced66e3a..21760c0af077ee2a05e78718d0fed4e424966747 100644
--- a/src/python/py_binding_cpp_def/bindings_docstring_keyed.hpp
+++ b/src/python/py_binding_cpp_def/bindings_docstring_keyed.hpp
@@ -101,8 +101,8 @@ namespace sisso
                 inline bool is_const() const {return this->get_override("is_const")();}
                 inline NODE_TYPE type() const {return this->get_override("type")();}
                 inline int rung(int cur_rung = 0) const {return this->get_override("rung")();}
-                inline std::map<int, int> primary_feature_decomp() const {return this->get_override("primary_feature_decomp")();}
-                inline void update_primary_feature_decomp(std::map<int, int>& pf_decomp) const {this->get_override("update_primary_feature_decomp")();}
+                inline std::map<std::string, int> primary_feature_decomp() const {return this->get_override("primary_feature_decomp")();}
+                inline void update_primary_feature_decomp(std::map<std::string, int>& pf_decomp) const {this->get_override("update_primary_feature_decomp")();}
                 inline void update_postfix(std::string& cur_expr) const {this->get_override("update_postfix")();}
                 inline std::string get_postfix_term() const {return this->get_override("get_postfix_term")();}
                 inline void update_add_sub_leaves(std::map<std::string, int>& add_sub_leaves, const int pl_mn, int& expected_abs_tot) const {this->get_override("update_add_sub_leaves")();}
@@ -221,11 +221,11 @@ namespace sisso
                 {
                     return this->get_override("rung")();
                 }
-                inline std::map<int, int> primary_feature_decomp() const
+                inline std::map<std::string, int> primary_feature_decomp() const
                 {
                     return this->get_override("primary_feature_decomp")();
                 }
-                inline void update_primary_feature_decomp(std::map<int, int>& pf_decomp) const
+                inline void update_primary_feature_decomp(std::map<std::string, int>& pf_decomp) const
                 {
                     this->get_override("update_primary_feature_decomp")();
                 }
@@ -343,7 +343,6 @@ namespace sisso
                         .def("is_nan", &OperatorNode<N>::is_nan, (py::arg("self")), "@DocString_op_node_is_nan@")
                         .def("is_const", &OperatorNode<N>::is_const, (py::arg("self")), "@DocString_op_node_is_const@")
                         .def("feat", &OperatorNode<N>::feat, (py::arg("self"), py::arg("ind")), "@DocString_op_node_feat@")
-                        .add_property("n_leaves", &OperatorNode<N>::n_leaves, "@DocString_op_node_n_leaves@")
                         .add_property("n_feats", &OperatorNode<N>::n_feats, "@DocString_op_node_n_feats@")
                     ;
                 }
@@ -360,7 +359,6 @@ namespace sisso
                         .def("set_parameters", set_params_arr, (py::arg("self"), py::arg("params")), "@DocString_op_node_set_param_arr@")
                         .def("set_parameters", set_params_list, (py::arg("self"), py::arg("params")), "@DocString_op_node_set_param_list@")
                         .def("feat", &OperatorNode<N>::feat, (py::arg("self"), py::arg("ind")), "@DocString_op_node_feat@")
-                        .add_property("n_leaves", &OperatorNode<N>::n_leaves, "@DocString_op_node_n_leaves@")
                         .add_property("n_feats", &OperatorNode<N>::n_feats, "@DocString_op_node_n_feats@")
                     ;
                 }
diff --git a/src/python/py_binding_cpp_def/feature_creation/FeatureSpace.cpp b/src/python/py_binding_cpp_def/feature_creation/FeatureSpace.cpp
index 7ec6fa6cf03f01da1bea46b2fa52b3d30a2a616c..f83954d965b11c2845343eac717f95fd27cfa0ec 100644
--- a/src/python/py_binding_cpp_def/feature_creation/FeatureSpace.cpp
+++ b/src/python/py_binding_cpp_def/feature_creation/FeatureSpace.cpp
@@ -70,9 +70,23 @@ FeatureSpace::FeatureSpace(
         }
 
         std::string nt = node_identifier::feature_type_to_string(phi_temp[ff]->type());
-        if(std::find(_allowed_ops.begin(), _allowed_ops.end(), nt) == _allowed_ops.end())
+        if(nt.substr(0, 2) == "p:")
         {
-            _allowed_ops.push_back(nt);
+            #ifdef PARAMETERIZE
+            if(std::find(_allowed_param_ops.begin(), _allowed_param_ops.end(), nt.substr(2)) == _allowed_param_ops.end())
+            {
+                _allowed_param_ops.push_back(nt.substr(2, nt.size() - 2));
+            }
+            #else
+            throw std::logic_error("Parameterized features are not currently built, recompile with BUILD_PARAMS=ON.");
+            #endif
+        }
+        else
+        {
+            if(std::find(_allowed_ops.begin(), _allowed_ops.end(), nt) == _allowed_ops.end())
+            {
+                _allowed_ops.push_back(nt);
+            }
         }
     }
 
@@ -106,6 +120,22 @@ FeatureSpace::FeatureSpace(
         }
     }
     #ifdef PARAMETERIZE
+    for(auto & op : _allowed_param_ops)
+    {
+        if((op.compare("add") == 0) || (op.compare("sub") == 0) || (op.compare("abs_diff") == 0))
+        {
+            _com_bin_param_operators.push_back(allowed_op_maps::binary_param_operator_map[op]);
+        }
+        else if((op.compare("div") == 0) || (op.compare("mult") == 0))
+        {
+            _bin_param_operators.push_back(allowed_op_maps::binary_param_operator_map[op]);
+        }
+        else
+        {
+            _un_param_operators.push_back(allowed_op_maps::unary_param_operator_map[op]);
+        }
+    }
+
     _start_rung_reparam = {0};
     _end_no_params = {0};
     for(int rr = 1; rr < _max_rung; ++rr)
diff --git a/src/utils/math_funcs.hpp b/src/utils/math_funcs.hpp
index 6bcdd5490b1de82e71445399a01c8b7df61df8d7..58878ca0e0554a0a5121c7d76c00222e356fcc29 100644
--- a/src/utils/math_funcs.hpp
+++ b/src/utils/math_funcs.hpp
@@ -557,7 +557,7 @@ namespace util_funcs
      * @param rank_b The pointer to the head of the vector used to rank of b
      * @param index The pointer used to store the sorted indexes
      * @param size The size of the vector
-     * @return The Coefficient of Determination
+     * @return The Spearman Correlation
      */
     double spearman_r(const double* a, const double* b, double* rank_a, double* rank_b, int* index, const int size);
 
@@ -570,7 +570,7 @@ namespace util_funcs
      * @param rank_b The pointer to the head of the vector used to rank of b
      * @param index The pointer used to store the sorted indexes
      * @param sizes The sizes of the tasks to calculate the correlation on
-     * @return The average Coefficient of Determination
+     * @return The average Spearman Correlation
      */
     double spearman_r(const double* a, const double* b, double* rank_a, double* rank_b, int* index, const std::vector<int>& sizes);
 
@@ -584,7 +584,7 @@ namespace util_funcs
      * @param index The pointer used to store the sorted indexes
      * @param sz The start of vector that describes the sizes of the tasks to calculate the correlation on
      * @param n_tasks The number of tasks to average over
-     * @return The average Coefficient of Determination
+     * @return The average Spearman Correlation
      */
     double spearman_r(const double* a, const double* b, double* rank_a, double* rank_b, int* index, const int* sz, const int n_tasks);
 
diff --git a/tests/exec_test/classification_gen_proj/check_model.py b/tests/exec_test/classification_gen_proj/check_model.py
new file mode 100644
index 0000000000000000000000000000000000000000..5958c75a5d751910b4baa9cd22593a2de8289c5d
--- /dev/null
+++ b/tests/exec_test/classification_gen_proj/check_model.py
@@ -0,0 +1,10 @@
+from sissopp import ModelClassifier
+from pathlib import Path
+
+import numpy as np
+
+model = ModelClassifier(
+    str("models/train_dim_2_model_0.dat"), str("models/test_dim_2_model_0.dat")
+)
+assert model.percent_error < 1e-7
+assert model.percent_test_error < 1e-7
diff --git a/tests/exec_test/classification_gen_proj/data.csv b/tests/exec_test/classification_gen_proj/data.csv
new file mode 100644
index 0000000000000000000000000000000000000000..3fa9f64bd0b9d2133040ea4849a71789ac8f078a
--- /dev/null
+++ b/tests/exec_test/classification_gen_proj/data.csv
@@ -0,0 +1,101 @@
+index,prop,A,B,C,D,E,F,G,H,I,J
+0,1,0.1,-0.3,10,10,-0.492825179098274,0.173025977694162,0.598942935224295,-0.298754475196825,-0.581254909010269,-0.110656494210556
+1,1,-1.89442810374214,-1.31996134398007,0.955713896876243,0.478117201427488,0.777586191100329,0.655369716778557,0.174914171427966,-0.288498877530604,-0.045316536149489,-0.606586193752411
+2,1,-1.47460150711424,-1.22614964523433,0.330140292484796,-0.389505498689912,0.138935265824808,-0.871869282167555,0.37472462048701,0.16418591189513,0.293560701443717,0.285608940220021
+3,1,-1.30213414336735,-1.82621262418812,-0.71381302228685,0.968769585007681,0.683148179202864,0.799125092538796,0.309479173526504,0.728052031003468,0.725495580994886,-0.676576302804248
+4,1,-1.73938632269334,-1.58349866505488,-0.067783417095816,0.229988549891323,0.582427598044647,0.261947149184825,-0.31573435079735,0.61178122144268,-0.187058216967238,0.871764347690334
+5,1,-1.56660896632398,-1.05861814902183,-0.212800982302764,0.915459776146607,-0.263465552591813,-0.666126495988014,-0.195028996490007,-0.237170057680116,-0.933358858596883,-0.19726273171241
+6,1,-1.55340876153895,-1.25209231285838,0.972585787901787,-0.872502887185675,0.729110910814452,0.265244787210866,-0.726900973624432,0.248626170107939,0.809004396880265,-0.278494064780479
+7,1,-1.54625325136447,-1.81238888450819,0.431645876221063,-0.595223273279383,0.953557069758112,-0.222915219121563,0.257670939076174,0.728927879098318,-0.579783055417687,-0.179960319428095
+8,1,-1.12735554524035,-1.69261497444728,0.827091199052693,0.208781482910759,-0.573317187361529,-0.00488758921352,0.341918716034638,-0.292901671356202,-0.34531700628784,0.766920547630073
+9,1,-1.35367834815884,-1.38141056472962,0.023965203621816,-0.805412569418484,0.806810139718495,-0.806576263127819,-0.39279977856172,-0.463819363774079,0.16095248005356,-0.553419747131608
+10,1,-1.17853151888796,-1.27705829298504,0.916015229666356,0.0275946645157,-0.829507007977635,-0.700063689327201,-0.715601456588714,-0.439799165143527,-0.487241220494887,0.245279267056121
+11,1,-1.17547049766875,-1.05613281246665,0.404557718897757,0.175997361062361,0.227383730822478,0.774254652577977,-0.616080996917636,-0.948639007451084,0.954076433375225,-0.497102001172339
+12,1,-1.67277915033943,-1.86190239883588,0.906708844886064,-0.592737030373698,0.885229803890949,0.822069297241907,0.204367485562992,0.24610324883505,-0.079476866422163,-0.244006995583434
+13,1,-1.96326165438884,-1.31680458089693,-0.284570394188414,-0.198686061574238,-0.168897609541112,-0.92939259112691,0.265899059671913,-0.828727642584781,-0.427453097474168,-0.738368091608883
+14,1,-1.79497769808481,-1.13948217357082,0.011556817105957,0.499408314757229,0.253163424774478,0.017645446880421,0.401735167095264,-0.650287617298501,-0.262217482830833,-0.510102120130588
+15,1,-1.07957120536262,-1.93245955077991,-0.520161174281201,-0.392478459347396,0.147495839565868,0.780879606474075,-0.281044687799173,-0.148036908135786,-0.208810472224093,0.278961929718128
+16,1,-1.52555145037507,-1.72455209196736,0.562803219191695,0.323676061636996,0.490737136410372,-0.95476192699496,0.028603504036769,-0.246295219650507,-0.297736293597739,-0.632473830957653
+17,1,-1.29142309507315,-1.9506961526212,0.859669461054104,0.485772819254089,0.268883598825009,0.253553321699552,-0.045743087569395,0.66793403278249,0.308591963919947,0.771084301464027
+18,1,-1.23404787121001,-1.68173519287847,-0.118871100462413,0.159000937768132,0.2985428841756,-0.203829205332538,-0.637945695251352,0.658049690810909,0.949529589134008,-0.577812553880056
+19,1,-1.13513050029551,-1.3119036669604,0.74037411093045,0.558782660077505,-0.096052126354583,0.529119817360537,0.372190604770144,0.688656466253683,-0.819433165315505,-0.12814415930811
+20,1,-0.2,0.132,-10,-10,0.785237349732891,-0.387217730495401,-0.942409218899448,0.160806577297675,-0.723969983661972,-0.452650134415823
+21,1,1.1507658618081,1.7260505392724,-0.92290734251321,0.465751384219632,-0.81727500527083,-0.182472640926628,0.887526070620356,0.111592883978488,0.386435078880162,-0.440017211221272
+22,1,1.90389768224701,1.71880759316591,0.28033979546451,0.379365407838544,0.634843008192624,0.371753918780839,-0.611653305369863,0.732567927874185,0.85803611350317,-0.577973441708411
+23,1,1.77751976452381,1.28697050370578,0.222850898945077,-0.424057088828287,-0.27619426781836,0.616941667680694,-0.696779972923147,0.23612770730498,0.760705889780527,0.34004139732033
+24,1,1.65314327493874,1.16282810211312,-0.339501197382944,0.898529591365812,-0.881538228231582,0.090728826664301,-0.858395870780934,0.907272331515896,0.160531735619067,0.678911811768841
+25,1,1.30955180558204,1.36827755737648,-0.444576754578563,-0.78871174512572,0.246625773070183,-0.663474018818313,-0.446355552060464,0.750312773134634,-0.98959522970136,-0.150120109840706
+26,1,1.44924431171893,1.40328864581169,-0.388679577334402,-0.708193450791952,0.850310084800308,-0.767256338531612,0.370509317329194,0.387354921015751,0.533160321164986,0.149390212455131
+27,1,1.61362501391753,1.05448314414567,-0.359644680155969,0.682555404147728,-0.53195400936544,0.934101689590862,-0.73600284018832,-0.29960291454053,0.351125596355818,-0.187842884669279
+28,1,1.0243392720598,1.91059602121133,-0.249409157470717,-0.137070024386644,-0.707128531134104,0.944932049234295,0.431233366052987,0.449543990959262,0.912901932280027,0.77394610963827
+29,1,1.99444678594607,1.67204984441306,0.935302642480463,0.833720966523807,0.254167956717343,-0.007922712021391,-0.114776295376767,-0.276042896002242,-0.813098403125419,0.341922052212475
+30,1,1.40110330287926,1.109011516196,0.425716772255087,0.544174803732763,0.492863854358204,-0.047589791717166,-0.743840790633672,0.525289489060411,0.829611715544936,0.015193221845522
+31,1,1.94995090625471,1.05727410799969,0.12665368551441,0.469705238170149,0.874436248273008,-0.759571175468135,0.310230735619265,-0.80342084374485,-0.462431082486477,-0.407165886759129
+32,1,1.47264625042994,1.18913643279065,-0.731393018031039,0.461102224603009,-0.884528391885322,-0.419893944840798,0.647518214389067,0.688126326408485,0.754656371691603,0.116881923067816
+33,1,1.45901677207507,1.17024364037294,-0.527433424947131,-0.598774697808646,0.113197791601676,-0.50528865259863,0.117572114288939,0.590400320594326,-0.155159386769126,0.354827977413197
+34,1,1.32042744243041,1.19801952930384,-0.818626622405165,-0.029008564510599,0.952315968378468,0.817495784213924,0.182224554845043,-0.01377304364653,-0.26273195293588,-0.859530562808673
+35,1,1.88138237976289,1.03670081839679,0.305218688016626,-0.060885647660027,-0.387903446605514,-0.108064042735465,-0.962980405009682,-0.424289604203511,-0.253442293077285,0.309637368411297
+36,1,1.9986688782461,1.36909257128618,0.54312844740039,0.591372473040837,-0.835367086693457,0.198315253422507,-0.181434739783802,0.636429105754948,0.420628646992331,0.990122364664621
+37,1,1.50455818499044,1.19094974349673,-0.653263607332762,0.359359450868376,0.30131719114182,0.649581794356589,0.942268955633086,0.884659894489377,-0.473171239344398,0.039635066570717
+38,1,1.00833361547154,1.98150630000827,-0.812352457176761,0.219766101590983,-0.65021067790289,0.423621690291556,-0.58865099275791,0.061487886019891,-0.237737474016087,0.641284347380825
+39,1,1.60179185724619,1.12508599627141,-0.829819386940741,-0.345104687573802,0.485166070545119,-0.258839727448056,-0.920615208326881,0.275498215871427,-0.629075534110342,-0.642527887960687
+40,0,0.2,0.58,10,-10,0.041519856511361,0.23303461629095,-0.497233246191187,-0.023544587617095,-0.418540837770003,-0.550233932792512
+41,0,-1.09147574370355,1.70418701701285,-0.480316474702795,-0.753784710340632,-0.613234235616998,0.167955573662474,0.455636631315042,-0.380227635953206,0.48021383007369,-0.453674929885108
+42,0,-1.9425392252915,1.59311394144654,0.310098050913387,-0.835007082906627,0.407580140850853,0.556924247596553,-0.388616604639346,0.60215104751412,-0.984322198098753,-0.996332888983337
+43,0,-1.40302421044915,1.05041379743038,-0.898456453446964,-0.797029924245349,0.47491891024478,0.480193220538417,-0.750856163558686,-0.043960372032018,-0.242651391805662,-0.109239061054006
+44,0,-1.45810616907354,1.08468326497063,0.571329522934018,-0.677379826379623,0.098396984688832,-0.961599170104035,-0.753922591922157,0.361435891257559,-0.638030455493982,0.404349024843908
+45,0,-1.60421432901638,1.57730973247518,0.402433205555268,-0.06801187450078,-0.373089661152032,0.23970878487105,0.416451106643361,-0.50599166271433,-0.88669034806741,0.30364523616443
+46,0,-1.54868661350102,1.32883184576708,-0.174925245509766,0.050330391451536,-0.388676795741932,-0.72307604978553,0.646076107724964,-0.00105589909588,0.491928720743773,-0.647995101369186
+47,0,-1.8920756792535,1.76576258461153,0.289410761217525,0.48566510896872,0.338684773860801,0.374319581439648,-0.105479014627167,0.004520417892418,0.222862261975939,0.23538363683764
+48,0,-1.51442922313653,1.69840409315155,-0.199044563017407,-0.855804112781183,0.947572000564906,0.654939562810152,0.802084131057488,0.010033694468233,0.449766366250574,0.119974134618433
+49,0,-1.33469144171841,1.80124846893287,-0.346729568989951,-0.614828863660672,0.578150372001527,-0.697356489908387,-0.272496177427547,-0.326679505363186,0.403185907494623,0.659834986972357
+50,0,-1.39216086591683,1.96030807097305,-0.470918775433235,-0.165965173767875,-0.118373275802139,-0.804671542299309,-0.273096283874977,0.964676290034708,-0.240786016285174,0.781092750718218
+51,0,-1.10818774496527,1.1321805921252,-0.546315077724052,-0.263397808061131,0.073416112160648,-0.561584513583351,-0.003812545601594,-0.067901708659798,-0.797337624892413,-0.502494288676279
+52,0,-1.12733422378345,1.22290093390908,-0.239618151680487,0.281282683112064,-0.122253338243164,-0.416340912422471,-0.302944823763312,0.950697167857575,0.084774348269755,0.245643637478141
+53,0,-1.54504585318447,1.46465556555859,0.672465261607398,0.963677112876299,-0.732866944741014,0.269879007022312,-0.734121763984793,-0.18475004364869,0.494783604230457,-0.563469688908407
+54,0,-1.69728989778812,1.93427938064611,0.916674666213795,0.744100669613517,-0.536325680879341,0.745349313896706,-0.608494971121628,-0.036147807131094,0.730097211981708,-0.986020687921255
+55,0,-1.46716685688328,1.91950733639359,-0.040012375137611,0.248257524389148,-0.795936343325832,-0.755933622220192,0.664943062567423,-0.560825069941966,-0.987328335835364,0.00918182383389
+56,0,-1.5078580841421,1.11065681931139,-0.75655271526814,-0.433965979475654,-0.925820800763387,0.621204380538264,-0.725355435802351,-0.087195045278291,0.500040007799584,-0.351024070867477
+57,0,-1.79333947783294,1.64615611570236,0.593670368718185,0.74125415566331,-0.835056311664806,-0.128283340965351,0.795769070113583,0.338062872249377,0.961610282279288,-0.519755961049099
+58,0,-1.68562328688306,1.79136645116331,-0.917792004629201,-0.224807652067029,0.751172530954049,0.744925497765574,0.054821387540181,-0.268146122719043,-0.373795753322288,-0.023619900695578
+59,0,-1.70325116873164,1.56173898398367,0.937331444475048,-0.189146596668676,0.726757528139029,0.571196020214809,0.150478496659529,0.716370904753891,0.645947936391794,-0.096512499841381
+60,0,-0.31,-0.164,-10,10,0.303748234076738,0.094684069184242,0.846651908762107,0.505710991097032,-0.664846620425076,-0.722934785670171
+61,0,1.51747503460744,-1.57976833969122,-0.313853456471656,-0.670641690437042,0.337481189036041,-0.695059667580877,0.382512664766648,-0.754051294565859,-0.540912893771664,-0.152736592481289
+62,0,1.36729416399966,-1.54942606995245,0.746279765035798,0.320667909398266,0.075712278316126,0.557089028326803,-0.314459962457274,-0.091179395352991,-0.712572618352738,-0.862523770264919
+63,0,1.87551859565403,-1.01245024447758,0.961634242304571,0.99902517180177,0.428576472620752,0.790254229843056,-0.162732148014183,0.057108415575022,0.099625367521191,-0.41779573726667
+64,0,1.8407338686869,-1.58680706359952,-0.293737994923213,-0.654603713924763,-0.15830470325221,-0.4506171823593,0.106217286056366,-0.250165079508456,-0.598894350859836,-0.860382476004742
+65,0,1.47999238640346,-1.68861965445586,0.661374709635725,0.335413696048534,0.295408469126627,-0.340725080366546,0.611961227458239,0.53327702260923,-0.960254363897463,0.913251337834092
+66,0,1.0735581028252,-1.06052424530937,-0.790281335013236,0.372594655247821,-0.940075790261345,0.972106617215367,-0.246874887198155,-0.501544524013033,-0.134947611932188,0.130090806976322
+67,0,1.63769743034008,-1.64946099093265,-0.600590046972624,0.281621309709353,0.836244003088172,0.56250556179443,-0.244248244001593,0.274273110413607,0.988229164412892,-0.903492892429764
+68,0,1.9226795203725,-1.58810792001545,0.230397844467249,0.458000795025685,0.160534364807898,0.106760231103633,0.084376336290482,-0.410257096809632,-0.388975913032382,0.233684932760446
+69,0,1.42821810695172,-1.75832976379165,0.122894112900537,-0.193746425367835,0.602411133999453,-0.895694511099768,0.347280223444287,0.045175117581033,-0.232661771389541,-0.314648785155521
+70,0,1.42602875697361,-1.16082451050484,0.906027162216176,0.736418182225292,-0.041284854438203,0.308524126840497,0.369205540497406,0.333193031466162,0.98544497734097,-0.253876502721057
+71,0,1.73002019404142,-1.80947421953802,-0.677661468333469,0.07388223501889,-0.682147267310905,0.024126391992196,0.848946249678909,-0.516253994735439,0.202627425635043,-0.897477249843204
+72,0,1.11605808678586,-1.05622349137538,0.492431513300772,-0.737330353527688,0.594894327441348,0.805436037154752,-0.912716679245893,-0.390199322338262,-0.735805203184445,-0.05803264345169
+73,0,1.52878306779173,-1.52822073704896,-0.863821530585294,-0.987125905118183,-0.698190916645222,-0.17859271120364,-0.902497993400075,0.777448050547606,0.03349780154213,0.569802193246196
+74,0,1.69602091303769,-1.68791329506752,-0.919679036112179,-0.083795023015624,0.492078750634905,-0.102786002654994,0.168000984501864,-0.984910911120671,-0.901017886055053,0.639813560268343
+75,0,1.82292095427058,-1.79921516167805,0.107455937171145,-0.854711756750333,0.344969246269787,0.519092986129825,0.410230657805076,-0.91216461269154,0.033943611687528,-0.306643316979961
+76,0,1.15382245032495,-1.9125109596393,-0.80848616018294,-0.010443047871684,-0.706296790283886,0.822118261736111,0.163327430772402,0.252786291364115,-0.501338527911191,-0.28349201031843
+77,0,1.19521627831595,-1.4347201247938,-0.814416838367815,-0.02940231646999,-0.841428202408144,-0.004586605289201,-0.606434730541928,0.714277316437912,-0.44481897692423,-0.753698456302665
+78,0,1.99358961720643,-1.52499478281942,-0.877637461379848,0.414405535550407,-0.03365581494898,0.624692043559635,-0.832402658891314,-0.723028062732401,-0.867099034604054,-0.185632378061498
+79,0,1.6235192049324,-1.52045677356057,0.977828685636029,-0.57502380941392,-0.402617609462035,0.631967959251952,-0.426504420434097,0.480579460496328,0.686338078276468,-0.793812851707889
+80,1,-1.9061964810895,-1.28908450646839,10,10,0.101102136284509,-0.416199695149021,-0.494850987164782,-0.568698448483212,-0.184782382471875,-0.552230498856606
+81,1,-1.12334568706136,-1.43192728687949,-0.202671045004157,-0.138914163603925,-0.937156710796857,-0.116790109384378,-0.094237431941851,-0.896761118553971,-0.183423320636867,0.458624633065419
+82,1,-1.85938009020988,-1.2014277824818,-0.699402902052328,0.801606907908076,0.618074329335756,-0.172568708757076,-0.075693445304373,0.488815268086692,-0.612225386267585,0.515474858015819
+83,1,-1.44593059276162,-1.50738144143115,-0.146467066237161,0.773717178872341,0.498796984960351,-0.015862721592055,0.487162827649467,-0.002016922590367,-0.480395455657278,0.140660394856319
+84,1,-1.5068337349461,-1.39605748721966,0.935273336022611,-0.65840232577507,0.254028615496319,-0.207949363786322,0.494233964181716,0.342544015156094,-0.2790717466048,0.681766781920308
+85,1,1.29459521637362,1.25954745515179,-10,-10,-0.772948300582061,-0.755591080857131,-0.795691897784493,0.140653835392209,-0.160483486922781,0.460920935704452
+86,1,1.04689401512909,1.48899924906156,-0.102806023076495,-0.232256721754397,0.982487312078063,0.220639487969972,0.466108251058299,-0.328239000603224,0.955688285869012,0.98401214247364
+87,1,1.58830474403604,1.70226055213414,-0.400909948872293,-0.887240029691788,-0.796366553971199,-0.189011341359002,-0.984264269832423,0.228539348323108,0.696045037642922,-0.734941166556072
+88,1,1.07001216284605,1.81845698640496,-0.906675421892372,0.643501800272306,0.2964442904515,-0.212339822521429,-0.624947347663644,-0.076505534185115,0.690006945874019,0.603178865697037
+89,1,1.47818853391931,1.1810797217516,-0.777878371782176,0.158700400185078,0.77008386941758,0.318201581494366,-0.577373286340777,0.207915408782256,0.169898207168944,-0.134718349741109
+90,0,-1.39792536337696,1.8903759983709,10,-10,-0.381543623044489,-0.150608604917312,0.940200935058958,-0.260126956593852,0.011178432296195,-0.552646188796202
+91,0,-1.34181919280501,1.37770384290606,-0.273896107346467,0.9218628887177,0.183329714125041,0.794995796775324,0.47034078624241,0.587159127993906,0.656065190534019,0.710378359435155
+92,0,-1.08535749655328,1.25684564483175,-0.093438684660175,0.867363731909897,0.501979335337794,0.929133531466716,0.853038546233495,0.231647371842096,-0.921363933789468,0.9955206665909
+93,0,-1.5078347061732,1.75537297346943,-0.353047628963401,0.686996459628496,0.12650715249212,-0.584157551233493,0.67801198459735,0.130184075673761,-0.541365882749818,0.804095414322346
+94,0,-1.67232665291775,1.91398842184753,-0.055989266428472,0.083972688856283,0.495406878960658,-0.531851511151842,-0.68298755038252,-0.762719341237422,0.044183568378214,0.569492860435106
+95,0,1.52196747373202,-1.81272431584475,-10,10,-0.592427348924565,-0.245215291809175,0.450286805609337,-0.61720080602177,-0.078323806376631,-0.138400199664094
+96,0,1.34277619089321,-1.04264614535854,-0.840523610880692,-0.579768061766314,0.207088065224924,-0.30689024242517,-0.707319832593209,0.067209487208095,-0.219041441615042,0.651618314592841
+97,0,1.72996670685819,-1.26148185356343,-0.071347258910479,-0.571647931880792,0.00248497405952,0.410346123251162,0.294254262248804,0.698018369247902,0.652553267893053,-0.960621219815728
+98,0,1.63679608599505,-1.40483117266873,0.133355343382705,0.785183623637213,0.106494106522641,0.457003384754942,-0.314470768070196,-0.05337112691883,0.86147345141363,-0.770167158107586
+99,0,1.22531932528574,-1.39832123108255,0.751819680541469,0.843477659731268,0.880714646905367,0.20665859661747,-0.85053999542226,0.770244035843202,-0.790477429383416,-0.219373260405667
diff --git a/tests/exec_test/classification_gen_proj/sisso.json b/tests/exec_test/classification_gen_proj/sisso.json
new file mode 100644
index 0000000000000000000000000000000000000000..1be640db39dd72bf93ae0d05470b5dd92684e040
--- /dev/null
+++ b/tests/exec_test/classification_gen_proj/sisso.json
@@ -0,0 +1,17 @@
+{
+    "desc_dim": 2,
+    "n_sis_select": 5,
+    "max_rung": 1,
+    "n_residual": 1,
+    "data_file": "data.csv",
+    "data_file_relative_to_json": true,
+    "property_key": "prop",
+    "leave_out_frac": 0.2,
+    "n_models_store": 1,
+    "n_rung_generate": 1,
+    "calc_type": "classification",
+    "leave_out_inds": [80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99],
+    "opset": ["add", "sub", "abs_diff", "mult", "div", "inv", "abs", "exp", "log", "sin", "cos", "sq", "cb", "six_pow", "sqrt", "cbrt", "neg_exp"],
+    "param_opset" : [],
+    "fix_intercept": false
+}
diff --git a/tests/exec_test/classification_max_corr/check_model.py b/tests/exec_test/classification_max_corr/check_model.py
new file mode 100644
index 0000000000000000000000000000000000000000..5958c75a5d751910b4baa9cd22593a2de8289c5d
--- /dev/null
+++ b/tests/exec_test/classification_max_corr/check_model.py
@@ -0,0 +1,10 @@
+from sissopp import ModelClassifier
+from pathlib import Path
+
+import numpy as np
+
+model = ModelClassifier(
+    str("models/train_dim_2_model_0.dat"), str("models/test_dim_2_model_0.dat")
+)
+assert model.percent_error < 1e-7
+assert model.percent_test_error < 1e-7
diff --git a/tests/exec_test/classification_max_corr/data.csv b/tests/exec_test/classification_max_corr/data.csv
new file mode 100644
index 0000000000000000000000000000000000000000..3fa9f64bd0b9d2133040ea4849a71789ac8f078a
--- /dev/null
+++ b/tests/exec_test/classification_max_corr/data.csv
@@ -0,0 +1,101 @@
+index,prop,A,B,C,D,E,F,G,H,I,J
+0,1,0.1,-0.3,10,10,-0.492825179098274,0.173025977694162,0.598942935224295,-0.298754475196825,-0.581254909010269,-0.110656494210556
+1,1,-1.89442810374214,-1.31996134398007,0.955713896876243,0.478117201427488,0.777586191100329,0.655369716778557,0.174914171427966,-0.288498877530604,-0.045316536149489,-0.606586193752411
+2,1,-1.47460150711424,-1.22614964523433,0.330140292484796,-0.389505498689912,0.138935265824808,-0.871869282167555,0.37472462048701,0.16418591189513,0.293560701443717,0.285608940220021
+3,1,-1.30213414336735,-1.82621262418812,-0.71381302228685,0.968769585007681,0.683148179202864,0.799125092538796,0.309479173526504,0.728052031003468,0.725495580994886,-0.676576302804248
+4,1,-1.73938632269334,-1.58349866505488,-0.067783417095816,0.229988549891323,0.582427598044647,0.261947149184825,-0.31573435079735,0.61178122144268,-0.187058216967238,0.871764347690334
+5,1,-1.56660896632398,-1.05861814902183,-0.212800982302764,0.915459776146607,-0.263465552591813,-0.666126495988014,-0.195028996490007,-0.237170057680116,-0.933358858596883,-0.19726273171241
+6,1,-1.55340876153895,-1.25209231285838,0.972585787901787,-0.872502887185675,0.729110910814452,0.265244787210866,-0.726900973624432,0.248626170107939,0.809004396880265,-0.278494064780479
+7,1,-1.54625325136447,-1.81238888450819,0.431645876221063,-0.595223273279383,0.953557069758112,-0.222915219121563,0.257670939076174,0.728927879098318,-0.579783055417687,-0.179960319428095
+8,1,-1.12735554524035,-1.69261497444728,0.827091199052693,0.208781482910759,-0.573317187361529,-0.00488758921352,0.341918716034638,-0.292901671356202,-0.34531700628784,0.766920547630073
+9,1,-1.35367834815884,-1.38141056472962,0.023965203621816,-0.805412569418484,0.806810139718495,-0.806576263127819,-0.39279977856172,-0.463819363774079,0.16095248005356,-0.553419747131608
+10,1,-1.17853151888796,-1.27705829298504,0.916015229666356,0.0275946645157,-0.829507007977635,-0.700063689327201,-0.715601456588714,-0.439799165143527,-0.487241220494887,0.245279267056121
+11,1,-1.17547049766875,-1.05613281246665,0.404557718897757,0.175997361062361,0.227383730822478,0.774254652577977,-0.616080996917636,-0.948639007451084,0.954076433375225,-0.497102001172339
+12,1,-1.67277915033943,-1.86190239883588,0.906708844886064,-0.592737030373698,0.885229803890949,0.822069297241907,0.204367485562992,0.24610324883505,-0.079476866422163,-0.244006995583434
+13,1,-1.96326165438884,-1.31680458089693,-0.284570394188414,-0.198686061574238,-0.168897609541112,-0.92939259112691,0.265899059671913,-0.828727642584781,-0.427453097474168,-0.738368091608883
+14,1,-1.79497769808481,-1.13948217357082,0.011556817105957,0.499408314757229,0.253163424774478,0.017645446880421,0.401735167095264,-0.650287617298501,-0.262217482830833,-0.510102120130588
+15,1,-1.07957120536262,-1.93245955077991,-0.520161174281201,-0.392478459347396,0.147495839565868,0.780879606474075,-0.281044687799173,-0.148036908135786,-0.208810472224093,0.278961929718128
+16,1,-1.52555145037507,-1.72455209196736,0.562803219191695,0.323676061636996,0.490737136410372,-0.95476192699496,0.028603504036769,-0.246295219650507,-0.297736293597739,-0.632473830957653
+17,1,-1.29142309507315,-1.9506961526212,0.859669461054104,0.485772819254089,0.268883598825009,0.253553321699552,-0.045743087569395,0.66793403278249,0.308591963919947,0.771084301464027
+18,1,-1.23404787121001,-1.68173519287847,-0.118871100462413,0.159000937768132,0.2985428841756,-0.203829205332538,-0.637945695251352,0.658049690810909,0.949529589134008,-0.577812553880056
+19,1,-1.13513050029551,-1.3119036669604,0.74037411093045,0.558782660077505,-0.096052126354583,0.529119817360537,0.372190604770144,0.688656466253683,-0.819433165315505,-0.12814415930811
+20,1,-0.2,0.132,-10,-10,0.785237349732891,-0.387217730495401,-0.942409218899448,0.160806577297675,-0.723969983661972,-0.452650134415823
+21,1,1.1507658618081,1.7260505392724,-0.92290734251321,0.465751384219632,-0.81727500527083,-0.182472640926628,0.887526070620356,0.111592883978488,0.386435078880162,-0.440017211221272
+22,1,1.90389768224701,1.71880759316591,0.28033979546451,0.379365407838544,0.634843008192624,0.371753918780839,-0.611653305369863,0.732567927874185,0.85803611350317,-0.577973441708411
+23,1,1.77751976452381,1.28697050370578,0.222850898945077,-0.424057088828287,-0.27619426781836,0.616941667680694,-0.696779972923147,0.23612770730498,0.760705889780527,0.34004139732033
+24,1,1.65314327493874,1.16282810211312,-0.339501197382944,0.898529591365812,-0.881538228231582,0.090728826664301,-0.858395870780934,0.907272331515896,0.160531735619067,0.678911811768841
+25,1,1.30955180558204,1.36827755737648,-0.444576754578563,-0.78871174512572,0.246625773070183,-0.663474018818313,-0.446355552060464,0.750312773134634,-0.98959522970136,-0.150120109840706
+26,1,1.44924431171893,1.40328864581169,-0.388679577334402,-0.708193450791952,0.850310084800308,-0.767256338531612,0.370509317329194,0.387354921015751,0.533160321164986,0.149390212455131
+27,1,1.61362501391753,1.05448314414567,-0.359644680155969,0.682555404147728,-0.53195400936544,0.934101689590862,-0.73600284018832,-0.29960291454053,0.351125596355818,-0.187842884669279
+28,1,1.0243392720598,1.91059602121133,-0.249409157470717,-0.137070024386644,-0.707128531134104,0.944932049234295,0.431233366052987,0.449543990959262,0.912901932280027,0.77394610963827
+29,1,1.99444678594607,1.67204984441306,0.935302642480463,0.833720966523807,0.254167956717343,-0.007922712021391,-0.114776295376767,-0.276042896002242,-0.813098403125419,0.341922052212475
+30,1,1.40110330287926,1.109011516196,0.425716772255087,0.544174803732763,0.492863854358204,-0.047589791717166,-0.743840790633672,0.525289489060411,0.829611715544936,0.015193221845522
+31,1,1.94995090625471,1.05727410799969,0.12665368551441,0.469705238170149,0.874436248273008,-0.759571175468135,0.310230735619265,-0.80342084374485,-0.462431082486477,-0.407165886759129
+32,1,1.47264625042994,1.18913643279065,-0.731393018031039,0.461102224603009,-0.884528391885322,-0.419893944840798,0.647518214389067,0.688126326408485,0.754656371691603,0.116881923067816
+33,1,1.45901677207507,1.17024364037294,-0.527433424947131,-0.598774697808646,0.113197791601676,-0.50528865259863,0.117572114288939,0.590400320594326,-0.155159386769126,0.354827977413197
+34,1,1.32042744243041,1.19801952930384,-0.818626622405165,-0.029008564510599,0.952315968378468,0.817495784213924,0.182224554845043,-0.01377304364653,-0.26273195293588,-0.859530562808673
+35,1,1.88138237976289,1.03670081839679,0.305218688016626,-0.060885647660027,-0.387903446605514,-0.108064042735465,-0.962980405009682,-0.424289604203511,-0.253442293077285,0.309637368411297
+36,1,1.9986688782461,1.36909257128618,0.54312844740039,0.591372473040837,-0.835367086693457,0.198315253422507,-0.181434739783802,0.636429105754948,0.420628646992331,0.990122364664621
+37,1,1.50455818499044,1.19094974349673,-0.653263607332762,0.359359450868376,0.30131719114182,0.649581794356589,0.942268955633086,0.884659894489377,-0.473171239344398,0.039635066570717
+38,1,1.00833361547154,1.98150630000827,-0.812352457176761,0.219766101590983,-0.65021067790289,0.423621690291556,-0.58865099275791,0.061487886019891,-0.237737474016087,0.641284347380825
+39,1,1.60179185724619,1.12508599627141,-0.829819386940741,-0.345104687573802,0.485166070545119,-0.258839727448056,-0.920615208326881,0.275498215871427,-0.629075534110342,-0.642527887960687
+40,0,0.2,0.58,10,-10,0.041519856511361,0.23303461629095,-0.497233246191187,-0.023544587617095,-0.418540837770003,-0.550233932792512
+41,0,-1.09147574370355,1.70418701701285,-0.480316474702795,-0.753784710340632,-0.613234235616998,0.167955573662474,0.455636631315042,-0.380227635953206,0.48021383007369,-0.453674929885108
+42,0,-1.9425392252915,1.59311394144654,0.310098050913387,-0.835007082906627,0.407580140850853,0.556924247596553,-0.388616604639346,0.60215104751412,-0.984322198098753,-0.996332888983337
+43,0,-1.40302421044915,1.05041379743038,-0.898456453446964,-0.797029924245349,0.47491891024478,0.480193220538417,-0.750856163558686,-0.043960372032018,-0.242651391805662,-0.109239061054006
+44,0,-1.45810616907354,1.08468326497063,0.571329522934018,-0.677379826379623,0.098396984688832,-0.961599170104035,-0.753922591922157,0.361435891257559,-0.638030455493982,0.404349024843908
+45,0,-1.60421432901638,1.57730973247518,0.402433205555268,-0.06801187450078,-0.373089661152032,0.23970878487105,0.416451106643361,-0.50599166271433,-0.88669034806741,0.30364523616443
+46,0,-1.54868661350102,1.32883184576708,-0.174925245509766,0.050330391451536,-0.388676795741932,-0.72307604978553,0.646076107724964,-0.00105589909588,0.491928720743773,-0.647995101369186
+47,0,-1.8920756792535,1.76576258461153,0.289410761217525,0.48566510896872,0.338684773860801,0.374319581439648,-0.105479014627167,0.004520417892418,0.222862261975939,0.23538363683764
+48,0,-1.51442922313653,1.69840409315155,-0.199044563017407,-0.855804112781183,0.947572000564906,0.654939562810152,0.802084131057488,0.010033694468233,0.449766366250574,0.119974134618433
+49,0,-1.33469144171841,1.80124846893287,-0.346729568989951,-0.614828863660672,0.578150372001527,-0.697356489908387,-0.272496177427547,-0.326679505363186,0.403185907494623,0.659834986972357
+50,0,-1.39216086591683,1.96030807097305,-0.470918775433235,-0.165965173767875,-0.118373275802139,-0.804671542299309,-0.273096283874977,0.964676290034708,-0.240786016285174,0.781092750718218
+51,0,-1.10818774496527,1.1321805921252,-0.546315077724052,-0.263397808061131,0.073416112160648,-0.561584513583351,-0.003812545601594,-0.067901708659798,-0.797337624892413,-0.502494288676279
+52,0,-1.12733422378345,1.22290093390908,-0.239618151680487,0.281282683112064,-0.122253338243164,-0.416340912422471,-0.302944823763312,0.950697167857575,0.084774348269755,0.245643637478141
+53,0,-1.54504585318447,1.46465556555859,0.672465261607398,0.963677112876299,-0.732866944741014,0.269879007022312,-0.734121763984793,-0.18475004364869,0.494783604230457,-0.563469688908407
+54,0,-1.69728989778812,1.93427938064611,0.916674666213795,0.744100669613517,-0.536325680879341,0.745349313896706,-0.608494971121628,-0.036147807131094,0.730097211981708,-0.986020687921255
+55,0,-1.46716685688328,1.91950733639359,-0.040012375137611,0.248257524389148,-0.795936343325832,-0.755933622220192,0.664943062567423,-0.560825069941966,-0.987328335835364,0.00918182383389
+56,0,-1.5078580841421,1.11065681931139,-0.75655271526814,-0.433965979475654,-0.925820800763387,0.621204380538264,-0.725355435802351,-0.087195045278291,0.500040007799584,-0.351024070867477
+57,0,-1.79333947783294,1.64615611570236,0.593670368718185,0.74125415566331,-0.835056311664806,-0.128283340965351,0.795769070113583,0.338062872249377,0.961610282279288,-0.519755961049099
+58,0,-1.68562328688306,1.79136645116331,-0.917792004629201,-0.224807652067029,0.751172530954049,0.744925497765574,0.054821387540181,-0.268146122719043,-0.373795753322288,-0.023619900695578
+59,0,-1.70325116873164,1.56173898398367,0.937331444475048,-0.189146596668676,0.726757528139029,0.571196020214809,0.150478496659529,0.716370904753891,0.645947936391794,-0.096512499841381
+60,0,-0.31,-0.164,-10,10,0.303748234076738,0.094684069184242,0.846651908762107,0.505710991097032,-0.664846620425076,-0.722934785670171
+61,0,1.51747503460744,-1.57976833969122,-0.313853456471656,-0.670641690437042,0.337481189036041,-0.695059667580877,0.382512664766648,-0.754051294565859,-0.540912893771664,-0.152736592481289
+62,0,1.36729416399966,-1.54942606995245,0.746279765035798,0.320667909398266,0.075712278316126,0.557089028326803,-0.314459962457274,-0.091179395352991,-0.712572618352738,-0.862523770264919
+63,0,1.87551859565403,-1.01245024447758,0.961634242304571,0.99902517180177,0.428576472620752,0.790254229843056,-0.162732148014183,0.057108415575022,0.099625367521191,-0.41779573726667
+64,0,1.8407338686869,-1.58680706359952,-0.293737994923213,-0.654603713924763,-0.15830470325221,-0.4506171823593,0.106217286056366,-0.250165079508456,-0.598894350859836,-0.860382476004742
+65,0,1.47999238640346,-1.68861965445586,0.661374709635725,0.335413696048534,0.295408469126627,-0.340725080366546,0.611961227458239,0.53327702260923,-0.960254363897463,0.913251337834092
+66,0,1.0735581028252,-1.06052424530937,-0.790281335013236,0.372594655247821,-0.940075790261345,0.972106617215367,-0.246874887198155,-0.501544524013033,-0.134947611932188,0.130090806976322
+67,0,1.63769743034008,-1.64946099093265,-0.600590046972624,0.281621309709353,0.836244003088172,0.56250556179443,-0.244248244001593,0.274273110413607,0.988229164412892,-0.903492892429764
+68,0,1.9226795203725,-1.58810792001545,0.230397844467249,0.458000795025685,0.160534364807898,0.106760231103633,0.084376336290482,-0.410257096809632,-0.388975913032382,0.233684932760446
+69,0,1.42821810695172,-1.75832976379165,0.122894112900537,-0.193746425367835,0.602411133999453,-0.895694511099768,0.347280223444287,0.045175117581033,-0.232661771389541,-0.314648785155521
+70,0,1.42602875697361,-1.16082451050484,0.906027162216176,0.736418182225292,-0.041284854438203,0.308524126840497,0.369205540497406,0.333193031466162,0.98544497734097,-0.253876502721057
+71,0,1.73002019404142,-1.80947421953802,-0.677661468333469,0.07388223501889,-0.682147267310905,0.024126391992196,0.848946249678909,-0.516253994735439,0.202627425635043,-0.897477249843204
+72,0,1.11605808678586,-1.05622349137538,0.492431513300772,-0.737330353527688,0.594894327441348,0.805436037154752,-0.912716679245893,-0.390199322338262,-0.735805203184445,-0.05803264345169
+73,0,1.52878306779173,-1.52822073704896,-0.863821530585294,-0.987125905118183,-0.698190916645222,-0.17859271120364,-0.902497993400075,0.777448050547606,0.03349780154213,0.569802193246196
+74,0,1.69602091303769,-1.68791329506752,-0.919679036112179,-0.083795023015624,0.492078750634905,-0.102786002654994,0.168000984501864,-0.984910911120671,-0.901017886055053,0.639813560268343
+75,0,1.82292095427058,-1.79921516167805,0.107455937171145,-0.854711756750333,0.344969246269787,0.519092986129825,0.410230657805076,-0.91216461269154,0.033943611687528,-0.306643316979961
+76,0,1.15382245032495,-1.9125109596393,-0.80848616018294,-0.010443047871684,-0.706296790283886,0.822118261736111,0.163327430772402,0.252786291364115,-0.501338527911191,-0.28349201031843
+77,0,1.19521627831595,-1.4347201247938,-0.814416838367815,-0.02940231646999,-0.841428202408144,-0.004586605289201,-0.606434730541928,0.714277316437912,-0.44481897692423,-0.753698456302665
+78,0,1.99358961720643,-1.52499478281942,-0.877637461379848,0.414405535550407,-0.03365581494898,0.624692043559635,-0.832402658891314,-0.723028062732401,-0.867099034604054,-0.185632378061498
+79,0,1.6235192049324,-1.52045677356057,0.977828685636029,-0.57502380941392,-0.402617609462035,0.631967959251952,-0.426504420434097,0.480579460496328,0.686338078276468,-0.793812851707889
+80,1,-1.9061964810895,-1.28908450646839,10,10,0.101102136284509,-0.416199695149021,-0.494850987164782,-0.568698448483212,-0.184782382471875,-0.552230498856606
+81,1,-1.12334568706136,-1.43192728687949,-0.202671045004157,-0.138914163603925,-0.937156710796857,-0.116790109384378,-0.094237431941851,-0.896761118553971,-0.183423320636867,0.458624633065419
+82,1,-1.85938009020988,-1.2014277824818,-0.699402902052328,0.801606907908076,0.618074329335756,-0.172568708757076,-0.075693445304373,0.488815268086692,-0.612225386267585,0.515474858015819
+83,1,-1.44593059276162,-1.50738144143115,-0.146467066237161,0.773717178872341,0.498796984960351,-0.015862721592055,0.487162827649467,-0.002016922590367,-0.480395455657278,0.140660394856319
+84,1,-1.5068337349461,-1.39605748721966,0.935273336022611,-0.65840232577507,0.254028615496319,-0.207949363786322,0.494233964181716,0.342544015156094,-0.2790717466048,0.681766781920308
+85,1,1.29459521637362,1.25954745515179,-10,-10,-0.772948300582061,-0.755591080857131,-0.795691897784493,0.140653835392209,-0.160483486922781,0.460920935704452
+86,1,1.04689401512909,1.48899924906156,-0.102806023076495,-0.232256721754397,0.982487312078063,0.220639487969972,0.466108251058299,-0.328239000603224,0.955688285869012,0.98401214247364
+87,1,1.58830474403604,1.70226055213414,-0.400909948872293,-0.887240029691788,-0.796366553971199,-0.189011341359002,-0.984264269832423,0.228539348323108,0.696045037642922,-0.734941166556072
+88,1,1.07001216284605,1.81845698640496,-0.906675421892372,0.643501800272306,0.2964442904515,-0.212339822521429,-0.624947347663644,-0.076505534185115,0.690006945874019,0.603178865697037
+89,1,1.47818853391931,1.1810797217516,-0.777878371782176,0.158700400185078,0.77008386941758,0.318201581494366,-0.577373286340777,0.207915408782256,0.169898207168944,-0.134718349741109
+90,0,-1.39792536337696,1.8903759983709,10,-10,-0.381543623044489,-0.150608604917312,0.940200935058958,-0.260126956593852,0.011178432296195,-0.552646188796202
+91,0,-1.34181919280501,1.37770384290606,-0.273896107346467,0.9218628887177,0.183329714125041,0.794995796775324,0.47034078624241,0.587159127993906,0.656065190534019,0.710378359435155
+92,0,-1.08535749655328,1.25684564483175,-0.093438684660175,0.867363731909897,0.501979335337794,0.929133531466716,0.853038546233495,0.231647371842096,-0.921363933789468,0.9955206665909
+93,0,-1.5078347061732,1.75537297346943,-0.353047628963401,0.686996459628496,0.12650715249212,-0.584157551233493,0.67801198459735,0.130184075673761,-0.541365882749818,0.804095414322346
+94,0,-1.67232665291775,1.91398842184753,-0.055989266428472,0.083972688856283,0.495406878960658,-0.531851511151842,-0.68298755038252,-0.762719341237422,0.044183568378214,0.569492860435106
+95,0,1.52196747373202,-1.81272431584475,-10,10,-0.592427348924565,-0.245215291809175,0.450286805609337,-0.61720080602177,-0.078323806376631,-0.138400199664094
+96,0,1.34277619089321,-1.04264614535854,-0.840523610880692,-0.579768061766314,0.207088065224924,-0.30689024242517,-0.707319832593209,0.067209487208095,-0.219041441615042,0.651618314592841
+97,0,1.72996670685819,-1.26148185356343,-0.071347258910479,-0.571647931880792,0.00248497405952,0.410346123251162,0.294254262248804,0.698018369247902,0.652553267893053,-0.960621219815728
+98,0,1.63679608599505,-1.40483117266873,0.133355343382705,0.785183623637213,0.106494106522641,0.457003384754942,-0.314470768070196,-0.05337112691883,0.86147345141363,-0.770167158107586
+99,0,1.22531932528574,-1.39832123108255,0.751819680541469,0.843477659731268,0.880714646905367,0.20665859661747,-0.85053999542226,0.770244035843202,-0.790477429383416,-0.219373260405667
diff --git a/tests/exec_test/classification_max_corr/sisso.json b/tests/exec_test/classification_max_corr/sisso.json
new file mode 100644
index 0000000000000000000000000000000000000000..bc7fa4cd2b4bf9ee1093d3f7e33ca3052bdab7f5
--- /dev/null
+++ b/tests/exec_test/classification_max_corr/sisso.json
@@ -0,0 +1,17 @@
+{
+    "desc_dim": 2,
+    "n_sis_select": 5,
+    "max_rung": 1,
+    "n_residual": 1,
+    "data_file": "data.csv",
+    "data_file_relative_to_json": true,
+    "max_feat_cross_correlation": 0.9,
+    "property_key": "prop",
+    "leave_out_frac": 0.2,
+    "n_models_store": 1,
+    "calc_type": "classification",
+    "leave_out_inds": [80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99],
+    "opset": ["add", "sub", "abs_diff", "mult", "div", "inv", "abs", "exp", "log", "sin", "cos", "sq", "cb", "six_pow", "sqrt", "cbrt", "neg_exp"],
+    "param_opset" : [],
+    "fix_intercept": false
+}
diff --git a/tests/exec_test/classification_max_corr_gen_proj/check_model.py b/tests/exec_test/classification_max_corr_gen_proj/check_model.py
new file mode 100644
index 0000000000000000000000000000000000000000..5958c75a5d751910b4baa9cd22593a2de8289c5d
--- /dev/null
+++ b/tests/exec_test/classification_max_corr_gen_proj/check_model.py
@@ -0,0 +1,10 @@
+from sissopp import ModelClassifier
+from pathlib import Path
+
+import numpy as np
+
+model = ModelClassifier(
+    str("models/train_dim_2_model_0.dat"), str("models/test_dim_2_model_0.dat")
+)
+assert model.percent_error < 1e-7
+assert model.percent_test_error < 1e-7
diff --git a/tests/exec_test/classification_max_corr_gen_proj/data.csv b/tests/exec_test/classification_max_corr_gen_proj/data.csv
new file mode 100644
index 0000000000000000000000000000000000000000..3fa9f64bd0b9d2133040ea4849a71789ac8f078a
--- /dev/null
+++ b/tests/exec_test/classification_max_corr_gen_proj/data.csv
@@ -0,0 +1,101 @@
+index,prop,A,B,C,D,E,F,G,H,I,J
+0,1,0.1,-0.3,10,10,-0.492825179098274,0.173025977694162,0.598942935224295,-0.298754475196825,-0.581254909010269,-0.110656494210556
+1,1,-1.89442810374214,-1.31996134398007,0.955713896876243,0.478117201427488,0.777586191100329,0.655369716778557,0.174914171427966,-0.288498877530604,-0.045316536149489,-0.606586193752411
+2,1,-1.47460150711424,-1.22614964523433,0.330140292484796,-0.389505498689912,0.138935265824808,-0.871869282167555,0.37472462048701,0.16418591189513,0.293560701443717,0.285608940220021
+3,1,-1.30213414336735,-1.82621262418812,-0.71381302228685,0.968769585007681,0.683148179202864,0.799125092538796,0.309479173526504,0.728052031003468,0.725495580994886,-0.676576302804248
+4,1,-1.73938632269334,-1.58349866505488,-0.067783417095816,0.229988549891323,0.582427598044647,0.261947149184825,-0.31573435079735,0.61178122144268,-0.187058216967238,0.871764347690334
+5,1,-1.56660896632398,-1.05861814902183,-0.212800982302764,0.915459776146607,-0.263465552591813,-0.666126495988014,-0.195028996490007,-0.237170057680116,-0.933358858596883,-0.19726273171241
+6,1,-1.55340876153895,-1.25209231285838,0.972585787901787,-0.872502887185675,0.729110910814452,0.265244787210866,-0.726900973624432,0.248626170107939,0.809004396880265,-0.278494064780479
+7,1,-1.54625325136447,-1.81238888450819,0.431645876221063,-0.595223273279383,0.953557069758112,-0.222915219121563,0.257670939076174,0.728927879098318,-0.579783055417687,-0.179960319428095
+8,1,-1.12735554524035,-1.69261497444728,0.827091199052693,0.208781482910759,-0.573317187361529,-0.00488758921352,0.341918716034638,-0.292901671356202,-0.34531700628784,0.766920547630073
+9,1,-1.35367834815884,-1.38141056472962,0.023965203621816,-0.805412569418484,0.806810139718495,-0.806576263127819,-0.39279977856172,-0.463819363774079,0.16095248005356,-0.553419747131608
+10,1,-1.17853151888796,-1.27705829298504,0.916015229666356,0.0275946645157,-0.829507007977635,-0.700063689327201,-0.715601456588714,-0.439799165143527,-0.487241220494887,0.245279267056121
+11,1,-1.17547049766875,-1.05613281246665,0.404557718897757,0.175997361062361,0.227383730822478,0.774254652577977,-0.616080996917636,-0.948639007451084,0.954076433375225,-0.497102001172339
+12,1,-1.67277915033943,-1.86190239883588,0.906708844886064,-0.592737030373698,0.885229803890949,0.822069297241907,0.204367485562992,0.24610324883505,-0.079476866422163,-0.244006995583434
+13,1,-1.96326165438884,-1.31680458089693,-0.284570394188414,-0.198686061574238,-0.168897609541112,-0.92939259112691,0.265899059671913,-0.828727642584781,-0.427453097474168,-0.738368091608883
+14,1,-1.79497769808481,-1.13948217357082,0.011556817105957,0.499408314757229,0.253163424774478,0.017645446880421,0.401735167095264,-0.650287617298501,-0.262217482830833,-0.510102120130588
+15,1,-1.07957120536262,-1.93245955077991,-0.520161174281201,-0.392478459347396,0.147495839565868,0.780879606474075,-0.281044687799173,-0.148036908135786,-0.208810472224093,0.278961929718128
+16,1,-1.52555145037507,-1.72455209196736,0.562803219191695,0.323676061636996,0.490737136410372,-0.95476192699496,0.028603504036769,-0.246295219650507,-0.297736293597739,-0.632473830957653
+17,1,-1.29142309507315,-1.9506961526212,0.859669461054104,0.485772819254089,0.268883598825009,0.253553321699552,-0.045743087569395,0.66793403278249,0.308591963919947,0.771084301464027
+18,1,-1.23404787121001,-1.68173519287847,-0.118871100462413,0.159000937768132,0.2985428841756,-0.203829205332538,-0.637945695251352,0.658049690810909,0.949529589134008,-0.577812553880056
+19,1,-1.13513050029551,-1.3119036669604,0.74037411093045,0.558782660077505,-0.096052126354583,0.529119817360537,0.372190604770144,0.688656466253683,-0.819433165315505,-0.12814415930811
+20,1,-0.2,0.132,-10,-10,0.785237349732891,-0.387217730495401,-0.942409218899448,0.160806577297675,-0.723969983661972,-0.452650134415823
+21,1,1.1507658618081,1.7260505392724,-0.92290734251321,0.465751384219632,-0.81727500527083,-0.182472640926628,0.887526070620356,0.111592883978488,0.386435078880162,-0.440017211221272
+22,1,1.90389768224701,1.71880759316591,0.28033979546451,0.379365407838544,0.634843008192624,0.371753918780839,-0.611653305369863,0.732567927874185,0.85803611350317,-0.577973441708411
+23,1,1.77751976452381,1.28697050370578,0.222850898945077,-0.424057088828287,-0.27619426781836,0.616941667680694,-0.696779972923147,0.23612770730498,0.760705889780527,0.34004139732033
+24,1,1.65314327493874,1.16282810211312,-0.339501197382944,0.898529591365812,-0.881538228231582,0.090728826664301,-0.858395870780934,0.907272331515896,0.160531735619067,0.678911811768841
+25,1,1.30955180558204,1.36827755737648,-0.444576754578563,-0.78871174512572,0.246625773070183,-0.663474018818313,-0.446355552060464,0.750312773134634,-0.98959522970136,-0.150120109840706
+26,1,1.44924431171893,1.40328864581169,-0.388679577334402,-0.708193450791952,0.850310084800308,-0.767256338531612,0.370509317329194,0.387354921015751,0.533160321164986,0.149390212455131
+27,1,1.61362501391753,1.05448314414567,-0.359644680155969,0.682555404147728,-0.53195400936544,0.934101689590862,-0.73600284018832,-0.29960291454053,0.351125596355818,-0.187842884669279
+28,1,1.0243392720598,1.91059602121133,-0.249409157470717,-0.137070024386644,-0.707128531134104,0.944932049234295,0.431233366052987,0.449543990959262,0.912901932280027,0.77394610963827
+29,1,1.99444678594607,1.67204984441306,0.935302642480463,0.833720966523807,0.254167956717343,-0.007922712021391,-0.114776295376767,-0.276042896002242,-0.813098403125419,0.341922052212475
+30,1,1.40110330287926,1.109011516196,0.425716772255087,0.544174803732763,0.492863854358204,-0.047589791717166,-0.743840790633672,0.525289489060411,0.829611715544936,0.015193221845522
+31,1,1.94995090625471,1.05727410799969,0.12665368551441,0.469705238170149,0.874436248273008,-0.759571175468135,0.310230735619265,-0.80342084374485,-0.462431082486477,-0.407165886759129
+32,1,1.47264625042994,1.18913643279065,-0.731393018031039,0.461102224603009,-0.884528391885322,-0.419893944840798,0.647518214389067,0.688126326408485,0.754656371691603,0.116881923067816
+33,1,1.45901677207507,1.17024364037294,-0.527433424947131,-0.598774697808646,0.113197791601676,-0.50528865259863,0.117572114288939,0.590400320594326,-0.155159386769126,0.354827977413197
+34,1,1.32042744243041,1.19801952930384,-0.818626622405165,-0.029008564510599,0.952315968378468,0.817495784213924,0.182224554845043,-0.01377304364653,-0.26273195293588,-0.859530562808673
+35,1,1.88138237976289,1.03670081839679,0.305218688016626,-0.060885647660027,-0.387903446605514,-0.108064042735465,-0.962980405009682,-0.424289604203511,-0.253442293077285,0.309637368411297
+36,1,1.9986688782461,1.36909257128618,0.54312844740039,0.591372473040837,-0.835367086693457,0.198315253422507,-0.181434739783802,0.636429105754948,0.420628646992331,0.990122364664621
+37,1,1.50455818499044,1.19094974349673,-0.653263607332762,0.359359450868376,0.30131719114182,0.649581794356589,0.942268955633086,0.884659894489377,-0.473171239344398,0.039635066570717
+38,1,1.00833361547154,1.98150630000827,-0.812352457176761,0.219766101590983,-0.65021067790289,0.423621690291556,-0.58865099275791,0.061487886019891,-0.237737474016087,0.641284347380825
+39,1,1.60179185724619,1.12508599627141,-0.829819386940741,-0.345104687573802,0.485166070545119,-0.258839727448056,-0.920615208326881,0.275498215871427,-0.629075534110342,-0.642527887960687
+40,0,0.2,0.58,10,-10,0.041519856511361,0.23303461629095,-0.497233246191187,-0.023544587617095,-0.418540837770003,-0.550233932792512
+41,0,-1.09147574370355,1.70418701701285,-0.480316474702795,-0.753784710340632,-0.613234235616998,0.167955573662474,0.455636631315042,-0.380227635953206,0.48021383007369,-0.453674929885108
+42,0,-1.9425392252915,1.59311394144654,0.310098050913387,-0.835007082906627,0.407580140850853,0.556924247596553,-0.388616604639346,0.60215104751412,-0.984322198098753,-0.996332888983337
+43,0,-1.40302421044915,1.05041379743038,-0.898456453446964,-0.797029924245349,0.47491891024478,0.480193220538417,-0.750856163558686,-0.043960372032018,-0.242651391805662,-0.109239061054006
+44,0,-1.45810616907354,1.08468326497063,0.571329522934018,-0.677379826379623,0.098396984688832,-0.961599170104035,-0.753922591922157,0.361435891257559,-0.638030455493982,0.404349024843908
+45,0,-1.60421432901638,1.57730973247518,0.402433205555268,-0.06801187450078,-0.373089661152032,0.23970878487105,0.416451106643361,-0.50599166271433,-0.88669034806741,0.30364523616443
+46,0,-1.54868661350102,1.32883184576708,-0.174925245509766,0.050330391451536,-0.388676795741932,-0.72307604978553,0.646076107724964,-0.00105589909588,0.491928720743773,-0.647995101369186
+47,0,-1.8920756792535,1.76576258461153,0.289410761217525,0.48566510896872,0.338684773860801,0.374319581439648,-0.105479014627167,0.004520417892418,0.222862261975939,0.23538363683764
+48,0,-1.51442922313653,1.69840409315155,-0.199044563017407,-0.855804112781183,0.947572000564906,0.654939562810152,0.802084131057488,0.010033694468233,0.449766366250574,0.119974134618433
+49,0,-1.33469144171841,1.80124846893287,-0.346729568989951,-0.614828863660672,0.578150372001527,-0.697356489908387,-0.272496177427547,-0.326679505363186,0.403185907494623,0.659834986972357
+50,0,-1.39216086591683,1.96030807097305,-0.470918775433235,-0.165965173767875,-0.118373275802139,-0.804671542299309,-0.273096283874977,0.964676290034708,-0.240786016285174,0.781092750718218
+51,0,-1.10818774496527,1.1321805921252,-0.546315077724052,-0.263397808061131,0.073416112160648,-0.561584513583351,-0.003812545601594,-0.067901708659798,-0.797337624892413,-0.502494288676279
+52,0,-1.12733422378345,1.22290093390908,-0.239618151680487,0.281282683112064,-0.122253338243164,-0.416340912422471,-0.302944823763312,0.950697167857575,0.084774348269755,0.245643637478141
+53,0,-1.54504585318447,1.46465556555859,0.672465261607398,0.963677112876299,-0.732866944741014,0.269879007022312,-0.734121763984793,-0.18475004364869,0.494783604230457,-0.563469688908407
+54,0,-1.69728989778812,1.93427938064611,0.916674666213795,0.744100669613517,-0.536325680879341,0.745349313896706,-0.608494971121628,-0.036147807131094,0.730097211981708,-0.986020687921255
+55,0,-1.46716685688328,1.91950733639359,-0.040012375137611,0.248257524389148,-0.795936343325832,-0.755933622220192,0.664943062567423,-0.560825069941966,-0.987328335835364,0.00918182383389
+56,0,-1.5078580841421,1.11065681931139,-0.75655271526814,-0.433965979475654,-0.925820800763387,0.621204380538264,-0.725355435802351,-0.087195045278291,0.500040007799584,-0.351024070867477
+57,0,-1.79333947783294,1.64615611570236,0.593670368718185,0.74125415566331,-0.835056311664806,-0.128283340965351,0.795769070113583,0.338062872249377,0.961610282279288,-0.519755961049099
+58,0,-1.68562328688306,1.79136645116331,-0.917792004629201,-0.224807652067029,0.751172530954049,0.744925497765574,0.054821387540181,-0.268146122719043,-0.373795753322288,-0.023619900695578
+59,0,-1.70325116873164,1.56173898398367,0.937331444475048,-0.189146596668676,0.726757528139029,0.571196020214809,0.150478496659529,0.716370904753891,0.645947936391794,-0.096512499841381
+60,0,-0.31,-0.164,-10,10,0.303748234076738,0.094684069184242,0.846651908762107,0.505710991097032,-0.664846620425076,-0.722934785670171
+61,0,1.51747503460744,-1.57976833969122,-0.313853456471656,-0.670641690437042,0.337481189036041,-0.695059667580877,0.382512664766648,-0.754051294565859,-0.540912893771664,-0.152736592481289
+62,0,1.36729416399966,-1.54942606995245,0.746279765035798,0.320667909398266,0.075712278316126,0.557089028326803,-0.314459962457274,-0.091179395352991,-0.712572618352738,-0.862523770264919
+63,0,1.87551859565403,-1.01245024447758,0.961634242304571,0.99902517180177,0.428576472620752,0.790254229843056,-0.162732148014183,0.057108415575022,0.099625367521191,-0.41779573726667
+64,0,1.8407338686869,-1.58680706359952,-0.293737994923213,-0.654603713924763,-0.15830470325221,-0.4506171823593,0.106217286056366,-0.250165079508456,-0.598894350859836,-0.860382476004742
+65,0,1.47999238640346,-1.68861965445586,0.661374709635725,0.335413696048534,0.295408469126627,-0.340725080366546,0.611961227458239,0.53327702260923,-0.960254363897463,0.913251337834092
+66,0,1.0735581028252,-1.06052424530937,-0.790281335013236,0.372594655247821,-0.940075790261345,0.972106617215367,-0.246874887198155,-0.501544524013033,-0.134947611932188,0.130090806976322
+67,0,1.63769743034008,-1.64946099093265,-0.600590046972624,0.281621309709353,0.836244003088172,0.56250556179443,-0.244248244001593,0.274273110413607,0.988229164412892,-0.903492892429764
+68,0,1.9226795203725,-1.58810792001545,0.230397844467249,0.458000795025685,0.160534364807898,0.106760231103633,0.084376336290482,-0.410257096809632,-0.388975913032382,0.233684932760446
+69,0,1.42821810695172,-1.75832976379165,0.122894112900537,-0.193746425367835,0.602411133999453,-0.895694511099768,0.347280223444287,0.045175117581033,-0.232661771389541,-0.314648785155521
+70,0,1.42602875697361,-1.16082451050484,0.906027162216176,0.736418182225292,-0.041284854438203,0.308524126840497,0.369205540497406,0.333193031466162,0.98544497734097,-0.253876502721057
+71,0,1.73002019404142,-1.80947421953802,-0.677661468333469,0.07388223501889,-0.682147267310905,0.024126391992196,0.848946249678909,-0.516253994735439,0.202627425635043,-0.897477249843204
+72,0,1.11605808678586,-1.05622349137538,0.492431513300772,-0.737330353527688,0.594894327441348,0.805436037154752,-0.912716679245893,-0.390199322338262,-0.735805203184445,-0.05803264345169
+73,0,1.52878306779173,-1.52822073704896,-0.863821530585294,-0.987125905118183,-0.698190916645222,-0.17859271120364,-0.902497993400075,0.777448050547606,0.03349780154213,0.569802193246196
+74,0,1.69602091303769,-1.68791329506752,-0.919679036112179,-0.083795023015624,0.492078750634905,-0.102786002654994,0.168000984501864,-0.984910911120671,-0.901017886055053,0.639813560268343
+75,0,1.82292095427058,-1.79921516167805,0.107455937171145,-0.854711756750333,0.344969246269787,0.519092986129825,0.410230657805076,-0.91216461269154,0.033943611687528,-0.306643316979961
+76,0,1.15382245032495,-1.9125109596393,-0.80848616018294,-0.010443047871684,-0.706296790283886,0.822118261736111,0.163327430772402,0.252786291364115,-0.501338527911191,-0.28349201031843
+77,0,1.19521627831595,-1.4347201247938,-0.814416838367815,-0.02940231646999,-0.841428202408144,-0.004586605289201,-0.606434730541928,0.714277316437912,-0.44481897692423,-0.753698456302665
+78,0,1.99358961720643,-1.52499478281942,-0.877637461379848,0.414405535550407,-0.03365581494898,0.624692043559635,-0.832402658891314,-0.723028062732401,-0.867099034604054,-0.185632378061498
+79,0,1.6235192049324,-1.52045677356057,0.977828685636029,-0.57502380941392,-0.402617609462035,0.631967959251952,-0.426504420434097,0.480579460496328,0.686338078276468,-0.793812851707889
+80,1,-1.9061964810895,-1.28908450646839,10,10,0.101102136284509,-0.416199695149021,-0.494850987164782,-0.568698448483212,-0.184782382471875,-0.552230498856606
+81,1,-1.12334568706136,-1.43192728687949,-0.202671045004157,-0.138914163603925,-0.937156710796857,-0.116790109384378,-0.094237431941851,-0.896761118553971,-0.183423320636867,0.458624633065419
+82,1,-1.85938009020988,-1.2014277824818,-0.699402902052328,0.801606907908076,0.618074329335756,-0.172568708757076,-0.075693445304373,0.488815268086692,-0.612225386267585,0.515474858015819
+83,1,-1.44593059276162,-1.50738144143115,-0.146467066237161,0.773717178872341,0.498796984960351,-0.015862721592055,0.487162827649467,-0.002016922590367,-0.480395455657278,0.140660394856319
+84,1,-1.5068337349461,-1.39605748721966,0.935273336022611,-0.65840232577507,0.254028615496319,-0.207949363786322,0.494233964181716,0.342544015156094,-0.2790717466048,0.681766781920308
+85,1,1.29459521637362,1.25954745515179,-10,-10,-0.772948300582061,-0.755591080857131,-0.795691897784493,0.140653835392209,-0.160483486922781,0.460920935704452
+86,1,1.04689401512909,1.48899924906156,-0.102806023076495,-0.232256721754397,0.982487312078063,0.220639487969972,0.466108251058299,-0.328239000603224,0.955688285869012,0.98401214247364
+87,1,1.58830474403604,1.70226055213414,-0.400909948872293,-0.887240029691788,-0.796366553971199,-0.189011341359002,-0.984264269832423,0.228539348323108,0.696045037642922,-0.734941166556072
+88,1,1.07001216284605,1.81845698640496,-0.906675421892372,0.643501800272306,0.2964442904515,-0.212339822521429,-0.624947347663644,-0.076505534185115,0.690006945874019,0.603178865697037
+89,1,1.47818853391931,1.1810797217516,-0.777878371782176,0.158700400185078,0.77008386941758,0.318201581494366,-0.577373286340777,0.207915408782256,0.169898207168944,-0.134718349741109
+90,0,-1.39792536337696,1.8903759983709,10,-10,-0.381543623044489,-0.150608604917312,0.940200935058958,-0.260126956593852,0.011178432296195,-0.552646188796202
+91,0,-1.34181919280501,1.37770384290606,-0.273896107346467,0.9218628887177,0.183329714125041,0.794995796775324,0.47034078624241,0.587159127993906,0.656065190534019,0.710378359435155
+92,0,-1.08535749655328,1.25684564483175,-0.093438684660175,0.867363731909897,0.501979335337794,0.929133531466716,0.853038546233495,0.231647371842096,-0.921363933789468,0.9955206665909
+93,0,-1.5078347061732,1.75537297346943,-0.353047628963401,0.686996459628496,0.12650715249212,-0.584157551233493,0.67801198459735,0.130184075673761,-0.541365882749818,0.804095414322346
+94,0,-1.67232665291775,1.91398842184753,-0.055989266428472,0.083972688856283,0.495406878960658,-0.531851511151842,-0.68298755038252,-0.762719341237422,0.044183568378214,0.569492860435106
+95,0,1.52196747373202,-1.81272431584475,-10,10,-0.592427348924565,-0.245215291809175,0.450286805609337,-0.61720080602177,-0.078323806376631,-0.138400199664094
+96,0,1.34277619089321,-1.04264614535854,-0.840523610880692,-0.579768061766314,0.207088065224924,-0.30689024242517,-0.707319832593209,0.067209487208095,-0.219041441615042,0.651618314592841
+97,0,1.72996670685819,-1.26148185356343,-0.071347258910479,-0.571647931880792,0.00248497405952,0.410346123251162,0.294254262248804,0.698018369247902,0.652553267893053,-0.960621219815728
+98,0,1.63679608599505,-1.40483117266873,0.133355343382705,0.785183623637213,0.106494106522641,0.457003384754942,-0.314470768070196,-0.05337112691883,0.86147345141363,-0.770167158107586
+99,0,1.22531932528574,-1.39832123108255,0.751819680541469,0.843477659731268,0.880714646905367,0.20665859661747,-0.85053999542226,0.770244035843202,-0.790477429383416,-0.219373260405667
diff --git a/tests/exec_test/classification_max_corr_gen_proj/sisso.json b/tests/exec_test/classification_max_corr_gen_proj/sisso.json
new file mode 100644
index 0000000000000000000000000000000000000000..8ff0480a434963cdfcd12bc13cc137a8a66dfd11
--- /dev/null
+++ b/tests/exec_test/classification_max_corr_gen_proj/sisso.json
@@ -0,0 +1,18 @@
+{
+    "desc_dim": 2,
+    "n_sis_select": 5,
+    "max_rung": 1,
+    "n_residual": 1,
+    "data_file": "data.csv",
+    "data_file_relatice_to_json": true,
+    "max_feat_cross_correlation": 0.9,
+    "property_key": "prop",
+    "leave_out_frac": 0.2,
+    "n_models_store": 1,
+    "n_rung_generate": 1,
+    "calc_type": "classification",
+    "leave_out_inds": [80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99],
+    "opset": ["add", "sub", "abs_diff", "mult", "div", "inv", "abs", "exp", "log", "sin", "cos", "sq", "cb", "six_pow", "sqrt", "cbrt", "neg_exp"],
+    "param_opset" : [],
+    "fix_intercept": false
+}
diff --git a/tests/exec_test/max_corr_gen_proj/sisso.json b/tests/exec_test/max_corr_gen_proj/sisso.json
new file mode 100644
index 0000000000000000000000000000000000000000..da652afb601069edc062461557a576153c67260e
--- /dev/null
+++ b/tests/exec_test/max_corr_gen_proj/sisso.json
@@ -0,0 +1,18 @@
+{
+    "desc_dim": 2,
+    "n_sis_select": 1,
+    "max_rung": 2,
+    "n_residual": 1,
+    "data_file": "../data.csv",
+    "data_file_relatice_to_json": true,
+    "property_key": "Prop",
+    "task_key": "Task",
+    "leave_out_frac": 0.05,
+    "n_models_store": 1,
+    "n_rung_generate": 1,
+    "max_feat_cross_correlation": 0.99,
+    "leave_out_inds": [0, 1, 2, 60, 61],
+    "opset": ["add", "sub", "abs_diff", "mult", "div", "inv", "abs", "exp", "log", "sin", "cos", "sq", "cb", "six_pow", "sqrt", "cbrt", "neg_exp"],
+    "param_opset": [],
+    "fix_intercept": false
+}
diff --git a/tests/exec_test/reparam/sisso.json b/tests/exec_test/reparam/sisso.json
index 64267ee61bae531b89fa71491fe34d8d68141ce6..d90917e122558ad90f6b7dcb206d3a76015d0627 100644
--- a/tests/exec_test/reparam/sisso.json
+++ b/tests/exec_test/reparam/sisso.json
@@ -1,7 +1,7 @@
 {
     "desc_dim": 2,
     "n_sis_select": 5,
-    "max_rung": 2,
+    "max_rung": 1,
     "n_residual": 5,
     "data_file": "data.csv",
     "data_file_relatice_to_json": true,
diff --git a/tests/exec_test/reparam_gen_proj/sisso.json b/tests/exec_test/reparam_gen_proj/sisso.json
index 653537310ef21991ab7fb9e5614ca59fd89c5266..c3c554cf65f999cfd552d8a0bac735f1664be560 100644
--- a/tests/exec_test/reparam_gen_proj/sisso.json
+++ b/tests/exec_test/reparam_gen_proj/sisso.json
@@ -1,7 +1,7 @@
 {
     "desc_dim": 2,
     "n_sis_select": 5,
-    "max_rung": 2,
+    "max_rung": 1,
     "n_residual": 5,
     "data_file": "data.csv",
     "data_file_relatice_to_json": true,
diff --git a/tests/googletest/feature_creation/feature_generation/test_abs_diff_node.cc b/tests/googletest/feature_creation/feature_generation/test_abs_diff_node.cc
index 15db40a58c96c5e629f5ccbf74569acf2d1e205d..9668bf16a15d3b82754db809463b3220f8d50a11 100644
--- a/tests/googletest/feature_creation/feature_generation/test_abs_diff_node.cc
+++ b/tests/googletest/feature_creation/feature_generation/test_abs_diff_node.cc
@@ -153,6 +153,26 @@ namespace
         {}
     }
 
+    TEST_F(AbsDiffNodeTest, HardCopyTest)
+    {
+        _abs_diff_test = std::make_shared<AbsDiffNode>(_phi[0], _phi[1], 5, 1e-50, 1e50);
+        _abs_diff_test = std::make_shared<AbsDiffNode>(_abs_diff_test, _phi[1], 6, 1e-50, 1e50);
+        node_ptr copy_test = _abs_diff_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 2);
+
+        EXPECT_EQ(copy_test->value_ptr()[0], 1.0);
+        EXPECT_EQ(copy_test->test_value_ptr()[0], 5.0);
+
+        EXPECT_EQ(copy_test->value()[0], 1.0);
+        EXPECT_EQ(copy_test->test_value()[0], 5.0);
+
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "m");
+
+        EXPECT_STREQ(copy_test->expr().c_str(), "(|(|A - B|) - B|)");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), "0|1|abd|1|abd");
+    }
+
     TEST_F(AbsDiffNodeTest, AttributesTest)
     {
         _abs_diff_test = std::make_shared<AbsDiffNode>(_phi[0], _phi[1], 5, 1e-50, 1e50);
diff --git a/tests/googletest/feature_creation/feature_generation/test_abs_node.cc b/tests/googletest/feature_creation/feature_generation/test_abs_node.cc
index 24ec3033d94c4037efc74d44f2af8dc22a271ce0..989b7500af98f03c35dedb27276f18356ce86e6a 100644
--- a/tests/googletest/feature_creation/feature_generation/test_abs_node.cc
+++ b/tests/googletest/feature_creation/feature_generation/test_abs_node.cc
@@ -142,6 +142,25 @@ namespace
         {}
     }
 
+    TEST_F(AbsNodeTest, HardCopyTest)
+    {
+        _abs_test = std::make_shared<AbsNode>(_phi[0], 5, 1e-50, 1e50);
+        node_ptr copy_test = _abs_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        EXPECT_EQ(copy_test->value_ptr()[0], 1.0);
+        EXPECT_EQ(copy_test->test_value_ptr()[0], 50.0);
+
+        EXPECT_EQ(copy_test->value()[0], 1.0);
+        EXPECT_EQ(copy_test->test_value()[0], 50.0);
+
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "m");
+
+        EXPECT_STREQ(copy_test->expr().c_str(), "(|A|)");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), "0|abs");
+    }
+
     TEST_F(AbsNodeTest, AttributesTest)
     {
         _abs_test = std::make_shared<AbsNode>(_phi[0], 5, 1e-50, 1e50);
diff --git a/tests/googletest/feature_creation/feature_generation/test_add_node.cc b/tests/googletest/feature_creation/feature_generation/test_add_node.cc
index 7a721416b9952e6cb0f73808b1e09f1cd1cafe8f..329bb0bc73f31cc1a7d3016c451532eb65aa5653 100644
--- a/tests/googletest/feature_creation/feature_generation/test_add_node.cc
+++ b/tests/googletest/feature_creation/feature_generation/test_add_node.cc
@@ -151,6 +151,27 @@ namespace
         {}
     }
 
+    TEST_F(AddNodeTest, HardCopyTest)
+    {
+        _add_test = std::make_shared<AddNode>(_phi[0], _phi[1], 5, 1e-50, 1e50);
+        _add_test = std::make_shared<AddNode>(_add_test, _phi[1], 6, 1e-50, 1e50);
+
+        node_ptr copy_test = _add_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 2);
+
+        EXPECT_EQ(copy_test->value_ptr()[0], 21.0);
+        EXPECT_EQ(copy_test->test_value_ptr()[0], 105.0);
+
+        EXPECT_EQ(copy_test->value()[0], 21.0);
+        EXPECT_EQ(copy_test->test_value()[0], 105.0);
+
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "m");
+
+        EXPECT_STREQ(copy_test->expr().c_str(), "((A + B) + B)");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), "0|1|add|1|add");
+    }
+
     TEST_F(AddNodeTest, AttributesTest)
     {
         _add_test = std::make_shared<AddNode>(_phi[0], _phi[1], 5, 1e-50, 1e50);
diff --git a/tests/googletest/feature_creation/feature_generation/test_cb_node.cc b/tests/googletest/feature_creation/feature_generation/test_cb_node.cc
index dcb9552d4ea84f4411a82c3b9942bf30290cb086..a658d9de2c8142d17efa6db8fb953cd606424a7b 100644
--- a/tests/googletest/feature_creation/feature_generation/test_cb_node.cc
+++ b/tests/googletest/feature_creation/feature_generation/test_cb_node.cc
@@ -139,6 +139,26 @@ namespace
         }
     }
 
+    TEST_F(CbNodeTest, HardCopyTest)
+    {
+        _cb_test = std::make_shared<CbNode>(_phi[0], 5, 1e-50, 1e50);
+
+        node_ptr copy_test = _cb_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        EXPECT_EQ(copy_test->value_ptr()[1], 8.0);
+        EXPECT_EQ(copy_test->test_value_ptr()[0], 8.0);
+
+        EXPECT_EQ(copy_test->value()[1], 8.0);
+        EXPECT_EQ(copy_test->test_value()[0], 8.0);
+
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "m^3");
+
+        EXPECT_STREQ(copy_test->expr().c_str(), "(A^3)");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), "0|cb");
+    }
+
     TEST_F(CbNodeTest, AttributesTest)
     {
         _cb_test = std::make_shared<CbNode>(_phi[0], 5, 1e-50, 1e50);
diff --git a/tests/googletest/feature_creation/feature_generation/test_cbrt_node.cc b/tests/googletest/feature_creation/feature_generation/test_cbrt_node.cc
index 6317a29e917b1836b2880422ce222ac83d306ba7..8de70ccc7dbaece3459afde0235cfadda15c5e20 100644
--- a/tests/googletest/feature_creation/feature_generation/test_cbrt_node.cc
+++ b/tests/googletest/feature_creation/feature_generation/test_cbrt_node.cc
@@ -164,6 +164,26 @@ namespace
         }
     }
 
+    TEST_F(CbrtNodeTest, HardCopyTest)
+    {
+        _cbrt_test = std::make_shared<CbrtNode>(_phi[0], 5, 1e-50, 1e50);
+
+        node_ptr copy_test = _cbrt_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        EXPECT_EQ(copy_test->value_ptr()[3], 2.0);
+        EXPECT_EQ(copy_test->test_value_ptr()[0], 2.0);
+
+        EXPECT_EQ(copy_test->value()[3], 2.0);
+        EXPECT_EQ(copy_test->test_value()[0], 2.0);
+
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "m^0.333333");
+
+        EXPECT_STREQ(copy_test->expr().c_str(), "cbrt(A)");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), "0|cbrt");
+    }
+
     TEST_F(CbrtNodeTest, AttributesTest)
     {
         _cbrt_test = std::make_shared<CbrtNode>(_phi[0], 5, 1e-50, 1e50);
diff --git a/tests/googletest/feature_creation/feature_generation/test_cos_node.cc b/tests/googletest/feature_creation/feature_generation/test_cos_node.cc
index 23da36c272f47ea70bc699b069854a3667ddf3fb..0b54bc0d490fc95cdb227ca6c3d3c075d23eb0df 100644
--- a/tests/googletest/feature_creation/feature_generation/test_cos_node.cc
+++ b/tests/googletest/feature_creation/feature_generation/test_cos_node.cc
@@ -140,6 +140,26 @@ namespace
         {}
     }
 
+    TEST_F(CosNodeTest, HardCopyTest)
+    {
+        _cos_test = std::make_shared<CosNode>(_phi[0], 5, 1e-50, 1e50);
+
+        node_ptr copy_test = _cos_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        EXPECT_EQ(copy_test->value_ptr()[0], 1.0);
+        EXPECT_EQ(copy_test->test_value_ptr()[0], 1.0);
+
+        EXPECT_EQ(copy_test->value()[0], 1.0);
+        EXPECT_EQ(copy_test->test_value()[0], 1.0);
+
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "Unitless");
+
+        EXPECT_STREQ(copy_test->expr().c_str(), "cos(A)");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), "0|cos");
+    }
+
     TEST_F(CosNodeTest, AttributesTest)
     {
         _cos_test = std::make_shared<CosNode>(_phi[0], 5, 1e-50, 1e50);
diff --git a/tests/googletest/feature_creation/feature_generation/test_div_node.cc b/tests/googletest/feature_creation/feature_generation/test_div_node.cc
index 5a4997ed351d7fd15bf1953f6bb4cd69bad200fa..a90af33f6e60f283c7c04e6f85d912909a9c3bc7 100644
--- a/tests/googletest/feature_creation/feature_generation/test_div_node.cc
+++ b/tests/googletest/feature_creation/feature_generation/test_div_node.cc
@@ -201,6 +201,27 @@ namespace
         }
     }
 
+    TEST_F(DivNodeTest, HardCopyTest)
+    {
+        _div_test = std::make_shared<DivNode>(_phi[0], _phi[1], 5, 1e-50, 1e50);
+        _div_test = std::make_shared<DivNode>(_div_test, _phi[1], 6, 1e-50, 1e50);
+
+        node_ptr copy_test = _div_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 2);
+
+        EXPECT_EQ(copy_test->value_ptr()[0], 0.01);
+        EXPECT_EQ(copy_test->test_value_ptr()[0], 0.002);
+
+        EXPECT_EQ(copy_test->value()[0], 0.01);
+        EXPECT_EQ(copy_test->test_value()[0], 0.002);
+
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "m^-1");
+
+        EXPECT_STREQ(copy_test->expr().c_str(), "((A / B) / B)");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), "0|1|div|1|div");
+    }
+
     TEST_F(DivNodeTest, AttributesTest)
     {
         _div_test = std::make_shared<DivNode>(_phi[0], _phi[1], 5, 1e-50, 1e50);
diff --git a/tests/googletest/feature_creation/feature_generation/test_exp_node.cc b/tests/googletest/feature_creation/feature_generation/test_exp_node.cc
index ea994d6994b84ec3de3791310495a5279df7bf3b..1337adbc4ea7672b686833d5a81a8df356c5ac6d 100644
--- a/tests/googletest/feature_creation/feature_generation/test_exp_node.cc
+++ b/tests/googletest/feature_creation/feature_generation/test_exp_node.cc
@@ -169,6 +169,26 @@ namespace
         {}
     }
 
+    TEST_F(ExpNodeTest, HardCopyTest)
+    {
+        _exp_test = std::make_shared<ExpNode>(_phi[0], 5, 1e-50, 1e50);
+
+        node_ptr copy_test = _exp_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        EXPECT_EQ(copy_test->value_ptr()[0], 1.0);
+        EXPECT_EQ(copy_test->test_value_ptr()[0], 1.0);
+
+        EXPECT_EQ(copy_test->value()[0], 1.0);
+        EXPECT_EQ(copy_test->test_value()[0], 1.0);
+
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "Unitless");
+
+        EXPECT_STREQ(copy_test->expr().c_str(), "exp(A)");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), "0|exp");
+    }
+
     TEST_F(ExpNodeTest, AttributesTest)
     {
         _exp_test = std::make_shared<ExpNode>(_phi[0], 5, 1e-50, 1e50);
diff --git a/tests/googletest/feature_creation/feature_generation/test_feat_node.cc b/tests/googletest/feature_creation/feature_generation/test_feat_node.cc
index 852a83952f7cce3d36ed75eb18e1d480b1f8e1e5..51480b42dd6fce9cb95f4e4df39b841be095c737 100644
--- a/tests/googletest/feature_creation/feature_generation/test_feat_node.cc
+++ b/tests/googletest/feature_creation/feature_generation/test_feat_node.cc
@@ -11,14 +11,13 @@
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include <feature_creation/node/FeatureNode.hpp>
 #include <feature_creation/node/value_storage/nodes_value_containers.hpp>
 #include <feature_creation/node/FeatureNode.hpp>
 #include "gtest/gtest.h"
 
 namespace
 {
-    class FeatNodeTest : public ::testing::Test
+    class FeatureNodeTest : public ::testing::Test
     {
     protected:
         void SetUp() override
@@ -28,10 +27,10 @@ namespace
             _value_1 = {1.0, 2.0, 3.0, 4.0};
             _test_value_1 =  {5.0};
 
-            _value_2 = {10.0, 10.0, 10.0, 10.0};
+            _value_2 = {10.0, 10.0, 10.0, 1.0};
             _test_value_2 =  {10.0};
 
-            _value_3 = {1.0, 2.0, 3.0, 1.0/0.0};
+            _value_3 = {1.0, 2.0, 3.0, 1.0};
             _test_value_3 =  {5.0};
         }
 
@@ -45,11 +44,30 @@ namespace
         std::vector<double> _test_value_3;
     };
 
-    TEST_F(FeatNodeTest, ConstructorTest)
+    TEST_F(FeatureNodeTest, ConstructorTest)
     {
-        node_ptr feat_1 = std::make_shared<FeatureNode>(0, "A", _value_1, _test_value_1, Unit("m"));
-        node_ptr feat_2 = std::make_shared<FeatureNode>(1, "B", _value_2, _test_value_2, Unit());
-        node_ptr feat_3 = std::make_shared<FeatureNode>(2, "C", _value_3, _test_value_3, Unit("m"));
+        std::shared_ptr<FeatureNode> feat_1 = std::make_shared<FeatureNode>(
+            0,
+            "A",
+            _value_1,
+            _test_value_1,
+            Unit("m")
+        );
+        std::shared_ptr<FeatureNode> feat_2 = std::make_shared<FeatureNode>(
+            1,
+            "B",
+            _value_2,
+            _test_value_2,
+            Unit()
+        );
+        std::shared_ptr<FeatureNode> feat_3 = std::make_shared<FeatureNode>(
+            2,
+            "C",
+            _value_3,
+            _test_value_3,
+            Unit("m")
+        );
+        node_ptr feat_4 = feat_1->hard_copy();
 
         EXPECT_FALSE(feat_1->is_const());
         EXPECT_FALSE(feat_1->is_nan());
@@ -60,9 +78,11 @@ namespace
         EXPECT_EQ(feat_1->test_value()[0], _test_value_1[0]);
         EXPECT_EQ(feat_1->value_ptr()[0], _value_1[0]);
         EXPECT_EQ(feat_1->test_value_ptr()[0], _test_value_1[0]);
+        EXPECT_EQ(feat_1->rung(), 0);
         EXPECT_EQ(feat_1->n_feats(), 0);
+        EXPECT_EQ(feat_1->sort_score(10), 0);
 
-        EXPECT_TRUE(feat_2->is_const());
+        EXPECT_FALSE(feat_2->is_const());
         EXPECT_FALSE(feat_2->is_nan());
         EXPECT_STREQ(feat_2->unit().toString().c_str(), "Unitless");
         EXPECT_STREQ(feat_2->expr().c_str(), "B");
@@ -71,10 +91,12 @@ namespace
         EXPECT_EQ(feat_2->test_value()[0], _test_value_2[0]);
         EXPECT_EQ(feat_2->value_ptr()[0], _value_2[0]);
         EXPECT_EQ(feat_2->test_value_ptr()[0], _test_value_2[0]);
+        EXPECT_EQ(feat_2->rung(), 0);
         EXPECT_EQ(feat_2->n_feats(), 0);
+        EXPECT_EQ(feat_2->sort_score(10), 1);
 
         EXPECT_FALSE(feat_3->is_const());
-        EXPECT_TRUE(feat_3->is_nan());
+        EXPECT_FALSE(feat_3->is_nan());
         EXPECT_STREQ(feat_3->unit().toString().c_str(), "m");
         EXPECT_STREQ(feat_3->expr().c_str(), "C");
         EXPECT_STREQ(feat_3->postfix_expr().c_str(), "2");
@@ -82,6 +104,21 @@ namespace
         EXPECT_EQ(feat_3->test_value()[0], _test_value_3[0]);
         EXPECT_EQ(feat_3->value_ptr()[0], _value_3[0]);
         EXPECT_EQ(feat_3->test_value_ptr()[0], _test_value_3[0]);
+        EXPECT_EQ(feat_3->rung(), 0);
         EXPECT_EQ(feat_3->n_feats(), 0);
+        EXPECT_EQ(feat_3->sort_score(10), 2);
+
+        EXPECT_FALSE(feat_4->is_const());
+        EXPECT_FALSE(feat_4->is_nan());
+        EXPECT_STREQ(feat_4->unit().toString().c_str(), "m");
+        EXPECT_STREQ(feat_4->expr().c_str(), "A");
+        EXPECT_STREQ(feat_4->postfix_expr().c_str(), "0");
+        EXPECT_EQ(feat_4->value()[0], _value_1[0]);
+        EXPECT_EQ(feat_4->test_value()[0], _test_value_1[0]);
+        EXPECT_EQ(feat_4->value_ptr()[0], _value_1[0]);
+        EXPECT_EQ(feat_4->test_value_ptr()[0], _test_value_1[0]);
+        EXPECT_EQ(feat_4->rung(), 0);
+        EXPECT_EQ(feat_4->n_feats(), 0);
+        EXPECT_EQ(feat_4->sort_score(10), 0);
     }
 }
diff --git a/tests/googletest/feature_creation/feature_generation/test_inv_node.cc b/tests/googletest/feature_creation/feature_generation/test_inv_node.cc
index 3d844655e9e701dcea4b64eb3195a0ce3686f657..56f46c633e3a69da63f49cc8d233f052ef54acd2 100644
--- a/tests/googletest/feature_creation/feature_generation/test_inv_node.cc
+++ b/tests/googletest/feature_creation/feature_generation/test_inv_node.cc
@@ -163,6 +163,26 @@ namespace
         {}
     }
 
+    TEST_F(InvNodeTest, HardCopyTest)
+    {
+        _inv_test = std::make_shared<InvNode>(_phi[0], 5, 1e-50, 1e50);
+
+        node_ptr copy_test = _inv_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        EXPECT_EQ(copy_test->value_ptr()[1], 0.5);
+        EXPECT_EQ(copy_test->test_value_ptr()[0], 0.5);
+
+        EXPECT_EQ(copy_test->value()[1], 0.5);
+        EXPECT_EQ(copy_test->test_value()[0], 0.50);
+
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "m^-1");
+
+        EXPECT_STREQ(copy_test->expr().c_str(), "(1.0 / A)");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), "0|inv");
+    }
+
     TEST_F(InvNodeTest, AttributesTest)
     {
         _inv_test = std::make_shared<InvNode>(_phi[0], 5, 1e-50, 1e50);
diff --git a/tests/googletest/feature_creation/feature_generation/test_log_node.cc b/tests/googletest/feature_creation/feature_generation/test_log_node.cc
index f7a948841804684aae885dc97274e7a3f635eb06..872d087d46d6189f108079b1d569ad6a7abce9ec 100644
--- a/tests/googletest/feature_creation/feature_generation/test_log_node.cc
+++ b/tests/googletest/feature_creation/feature_generation/test_log_node.cc
@@ -263,6 +263,26 @@ namespace
         {}
     }
 
+    TEST_F(LogNodeTest, HardCopyTest)
+    {
+        _log_test = std::make_shared<LogNode>(_phi[0], 5, 1e-50, 1e50);
+
+        node_ptr copy_test = _log_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        EXPECT_EQ(copy_test->value_ptr()[0], 0.0);
+        EXPECT_EQ(copy_test->test_value_ptr()[0], 0.0);
+
+        EXPECT_EQ(copy_test->value()[0], 0.0);
+        EXPECT_EQ(copy_test->test_value()[0], 0.0);
+
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "Unitless");
+
+        EXPECT_STREQ(copy_test->expr().c_str(), "ln(A)");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), "0|log");
+    }
+
     TEST_F(LogNodeTest, AttributesTest)
     {
         _log_test = std::make_shared<LogNode>(_phi[0], 5, 1e-50, 1e50);
diff --git a/tests/googletest/feature_creation/feature_generation/test_model_node.cc b/tests/googletest/feature_creation/feature_generation/test_model_node.cc
index d1ad4c00dfc5f0b67ca9f02d0322f971bd7234a3..9b50206945ff2139655e1d3d243256021f41eb6d 100644
--- a/tests/googletest/feature_creation/feature_generation/test_model_node.cc
+++ b/tests/googletest/feature_creation/feature_generation/test_model_node.cc
@@ -50,6 +50,7 @@ namespace
         std::shared_ptr<ModelNode> feat_1 = std::make_shared<ModelNode>(0, 1, "A", "$A$", "0", "A", _value_1, _test_value_1,  std::vector<std::string>(1, "A"), Unit("m"));
         std::shared_ptr<ModelNode> feat_2 = std::make_shared<ModelNode>(1, 1, "B", "$B$", "1", "B", _value_2, _test_value_2,  std::vector<std::string>(1, "B"), Unit());
         std::shared_ptr<ModelNode> feat_3 = std::make_shared<ModelNode>(2, 1, "C", "$C$", "2", "C", _value_3, _test_value_3,  std::vector<std::string>(1, "C"), Unit("m"));
+        node_ptr feat_4 = feat_1->hard_copy();
 
         EXPECT_FALSE(feat_1->is_const());
         EXPECT_FALSE(feat_1->is_nan());
@@ -58,8 +59,6 @@ namespace
         EXPECT_STREQ(feat_1->postfix_expr().c_str(), "0");
         EXPECT_EQ(feat_1->value()[0], _value_1[0]);
         EXPECT_EQ(feat_1->test_value()[0], _test_value_1[0]);
-        EXPECT_EQ(feat_1->value_ptr()[0], _value_1[0]);
-        EXPECT_EQ(feat_1->test_value_ptr()[0], _test_value_1[0]);
         EXPECT_EQ(feat_1->rung(), 1);
         EXPECT_EQ(feat_1->n_feats(), 0);
         EXPECT_EQ(feat_1->n_feats(), 0);
@@ -71,8 +70,6 @@ namespace
         EXPECT_STREQ(feat_2->postfix_expr().c_str(), "1");
         EXPECT_EQ(feat_2->value()[0], _value_2[0]);
         EXPECT_EQ(feat_2->test_value()[0], _test_value_2[0]);
-        EXPECT_EQ(feat_2->value_ptr()[0], _value_2[0]);
-        EXPECT_EQ(feat_2->test_value_ptr()[0], _test_value_2[0]);
         EXPECT_EQ(feat_2->rung(), 1);
         EXPECT_EQ(feat_2->n_feats(), 0);
         EXPECT_EQ(feat_2->n_feats(), 0);
@@ -84,10 +81,19 @@ namespace
         EXPECT_STREQ(feat_3->postfix_expr().c_str(), "2");
         EXPECT_EQ(feat_3->value()[0], _value_3[0]);
         EXPECT_EQ(feat_3->test_value()[0], _test_value_3[0]);
-        EXPECT_EQ(feat_3->value_ptr()[0], _value_3[0]);
-        EXPECT_EQ(feat_3->test_value_ptr()[0], _test_value_3[0]);
         EXPECT_EQ(feat_3->rung(), 1);
         EXPECT_EQ(feat_3->n_feats(), 0);
         EXPECT_EQ(feat_3->n_feats(), 0);
+
+        EXPECT_FALSE(feat_4->is_const());
+        EXPECT_FALSE(feat_4->is_nan());
+        EXPECT_STREQ(feat_4->unit().toString().c_str(), "m");
+        EXPECT_STREQ(feat_4->expr().c_str(), "A");
+        EXPECT_STREQ(feat_4->postfix_expr().c_str(), "0");
+        EXPECT_EQ(feat_4->value()[0], _value_1[0]);
+        EXPECT_EQ(feat_4->test_value()[0], _test_value_1[0]);
+        EXPECT_EQ(feat_4->rung(), 1);
+        EXPECT_EQ(feat_4->n_feats(), 0);
+        EXPECT_EQ(feat_4->n_feats(), 0);
     }
 }
diff --git a/tests/googletest/feature_creation/feature_generation/test_mult_node.cc b/tests/googletest/feature_creation/feature_generation/test_mult_node.cc
index ce8839c70318671da39a0656740f495bbe6ddf23..411902d32a2198500ca6b0abb7a72dd4b7c657ad 100644
--- a/tests/googletest/feature_creation/feature_generation/test_mult_node.cc
+++ b/tests/googletest/feature_creation/feature_generation/test_mult_node.cc
@@ -152,6 +152,27 @@ namespace
         {}
     }
 
+    TEST_F(MultNodeTest, HardCopyTest)
+    {
+        _mult_test = std::make_shared<MultNode>(_phi[0], _phi[1], 5, 1e-50, 1e50);
+        _mult_test = std::make_shared<MultNode>(_mult_test, _phi[1], 6, 1e-50, 1e50);
+
+        node_ptr copy_test = _mult_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 2);
+
+        EXPECT_EQ(copy_test->value_ptr()[0], 100.0);
+        EXPECT_EQ(copy_test->test_value_ptr()[0], 12500.0);
+
+        EXPECT_EQ(copy_test->value()[0], 100.0);
+        EXPECT_EQ(copy_test->test_value()[0], 12500.0);
+
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "m^3");
+
+        EXPECT_STREQ(copy_test->expr().c_str(), "((A * B) * B)");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), "0|1|mult|1|mult");
+    }
+
     TEST_F(MultNodeTest, AttributesTest)
     {
         _mult_test = std::make_shared<MultNode>(_phi[0], _phi[1], 5, 1e-50, 1e50);
diff --git a/tests/googletest/feature_creation/feature_generation/test_neg_exp_node.cc b/tests/googletest/feature_creation/feature_generation/test_neg_exp_node.cc
index a894434d3e3cfed66de9430394de62bc03112fc7..0ed9d1ca6b638f19c0b11f51c29bdb5a866de9fa 100644
--- a/tests/googletest/feature_creation/feature_generation/test_neg_exp_node.cc
+++ b/tests/googletest/feature_creation/feature_generation/test_neg_exp_node.cc
@@ -169,6 +169,26 @@ namespace
         {}
     }
 
+    TEST_F(NegExpNodeTest, HardCopyTest)
+    {
+        _neg_exp_test = std::make_shared<NegExpNode>(_phi[0], 5, 1e-50, 1e50);
+
+        node_ptr copy_test = _neg_exp_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        EXPECT_EQ(copy_test->value_ptr()[0], 1.0);
+        EXPECT_EQ(copy_test->test_value_ptr()[0], 1.0);
+
+        EXPECT_EQ(copy_test->value()[0], 1.0);
+        EXPECT_EQ(copy_test->test_value()[0], 1.0);
+
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "Unitless");
+
+        EXPECT_STREQ(copy_test->expr().c_str(), "(exp(-1.0 * A))");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), "0|nexp");
+    }
+
     TEST_F(NegExpNodeTest, AttributesTest)
     {
         _neg_exp_test = std::make_shared<NegExpNode>(_phi[0], 5, 1e-50, 1e50);
diff --git a/tests/googletest/feature_creation/feature_generation/test_sin_node.cc b/tests/googletest/feature_creation/feature_generation/test_sin_node.cc
index 5e3f541d355bf0ce22aa4ead3b88a5cc88637616..aa4263c51b4971147704332e38339e7b0726a76f 100644
--- a/tests/googletest/feature_creation/feature_generation/test_sin_node.cc
+++ b/tests/googletest/feature_creation/feature_generation/test_sin_node.cc
@@ -140,6 +140,26 @@ namespace
         {}
     }
 
+    TEST_F(SinNodeTest, HardCopyTest)
+    {
+        _sin_test = std::make_shared<SinNode>(_phi[0], 5, 1e-50, 1e50);
+
+        node_ptr copy_test = _sin_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        EXPECT_EQ(copy_test->value_ptr()[0], 0.0);
+        EXPECT_EQ(copy_test->test_value_ptr()[0], 0.0);
+
+        EXPECT_EQ(copy_test->value()[0], 0.0);
+        EXPECT_EQ(copy_test->test_value()[0], 0.0);
+
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "Unitless");
+
+        EXPECT_STREQ(copy_test->expr().c_str(), "sin(A)");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), "0|sin");
+    }
+
     TEST_F(SinNodeTest, AttributesTest)
     {
         _sin_test = std::make_shared<SinNode>(_phi[0], 5, 1e-50, 1e50);
diff --git a/tests/googletest/feature_creation/feature_generation/test_six_pow_node.cc b/tests/googletest/feature_creation/feature_generation/test_six_pow_node.cc
index f7ae6b719cb9e2a5ea1a22a03da7c08d2328ac68..e5d3d0d81c55a61d9095fcc9f02b9ba2c422edc5 100644
--- a/tests/googletest/feature_creation/feature_generation/test_six_pow_node.cc
+++ b/tests/googletest/feature_creation/feature_generation/test_six_pow_node.cc
@@ -166,6 +166,26 @@ namespace
         }
     }
 
+    TEST_F(SixPowNodeTest, HardCopyTest)
+    {
+        _six_pow_test = std::make_shared<SixPowNode>(_phi[0], 5, 1e-50, 1e50);
+
+        node_ptr copy_test = _six_pow_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        EXPECT_EQ(copy_test->value_ptr()[1], 64.0);
+        EXPECT_EQ(copy_test->test_value_ptr()[0], 64.0);
+
+        EXPECT_EQ(copy_test->value()[1], 64.0);
+        EXPECT_EQ(copy_test->test_value()[0], 64.0);
+
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "m^6");
+
+        EXPECT_STREQ(copy_test->expr().c_str(), "(A^6)");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), "0|sp");
+    }
+
     TEST_F(SixPowNodeTest, AttributesTest)
     {
         _six_pow_test = std::make_shared<SixPowNode>(_phi[0], 5, 1e-50, 1e50);
diff --git a/tests/googletest/feature_creation/feature_generation/test_sq_node.cc b/tests/googletest/feature_creation/feature_generation/test_sq_node.cc
index ac56f17a6a831e20421cc96c319d0a9cbe7873ab..632899ccba12cab652cade7202268e30b4ae643c 100644
--- a/tests/googletest/feature_creation/feature_generation/test_sq_node.cc
+++ b/tests/googletest/feature_creation/feature_generation/test_sq_node.cc
@@ -126,6 +126,26 @@ namespace
         }
     }
 
+    TEST_F(SqNodeTest, HardCopyTest)
+    {
+        _sq_test = std::make_shared<SqNode>(_phi[0], 5, 1e-50, 1e50);
+
+        node_ptr copy_test = _sq_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        EXPECT_EQ(copy_test->value_ptr()[1], 4.0);
+        EXPECT_EQ(copy_test->test_value_ptr()[0], 4.0);
+
+        EXPECT_EQ(copy_test->value()[1], 4.0);
+        EXPECT_EQ(copy_test->test_value()[0], 4.0);
+
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "m^2");
+
+        EXPECT_STREQ(copy_test->expr().c_str(), "(A^2)");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), "0|sq");
+    }
+
     TEST_F(SqNodeTest, AttributesTest)
     {
         _sq_test = std::make_shared<SqNode>(_phi[0], 5, 1e-50, 1e50);
diff --git a/tests/googletest/feature_creation/feature_generation/test_sqrt_node.cc b/tests/googletest/feature_creation/feature_generation/test_sqrt_node.cc
index 16d9dd291ed942e75291cca3d5387ae1fb7d6fa2..254fc49d7998fcd2b977622849fc308b3f6a05d3 100644
--- a/tests/googletest/feature_creation/feature_generation/test_sqrt_node.cc
+++ b/tests/googletest/feature_creation/feature_generation/test_sqrt_node.cc
@@ -177,6 +177,26 @@ namespace
         }
     }
 
+    TEST_F(SqrtNodeTest, HardCopyTest)
+    {
+        _sqrt_test = std::make_shared<SqrtNode>(_phi[0], 5, 1e-50, 1e50);
+
+        node_ptr copy_test = _sqrt_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        EXPECT_EQ(copy_test->value_ptr()[3], 2.0);
+        EXPECT_EQ(copy_test->test_value_ptr()[0], 2.0);
+
+        EXPECT_EQ(copy_test->value()[3], 2.0);
+        EXPECT_EQ(copy_test->test_value()[0], 2.0);
+
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "m^0.5");
+
+        EXPECT_STREQ(copy_test->expr().c_str(), "sqrt(A)");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), "0|sqrt");
+    }
+
     TEST_F(SqrtNodeTest, AttributesTest)
     {
         _sqrt_test = std::make_shared<SqrtNode>(_phi[0], 5, 1e-50, 1e50);
diff --git a/tests/googletest/feature_creation/feature_generation/test_sub_node.cc b/tests/googletest/feature_creation/feature_generation/test_sub_node.cc
index ab5559d790214a0f63be7ee743730d21ba01ef1f..dc308a944e2d95ef4af2d63ed13a2d48ff47bf8e 100644
--- a/tests/googletest/feature_creation/feature_generation/test_sub_node.cc
+++ b/tests/googletest/feature_creation/feature_generation/test_sub_node.cc
@@ -151,6 +151,27 @@ namespace
         {}
     }
 
+    TEST_F(SubNodeTest, HardCopyTest)
+    {
+        _sub_test = std::make_shared<SubNode>(_phi[0], _phi[1], 5, 1e-50, 1e50);
+        _sub_test = std::make_shared<SubNode>(_sub_test, _phi[1], 6, 1e-50, 1e50);
+
+        node_ptr copy_test = _sub_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 2);
+
+        EXPECT_EQ(copy_test->value_ptr()[0], -19.0);
+        EXPECT_EQ(copy_test->test_value_ptr()[0], -95.0);
+
+        EXPECT_EQ(copy_test->value()[0], -19.0);
+        EXPECT_EQ(copy_test->test_value()[0], -95.0);
+
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "m");
+
+        EXPECT_STREQ(copy_test->expr().c_str(), "((A - B) - B)");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), "0|1|sub|1|sub");
+    }
+
     TEST_F(SubNodeTest, AttributesTest)
     {
         _sub_test = std::make_shared<SubNode>(_phi[0], _phi[1], 5, 1e-50, 1e50);
diff --git a/tests/googletest/feature_creation/parameterization/test_abs_diff_node.cc b/tests/googletest/feature_creation/parameterization/test_abs_diff_node.cc
index 4a39804331815cebd145f1a08c85eaea2a1949e0..f0688b45524d98ecc17dae85aae169309fc67d5e 100644
--- a/tests/googletest/feature_creation/parameterization/test_abs_diff_node.cc
+++ b/tests/googletest/feature_creation/parameterization/test_abs_diff_node.cc
@@ -62,7 +62,10 @@ namespace
             _phi = {_feat_1, _feat_2};
             _a = distribution_params(generator);
             _alpha = distribution_params(generator);
+
             _prop = std::vector<double>(_task_sizes_train[0], 0.0);
+            _gradient.resize(_task_sizes_train[0] * 2, 1.0);
+            _dfdp.resize(_task_sizes_train[0]);
 
             allowed_op_funcs::abs_diff(_task_sizes_train[0], _phi[0]->value_ptr(), _phi[1]->value_ptr(), _alpha, _a, _prop.data());
 
@@ -75,6 +78,8 @@ namespace
 
         std::vector<node_ptr> _phi;
         std::vector<double> _prop;
+        std::vector<double> _gradient;
+        std::vector<double> _dfdp;
         std::vector<int> _task_sizes_train;
         std::vector<int> _task_sizes_test;
 
@@ -131,6 +136,42 @@ namespace
         }
     }
 
+    TEST_F(AbsDiffParamNodeTest, HardCopyTest)
+    {
+        unsigned long int feat_ind = _phi.size();
+        _abs_diff_test = std::make_shared<AbsDiffParamNode>(_phi[0], _phi[1], feat_ind, 1e-50, 1e50, _optimizer);
+        node_ptr copy_test = _abs_diff_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        std::vector<double> expected_val(90, 0.0);
+
+        allowed_op_funcs::abs_diff(90, _phi[0]->value_ptr(), _phi[1]->value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->value()[0] - expected_val[0]), 1e-10);
+
+        allowed_op_funcs::abs_diff(10, _phi[0]->test_value_ptr(), _phi[1]->test_value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->test_value()[0] - expected_val[0]), 1e-10);
+
+        std::stringstream postfix;
+        postfix << "0|1|abd: " << std::setprecision(13) << std::scientific << copy_test->parameters()[0] << ',' << copy_test->parameters()[1];
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "m");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), postfix.str().c_str());
+
+        copy_test->gradient(_gradient.data(), _dfdp.data());
+
+        double v1 = copy_test->feat(0)->value_ptr()[0];
+        double v2 = copy_test->feat(1)->value_ptr()[0];
+
+        double alpha = copy_test->parameters()[0];
+        double a = copy_test->parameters()[1];
+        double df_dp = -1.0 * util_funcs::sign(v1 - (alpha * v2 + a));
+
+        EXPECT_EQ(_gradient[0], df_dp * v2);
+        EXPECT_EQ(_gradient[_task_sizes_train[0]], df_dp);
+    }
+
     TEST_F(AbsDiffParamNodeTest, AttributesTest)
     {
         unsigned long int feat_ind = _phi.size();
diff --git a/tests/googletest/feature_creation/parameterization/test_abs_node.cc b/tests/googletest/feature_creation/parameterization/test_abs_node.cc
index 3705565571cabc9f9249a039795de9e2bab51426..cf7bf7c62e720c135f79ecaaa609c306538e0fdb 100644
--- a/tests/googletest/feature_creation/parameterization/test_abs_node.cc
+++ b/tests/googletest/feature_creation/parameterization/test_abs_node.cc
@@ -53,6 +53,9 @@ namespace
             _alpha = distribution_params(generator);
 
             _prop = std::vector<double>(_task_sizes_train[0], 0.0);
+            _gradient.resize(_task_sizes_train[0] * 2, 1.0);
+            _dfdp.resize(_task_sizes_train[0]);
+
             allowed_op_funcs::abs(_task_sizes_train[0], _phi[0]->value_ptr(), _alpha, _a, _prop.data());
 
             _optimizer = nlopt_wrapper::get_optimizer("regression",_task_sizes_train, _prop, 1);
@@ -63,6 +66,8 @@ namespace
 
         std::vector<node_ptr> _phi;
         std::vector<double> _prop;
+        std::vector<double> _gradient;
+        std::vector<double> _dfdp;
         std::vector<int> _task_sizes_train;
         std::vector<int> _task_sizes_test;
 
@@ -117,6 +122,41 @@ namespace
         }
     }
 
+    TEST_F(AbsParamNodeTest, HardCopyTest)
+    {
+        unsigned long int feat_ind = _phi.size();
+        _abs_test = std::make_shared<AbsParamNode>(_phi[0], feat_ind, 1e-50, 1e50, _optimizer);
+
+        node_ptr copy_test = _abs_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        std::vector<double> expected_val(900, 0.0);
+
+        allowed_op_funcs::abs(900, _phi[0]->value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->value()[0] - expected_val[0]), 1e-10);
+
+        allowed_op_funcs::abs(10, _phi[0]->test_value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->test_value()[0] - expected_val[0]), 1e-10);
+
+        std::stringstream postfix;
+        postfix << "0|abs: " << std::setprecision(13) << std::scientific << copy_test->parameters()[0] << ',' << copy_test->parameters()[1];
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "m");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), postfix.str().c_str());
+
+        copy_test->gradient(_gradient.data(), _dfdp.data());
+        double v1 = copy_test->feat(0)->value_ptr()[0];
+
+        double alpha = copy_test->parameters()[0];
+        double a = copy_test->parameters()[1];
+        double df_dp = util_funcs::sign(alpha * v1 + a);
+
+        EXPECT_EQ(_gradient[0], df_dp * v1);
+        EXPECT_EQ(_gradient[_task_sizes_train[0]], df_dp);
+    }
+
     TEST_F(AbsParamNodeTest, AttributesTest)
     {
         unsigned long int feat_ind = _phi.size();
diff --git a/tests/googletest/feature_creation/parameterization/test_add_node.cc b/tests/googletest/feature_creation/parameterization/test_add_node.cc
index 185d5ca2dfeb740e5fee1775787a5329bb9decc6..9399eacf28b99ac9b832c9b20c1ab3325556209a 100644
--- a/tests/googletest/feature_creation/parameterization/test_add_node.cc
+++ b/tests/googletest/feature_creation/parameterization/test_add_node.cc
@@ -62,7 +62,11 @@ namespace
             _phi = {_feat_1, _feat_2};
             _a = distribution_params(generator);
             _alpha = distribution_params(generator);
+
             _prop = std::vector<double>(_task_sizes_train[0], 0.0);
+            _gradient.resize(_task_sizes_train[0] * 2, 1.0);
+            _dfdp.resize(_task_sizes_train[0]);
+
             allowed_op_funcs::add(_task_sizes_train[0], _phi[0]->value_ptr(), _phi[1]->value_ptr(), _alpha, _a, _prop.data());
 
             _optimizer = nlopt_wrapper::get_optimizer("regression",_task_sizes_train, _prop, 1);
@@ -74,6 +78,8 @@ namespace
 
         std::vector<node_ptr> _phi;
         std::vector<double> _prop;
+        std::vector<double> _gradient;
+        std::vector<double> _dfdp;
         std::vector<int> _task_sizes_train;
         std::vector<int> _task_sizes_test;
 
@@ -128,6 +134,42 @@ namespace
         }
     }
 
+    TEST_F(AddParamNodeTest, HardCopyTest)
+    {
+        unsigned long int feat_ind = _phi.size();
+        _add_test = std::make_shared<AddParamNode>(_phi[0], _phi[1], feat_ind, 1e-50, 1e50, _optimizer);
+
+        node_ptr copy_test = _add_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        std::vector<double> expected_val(90, 0.0);
+
+        allowed_op_funcs::add(90, _phi[0]->value_ptr(), _phi[1]->value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->value()[0] - expected_val[0]), 1e-10);
+
+        allowed_op_funcs::add(10, _phi[0]->test_value_ptr(), _phi[1]->test_value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->test_value()[0] - expected_val[0]), 1e-10);
+
+        std::stringstream postfix;
+        postfix << "0|1|add: " << std::setprecision(13) << std::scientific << copy_test->parameters()[0] << ',' << copy_test->parameters()[1];
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "m");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), postfix.str().c_str());
+
+        copy_test->gradient(_gradient.data(), _dfdp.data());
+        double v1 = copy_test->feat(0)->value_ptr()[0];
+        double v2 = copy_test->feat(1)->value_ptr()[0];
+
+        // For addition, df/d(alpha) = v2 and df/d(a) = 1, so the outer
+        // derivative factor is exactly 1 (the parameters do not appear in it).
+        double df_dp = 1.0;
+
+        EXPECT_EQ(_gradient[0], df_dp * v2);
+        EXPECT_EQ(_gradient[_task_sizes_train[0]], df_dp);
+    }
+
     TEST_F(AddParamNodeTest, AttributesTest)
     {
         unsigned long int feat_ind = _phi.size();
diff --git a/tests/googletest/feature_creation/parameterization/test_cb_node.cc b/tests/googletest/feature_creation/parameterization/test_cb_node.cc
index 34cb54cdd87e43e423773a2880105d3de91dc680..81e5d78de8c7fa79a96db2bccb66e89d90a04683 100644
--- a/tests/googletest/feature_creation/parameterization/test_cb_node.cc
+++ b/tests/googletest/feature_creation/parameterization/test_cb_node.cc
@@ -64,7 +64,11 @@ namespace
             _phi = {_feat_1, _feat_2};
             _a = distribution_params(generator);
             _alpha = distribution_params(generator);
+
             _prop = std::vector<double>(_task_sizes_train[0], 0.0);
+            _gradient.resize(_task_sizes_train[0] * 2, 1.0);
+            _dfdp.resize(_task_sizes_train[0]);
+
             allowed_op_funcs::cb(_task_sizes_train[0], _phi[1]->value_ptr(), _alpha, _a, _prop.data());
             std::transform(_prop.begin(), _prop.end(), _prop.begin(), [&](double p){return p + distribution_err(generator);});
 
@@ -77,6 +81,8 @@ namespace
 
         std::vector<node_ptr> _phi;
         std::vector<double> _prop;
+        std::vector<double> _gradient;
+        std::vector<double> _dfdp;
         std::vector<int> _task_sizes_train;
         std::vector<int> _task_sizes_test;
 
@@ -131,6 +137,41 @@ namespace
         }
     }
 
+    TEST_F(CbParamNodeTest, HardCopyTest)
+    {
+        unsigned long int feat_ind = _phi.size();
+        _exp_test = std::make_shared<CbParamNode>(_phi[1], feat_ind, 1e-50, 1e50, _optimizer);
+
+        node_ptr copy_test = _exp_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        std::vector<double> expected_val(900, 0.0);
+
+        allowed_op_funcs::cb(900, _phi[1]->value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->value()[0] - expected_val[0]), 1e-10);
+
+        allowed_op_funcs::cb(10, _phi[1]->test_value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->test_value()[0] - expected_val[0]), 1e-10);
+
+        std::stringstream postfix;
+        postfix << "1|cb: " << std::setprecision(13) << std::scientific << copy_test->parameters()[0] << ',' << copy_test->parameters()[1];
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "s^3");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), postfix.str().c_str());
+
+        copy_test->gradient(_gradient.data(), _dfdp.data());
+        double v1 = copy_test->feat(0)->value_ptr()[0];
+
+        double alpha = copy_test->parameters()[0];
+        double a = copy_test->parameters()[1];
+        double df_dp = 3.0 * std::pow(alpha * v1 + a, 2.0);
+
+        EXPECT_EQ(_gradient[0], df_dp * v1);
+        EXPECT_EQ(_gradient[_task_sizes_train[0]], df_dp);
+    }
+
     TEST_F(CbParamNodeTest, AttributesTest)
     {
         unsigned long int feat_ind = _phi.size();
diff --git a/tests/googletest/feature_creation/parameterization/test_cbrt_node.cc b/tests/googletest/feature_creation/parameterization/test_cbrt_node.cc
index 4139dc438141be35735f3831ecf6cb5beaf3ea62..6b3c1bef87e643ef452e9ae2f46ceed3a70f193c 100644
--- a/tests/googletest/feature_creation/parameterization/test_cbrt_node.cc
+++ b/tests/googletest/feature_creation/parameterization/test_cbrt_node.cc
@@ -62,7 +62,11 @@ namespace
             _phi = {_feat_1, _feat_2};
             _a = distribution_params(generator);
             _alpha = std::pow(distribution_params(generator), 3.0);
+
             _prop = std::vector<double>(_task_sizes_train[0], 0.0);
+            _gradient.resize(_task_sizes_train[0] * 2, 1.0);
+            _dfdp.resize(_task_sizes_train[0]);
+
             allowed_op_funcs::cbrt(_task_sizes_train[0], _phi[1]->value_ptr(), _alpha, _a, _prop.data());
 
             _optimizer = nlopt_wrapper::get_optimizer("regression",_task_sizes_train, _prop, 1);
@@ -74,6 +78,8 @@ namespace
 
         std::vector<node_ptr> _phi;
         std::vector<double> _prop;
+        std::vector<double> _gradient;
+        std::vector<double> _dfdp;
         std::vector<int> _task_sizes_train;
         std::vector<int> _task_sizes_test;
 
@@ -128,6 +134,41 @@ namespace
         }
     }
 
+    TEST_F(CbrtParamNodeTest, HardCopyTest)
+    {
+        unsigned long int feat_ind = _phi.size();
+        _cbrt_test = std::make_shared<CbrtParamNode>(_phi[1], feat_ind, 1e-50, 1e50, _optimizer);
+
+        node_ptr copy_test = _cbrt_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        std::vector<double> expected_val(900, 0.0);
+
+        allowed_op_funcs::cbrt(900, _phi[1]->value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->value()[0] - expected_val[0]), 1e-10);
+
+        allowed_op_funcs::cbrt(10, _phi[1]->test_value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->test_value()[0] - expected_val[0]), 1e-10);
+
+        std::stringstream postfix;
+        postfix << "1|cbrt: " << std::setprecision(13) << std::scientific << copy_test->parameters()[0] << ',' << copy_test->parameters()[1];
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "s^0.333333");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), postfix.str().c_str());
+
+        copy_test->gradient(_gradient.data(), _dfdp.data());
+        double v1 = copy_test->feat(0)->value_ptr()[0];
+
+        double alpha = copy_test->parameters()[0];
+        double a = copy_test->parameters()[1];
+        double df_dp = 1.0 / 3.0 * std::pow(alpha * v1 + a, -2.0 / 3.0);
+
+        EXPECT_EQ(_gradient[0], df_dp * v1);
+        EXPECT_EQ(_gradient[_task_sizes_train[0]], df_dp);
+    }
+
     TEST_F(CbrtParamNodeTest, AttributesTest)
     {
         unsigned long int feat_ind = _phi.size();
diff --git a/tests/googletest/feature_creation/parameterization/test_cos_node.cc b/tests/googletest/feature_creation/parameterization/test_cos_node.cc
index 34c554e6a2fe9688de4c377c0a1e14d9df032447..e8e988ceafafab6bb96dcfbc184b3e060e187464 100644
--- a/tests/googletest/feature_creation/parameterization/test_cos_node.cc
+++ b/tests/googletest/feature_creation/parameterization/test_cos_node.cc
@@ -66,7 +66,11 @@ namespace
 
             _a = 0.143;
             _alpha = 1.05;
+
             _prop = std::vector<double>(_task_sizes_train[0], 0.0);
+            _gradient.resize(_task_sizes_train[0] * 2, 1.0);
+            _dfdp.resize(_task_sizes_train[0]);
+
             allowed_op_funcs::cos(_task_sizes_train[0], _phi[0]->value_ptr(), _alpha, _a, _prop.data());
 
             _optimizer = nlopt_wrapper::get_optimizer("regression",_task_sizes_train, _prop, 1);
@@ -79,6 +83,8 @@ namespace
 
         std::vector<node_ptr> _phi;
         std::vector<double> _prop;
+        std::vector<double> _gradient;
+        std::vector<double> _dfdp;
         std::vector<int> _task_sizes_train;
         std::vector<int> _task_sizes_test;
 
@@ -156,6 +162,41 @@ namespace
         }
     }
 
+    TEST_F(CosParamNodeTest, HardCopyTest)
+    {
+        unsigned long int feat_ind = _phi.size();
+        _cos_test = std::make_shared<CosParamNode>(_phi[1], feat_ind, 1e-50, 1e50, _optimizer);
+
+        node_ptr copy_test = _cos_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        std::vector<double> expected_val(900, 0.0);
+
+        allowed_op_funcs::cos(900, _phi[1]->value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->value()[0] - expected_val[0]), 1e-10);
+
+        allowed_op_funcs::cos(10, _phi[1]->test_value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->test_value()[0] - expected_val[0]), 1e-10);
+
+        std::stringstream postfix;
+        postfix << "1|cos: " << std::setprecision(13) << std::scientific << copy_test->parameters()[0] << ',' << copy_test->parameters()[1];
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "Unitless");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), postfix.str().c_str());
+
+        copy_test->gradient(_gradient.data(), _dfdp.data());
+        double v1 = copy_test->feat(0)->value_ptr()[0];
+
+        double alpha = copy_test->parameters()[0];
+        double a = copy_test->parameters()[1];
+        double df_dp = -1.0 * std::sin(alpha * v1 + a);
+
+        EXPECT_EQ(_gradient[0], df_dp * v1);
+        EXPECT_EQ(_gradient[_task_sizes_train[0]], df_dp);
+    }
+
     TEST_F(CosParamNodeTest, AttributesTest)
     {
         unsigned long int feat_ind = _phi.size();
diff --git a/tests/googletest/feature_creation/parameterization/test_div_node.cc b/tests/googletest/feature_creation/parameterization/test_div_node.cc
index f5b50f5dfebbef5e3d63e039f4e715313e0bbf83..202b05e366d25fa0f22c76c890995916e75476ce 100644
--- a/tests/googletest/feature_creation/parameterization/test_div_node.cc
+++ b/tests/googletest/feature_creation/parameterization/test_div_node.cc
@@ -62,7 +62,11 @@ namespace
             _phi = {_feat_1, _feat_2};
             _a = distribution_params(generator);
             _alpha = distribution_params(generator);
+
             _prop = std::vector<double>(_task_sizes_train[0], 0.0);
+            _gradient.resize(_task_sizes_train[0] * 2, 1.0);
+            _dfdp.resize(_task_sizes_train[0]);
+
             allowed_op_funcs::div(_task_sizes_train[0], _phi[0]->value_ptr(), _phi[1]->value_ptr(), _alpha, _a, _prop.data());
 
             _optimizer = nlopt_wrapper::get_optimizer("regression",_task_sizes_train, _prop, 1);
@@ -74,6 +78,8 @@ namespace
 
         std::vector<node_ptr> _phi;
         std::vector<double> _prop;
+        std::vector<double> _gradient;
+        std::vector<double> _dfdp;
         std::vector<int> _task_sizes_train;
         std::vector<int> _task_sizes_test;
 
@@ -128,6 +134,42 @@ namespace
         }
     }
 
+    TEST_F(DivParamNodeTest, HardCopyTest)
+    {
+        unsigned long int feat_ind = _phi.size();
+        _div_test = std::make_shared<DivParamNode>(_phi[0], _phi[1], feat_ind, 1e-50, 1e50, _optimizer);
+
+        node_ptr copy_test = _div_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        std::vector<double> expected_val(90, 0.0);
+
+        allowed_op_funcs::div(90, _phi[0]->value_ptr(), _phi[1]->value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->value()[0] - expected_val[0]), 1e-10);
+
+        allowed_op_funcs::div(10, _phi[0]->test_value_ptr(), _phi[1]->test_value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->test_value()[0] - expected_val[0]), 1e-10);
+
+        std::stringstream postfix;
+        postfix << "0|1|div: " << std::setprecision(13) << std::scientific << copy_test->parameters()[0] << ',' << copy_test->parameters()[1];
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "m * s^-1");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), postfix.str().c_str());
+
+        copy_test->gradient(_gradient.data(), _dfdp.data());
+        double v1 = copy_test->feat(0)->value_ptr()[0];
+        double v2 = copy_test->feat(1)->value_ptr()[0];
+
+        double alpha = copy_test->parameters()[0];
+        double a = copy_test->parameters()[1];
+        double df_dp = -1.0 * v1 / std::pow(alpha * v2 + a, 2.0);
+
+        EXPECT_EQ(_gradient[0], df_dp * v2);
+        EXPECT_EQ(_gradient[_task_sizes_train[0]], df_dp);
+    }
+
     TEST_F(DivParamNodeTest, AttributesTest)
     {
         unsigned long int feat_ind = _phi.size();
diff --git a/tests/googletest/feature_creation/parameterization/test_exp_node.cc b/tests/googletest/feature_creation/parameterization/test_exp_node.cc
index 6e91b250171bd1c85579e31809aaae95baf2b486..19008656b7f277f611aa792f236cc71bee70bebd 100644
--- a/tests/googletest/feature_creation/parameterization/test_exp_node.cc
+++ b/tests/googletest/feature_creation/parameterization/test_exp_node.cc
@@ -69,7 +69,11 @@ namespace
 
             _a = std::log(distribution_params(generator));
             _alpha = distribution_params(generator);
+
             _prop = std::vector<double>(_task_sizes_train[0], 0.0);
+            _gradient.resize(_task_sizes_train[0] * 2, 1.0);
+            _dfdp.resize(_task_sizes_train[0]);
+
             allowed_op_funcs::exp(_task_sizes_train[0], _phi[0]->value_ptr(), _alpha, _a, _prop.data());
 
             _optimizer = nlopt_wrapper::get_optimizer("regression",_task_sizes_train, _prop, 1);
@@ -82,6 +86,8 @@ namespace
 
         std::vector<node_ptr> _phi;
         std::vector<double> _prop;
+        std::vector<double> _gradient;
+        std::vector<double> _dfdp;
         std::vector<int> _task_sizes_train;
         std::vector<int> _task_sizes_test;
 
@@ -169,6 +175,41 @@ namespace
         }
     }
 
+    TEST_F(ExpParamNodeTest, HardCopyTest)
+    {
+        unsigned long int feat_ind = _phi.size();
+        _exp_test = std::make_shared<ExpParamNode>(_phi[0], feat_ind, 1e-50, 1e50, _optimizer);
+
+        node_ptr copy_test = _exp_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        std::vector<double> expected_val(900, 0.0);
+
+        allowed_op_funcs::exp(900, _phi[0]->value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->value()[0] - expected_val[0]), 1e-10);
+
+        allowed_op_funcs::exp(10, _phi[0]->test_value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->test_value()[0] - expected_val[0]), 1e-10);
+
+        std::stringstream postfix;
+        postfix << "0|exp: " << std::setprecision(13) << std::scientific << copy_test->parameters()[0] << ',' << copy_test->parameters()[1];
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "Unitless");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), postfix.str().c_str());
+
+        copy_test->gradient(_gradient.data(), _dfdp.data());
+        double v1 = copy_test->feat(0)->value_ptr()[0];
+
+        double alpha = copy_test->parameters()[0];
+        double a = copy_test->parameters()[1];
+        double df_dp = std::exp(alpha * v1 + a);
+
+        EXPECT_EQ(_gradient[0], df_dp * v1);
+        EXPECT_EQ(_gradient[_task_sizes_train[0]], df_dp);
+    }
+
     TEST_F(ExpParamNodeTest, AttributesTest)
     {
         unsigned long int feat_ind = _phi.size();
diff --git a/tests/googletest/feature_creation/parameterization/test_inv_node.cc b/tests/googletest/feature_creation/parameterization/test_inv_node.cc
index e1f8ecca477927a1ec0129ce6b9747a2e974537b..cef2d1bb8bb9f5ba768f4835f38e7938ea9a6ada 100644
--- a/tests/googletest/feature_creation/parameterization/test_inv_node.cc
+++ b/tests/googletest/feature_creation/parameterization/test_inv_node.cc
@@ -62,7 +62,11 @@ namespace
             _phi = {_feat_1, _feat_2};
             _a = distribution_params(generator);
             _alpha = distribution_params(generator);
+
             _prop = std::vector<double>(_task_sizes_train[0], 0.0);
+            _gradient.resize(_task_sizes_train[0] * 2, 1.0);
+            _dfdp.resize(_task_sizes_train[0]);
+
             allowed_op_funcs::inv(_task_sizes_train[0], _phi[1]->value_ptr(), _alpha, _a, _prop.data());
 
             _optimizer = nlopt_wrapper::get_optimizer("regression",_task_sizes_train, _prop, 1);
@@ -74,6 +78,8 @@ namespace
 
         std::vector<node_ptr> _phi;
         std::vector<double> _prop;
+        std::vector<double> _gradient;
+        std::vector<double> _dfdp;
         std::vector<int> _task_sizes_train;
         std::vector<int> _task_sizes_test;
 
@@ -128,6 +134,41 @@ namespace
         }
     }
 
+    TEST_F(InvParamNodeTest, HardCopyTest)
+    {
+        unsigned long int feat_ind = _phi.size();
+        _inv_test = std::make_shared<InvParamNode>(_phi[1], feat_ind, 1e-50, 1e50, _optimizer);
+
+        node_ptr copy_test = _inv_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        std::vector<double> expected_val(90, 0.0);
+
+        allowed_op_funcs::inv(90, _phi[1]->value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->value()[0] - expected_val[0]), 1e-10);
+
+        allowed_op_funcs::inv(10, _phi[1]->test_value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->test_value()[0] - expected_val[0]), 1e-10);
+
+        std::stringstream postfix;
+        postfix << "1|inv: " << std::setprecision(13) << std::scientific <<copy_test->parameters()[0] << ',' << copy_test->parameters()[1];
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "s^-1");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), postfix.str().c_str());
+
+        copy_test->gradient(_gradient.data(), _dfdp.data());
+        double v1 = copy_test->feat(0)->value_ptr()[0];
+
+        double alpha = copy_test->parameters()[0];
+        double a = copy_test->parameters()[1];
+        double df_dp = -1.0 / std::pow(alpha * v1 + a, 2.0);
+
+        EXPECT_DOUBLE_EQ(_gradient[0], df_dp * v1);
+        EXPECT_DOUBLE_EQ(_gradient[_task_sizes_train[0]], df_dp);
+    }
+
     TEST_F(InvParamNodeTest, AttributesTest)
     {
         unsigned long int feat_ind = _phi.size();
diff --git a/tests/googletest/feature_creation/parameterization/test_log_node.cc b/tests/googletest/feature_creation/parameterization/test_log_node.cc
index f716721b344558dde98c0fdc2f506cd76f90e262..dccbf234488cfe2b9c25e28117601ab726fb6e06 100644
--- a/tests/googletest/feature_creation/parameterization/test_log_node.cc
+++ b/tests/googletest/feature_creation/parameterization/test_log_node.cc
@@ -71,7 +71,11 @@ namespace
 
             _a = distribution_params(generator);
             _alpha = distribution_params(generator);
+
             _prop = std::vector<double>(_task_sizes_train[0], 0.0);
+            _gradient.resize(_task_sizes_train[0] * 2, 1.0);
+            _dfdp.resize(_task_sizes_train[0]);
+
             allowed_op_funcs::log(_task_sizes_train[0], _phi[2]->value_ptr(), _alpha, _a, _prop.data());
 
             _optimizer = nlopt_wrapper::get_optimizer("regression",_task_sizes_train, _prop, 1);
@@ -80,10 +84,12 @@ namespace
         node_ptr _feat_1;
         node_ptr _feat_2;
         node_ptr _feat_3;
-        node_ptr _exp_test;
+        node_ptr _log_test;
 
         std::vector<node_ptr> _phi;
         std::vector<double> _prop;
+        std::vector<double> _gradient;
+        std::vector<double> _dfdp;
         std::vector<int> _task_sizes_train;
         std::vector<int> _task_sizes_test;
 
@@ -123,7 +129,7 @@ namespace
 
         try
         {
-            _exp_test = std::make_shared<LogParamNode>(_phi[1], feat_ind, 1e-50, 1e-40, _optimizer);
+            _log_test = std::make_shared<LogParamNode>(_phi[1], feat_ind, 1e-50, 1e-40, _optimizer);
             EXPECT_TRUE(false) << " (LogParamNode created with an absolute value above the upper bound)";
         }
         catch(const InvalidFeatureException& e)
@@ -131,7 +137,7 @@ namespace
 
         try
         {
-            _exp_test = std::make_shared<LogParamNode>(_phi[1], feat_ind, 1e49, 1e50, _optimizer);
+            _log_test = std::make_shared<LogParamNode>(_phi[1], feat_ind, 1e49, 1e50, _optimizer);
             EXPECT_TRUE(false) << " (LogParamNode created with an absolute value below the lower bound)";
         }
         catch(const InvalidFeatureException& e)
@@ -139,7 +145,7 @@ namespace
 
         try
         {
-            _exp_test = std::make_shared<LogParamNode>(_phi[3], feat_ind, 1e-50, 1e50, _optimizer);
+            _log_test = std::make_shared<LogParamNode>(_phi[3], feat_ind, 1e-50, 1e50, _optimizer);
             EXPECT_TRUE(false) << " (LogParamNode created from ExpNode)";
         }
         catch(const InvalidFeatureException& e)
@@ -147,7 +153,7 @@ namespace
 
         try
         {
-            _exp_test = std::make_shared<LogParamNode>(_phi[4], feat_ind, 1e-50, 1e50, _optimizer);
+            _log_test = std::make_shared<LogParamNode>(_phi[4], feat_ind, 1e-50, 1e50, _optimizer);
             EXPECT_TRUE(false) << " (LogParamNode created from LogNode)";
         }
         catch(const InvalidFeatureException& e)
@@ -155,7 +161,7 @@ namespace
 
         try
         {
-            _exp_test = std::make_shared<LogParamNode>(_phi[5], feat_ind, 1e-50, 1e50, _optimizer);
+            _log_test = std::make_shared<LogParamNode>(_phi[5], feat_ind, 1e-50, 1e50, _optimizer);
             EXPECT_TRUE(false) << " (LogParamNode created from NegExpNode)";
         }
         catch(const InvalidFeatureException& e)
@@ -163,8 +169,8 @@ namespace
 
         try
         {
-            _exp_test = std::make_shared<LogParamNode>(_phi[2], feat_ind, 1e-50, 1e50, _optimizer);
-            EXPECT_LT(1.0 - util_funcs::r2(_prop.data(), _exp_test->value_ptr(), 900), 1e-4);
+            _log_test = std::make_shared<LogParamNode>(_phi[2], feat_ind, 1e-50, 1e50, _optimizer);
+            EXPECT_LT(1.0 - util_funcs::r2(_prop.data(), _log_test->value_ptr(), 900), 1e-4);
         }
         catch(const InvalidFeatureException& e)
         {
@@ -172,27 +178,62 @@ namespace
         }
     }
 
+    TEST_F(LogParamNodeTest, HardCopyTest)
+    {
+        unsigned long int feat_ind = _phi.size();
+        _log_test = std::make_shared<LogParamNode>(_phi[2], feat_ind, 1e-50, 1e50, _optimizer);
+
+        node_ptr copy_test = _log_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        std::vector<double> expected_val(900, 0.0);
+
+        allowed_op_funcs::log(900, _phi[2]->value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->value()[0] - expected_val[0]), 1e-10);
+
+        allowed_op_funcs::log(10, _phi[2]->test_value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->test_value()[0] - expected_val[0]), 1e-10);
+
+        std::stringstream postfix;
+        postfix << "2|log: " << std::setprecision(13) << std::scientific <<copy_test->parameters()[0] << ',' << copy_test->parameters()[1];
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "Unitless");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), postfix.str().c_str());
+
+        copy_test->gradient(_gradient.data(), _dfdp.data());
+        double v1 = copy_test->feat(0)->value_ptr()[0];
+
+        double alpha = copy_test->parameters()[0];
+        double a = copy_test->parameters()[1];
+        double df_dp = 1.0 / (alpha * v1 + a);
+
+        EXPECT_DOUBLE_EQ(_gradient[0], df_dp * v1);
+        EXPECT_DOUBLE_EQ(_gradient[_task_sizes_train[0]], df_dp);
+    }
+
     TEST_F(LogParamNodeTest, AttributesTest)
     {
         unsigned long int feat_ind = _phi.size();
-        _exp_test = std::make_shared<LogParamNode>(_phi[2], feat_ind, 1e-50, 1e50, _optimizer);
+        _log_test = std::make_shared<LogParamNode>(_phi[2], feat_ind, 1e-50, 1e50, _optimizer);
 
-        EXPECT_EQ(_exp_test->rung(), 1);
+        EXPECT_EQ(_log_test->rung(), 1);
 
         std::vector<double> expected_val(900, 0.0);
 
-        allowed_op_funcs::log(900, _phi[2]->value_ptr(), _exp_test->parameters()[0], _exp_test->parameters()[1], expected_val.data());
-        EXPECT_LT(std::abs(_exp_test->value_ptr()[0] - expected_val[0]), 1e-10);
-        EXPECT_LT(std::abs(_exp_test->value()[0] - expected_val[0]), 1e-10);
+        allowed_op_funcs::log(900, _phi[2]->value_ptr(), _log_test->parameters()[0], _log_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(_log_test->value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(_log_test->value()[0] - expected_val[0]), 1e-10);
 
-        allowed_op_funcs::log(10, _phi[2]->test_value_ptr(), _exp_test->parameters()[0], _exp_test->parameters()[1], expected_val.data());
-        EXPECT_LT(std::abs(_exp_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
-        EXPECT_LT(std::abs(_exp_test->test_value()[0] - expected_val[0]), 1e-10);
+        allowed_op_funcs::log(10, _phi[2]->test_value_ptr(), _log_test->parameters()[0], _log_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(_log_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(_log_test->test_value()[0] - expected_val[0]), 1e-10);
 
         std::stringstream postfix;
-        postfix << "2|log: " << std::setprecision(13) << std::scientific <<_exp_test->parameters()[0] << ',' << _exp_test->parameters()[1];
-        EXPECT_STREQ(_exp_test->unit().toString().c_str(), "Unitless");
-        EXPECT_STREQ(_exp_test->postfix_expr().c_str(), postfix.str().c_str());
+        postfix << "2|log: " << std::setprecision(13) << std::scientific <<_log_test->parameters()[0] << ',' << _log_test->parameters()[1];
+        EXPECT_STREQ(_log_test->unit().toString().c_str(), "Unitless");
+        EXPECT_STREQ(_log_test->postfix_expr().c_str(), postfix.str().c_str());
     }
 }
 #endif
diff --git a/tests/googletest/feature_creation/parameterization/test_mult_node.cc b/tests/googletest/feature_creation/parameterization/test_mult_node.cc
index 65e8e036c22b179380d1b8992f15f023fdfed6f1..9d6759d9dd1d16d79dc46cb6052bb13d5298d008 100644
--- a/tests/googletest/feature_creation/parameterization/test_mult_node.cc
+++ b/tests/googletest/feature_creation/parameterization/test_mult_node.cc
@@ -59,9 +59,14 @@ namespace
             _feat_2 = std::make_shared<FeatureNode>(1, "B", value_2, test_value_2, Unit("s"));
 
             _phi = {_feat_1, _feat_2};
+
             _a = distribution_params(generator);
             _alpha = distribution_params(generator);
+
             _prop = std::vector<double>(_task_sizes_train[0], 0.0);
+            _gradient.resize(_task_sizes_train[0] * 2, 1.0);
+            _dfdp.resize(_task_sizes_train[0]);
+
             allowed_op_funcs::mult(_task_sizes_train[0], _phi[0]->value_ptr(), _phi[1]->value_ptr(), _alpha, _a, _prop.data());
 
             _optimizer = nlopt_wrapper::get_optimizer("regression",_task_sizes_train, _prop, 1);
@@ -73,6 +78,8 @@ namespace
 
         std::vector<node_ptr> _phi;
         std::vector<double> _prop;
+        std::vector<double> _gradient;
+        std::vector<double> _dfdp;
         std::vector<int> _task_sizes_train;
         std::vector<int> _task_sizes_test;
 
@@ -127,6 +134,42 @@ namespace
         }
     }
 
+    TEST_F(MultParamNodeTest, HardCopyTest)
+    {
+        unsigned long int feat_ind = _phi.size();
+        _mult_test = std::make_shared<MultParamNode>(_phi[0], _phi[1], feat_ind, 1e-50, 1e50, _optimizer);
+
+        node_ptr copy_test = _mult_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        std::vector<double> expected_val(900, 0.0);
+
+        allowed_op_funcs::mult(900, _phi[0]->value_ptr(), _phi[1]->value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->value()[0] - expected_val[0]), 1e-10);
+
+        allowed_op_funcs::mult(10, _phi[0]->test_value_ptr(), _phi[1]->test_value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->test_value()[0] - expected_val[0]), 1e-10);
+
+        std::stringstream postfix;
+        postfix << "0|1|mult: " << std::setprecision(13) << std::scientific <<copy_test->parameters()[0] << ',' << copy_test->parameters()[1];
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "m * s");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), postfix.str().c_str());
+
+        copy_test->gradient(_gradient.data(), _dfdp.data());
+        double v1 = copy_test->feat(0)->value_ptr()[0];
+        double v2 = copy_test->feat(1)->value_ptr()[0];
+
+        // For f = v1 * (alpha * v2 + a): df/dalpha = v1 * v2 and df/da = v1,
+        // so the fitted parameter values themselves do not appear in the gradient.
+        double df_dp = v1;
+
+        EXPECT_DOUBLE_EQ(_gradient[0], df_dp * v2);
+        EXPECT_DOUBLE_EQ(_gradient[_task_sizes_train[0]], df_dp);
+    }
+
     TEST_F(MultParamNodeTest, AttributesTest)
     {
         unsigned long int feat_ind = _phi.size();
diff --git a/tests/googletest/feature_creation/parameterization/test_neg_exp_node.cc b/tests/googletest/feature_creation/parameterization/test_neg_exp_node.cc
index ff9ad5cc52b6a8ff9d9f480fdeea481fef0d2fdd..cb2ef047e99751d439ac1d96c549294f975b558a 100644
--- a/tests/googletest/feature_creation/parameterization/test_neg_exp_node.cc
+++ b/tests/googletest/feature_creation/parameterization/test_neg_exp_node.cc
@@ -69,7 +69,11 @@ namespace
 
             _a = distribution_params(generator);
             _alpha = distribution_params(generator);
+
             _prop = std::vector<double>(_task_sizes_train[0], 0.0);
+            _gradient.resize(_task_sizes_train[0] * 2, 1.0);
+            _dfdp.resize(_task_sizes_train[0]);
+
             allowed_op_funcs::neg_exp(_task_sizes_train[0], _phi[0]->value_ptr(), _alpha, _a, _prop.data());
 
             _optimizer = nlopt_wrapper::get_optimizer("regression",_task_sizes_train, _prop, 1);
@@ -78,10 +82,12 @@ namespace
         node_ptr _feat_1;
         node_ptr _feat_2;
         node_ptr _feat_3;
-        node_ptr _exp_test;
+        node_ptr _neg_exp_test;
 
         std::vector<node_ptr> _phi;
         std::vector<double> _prop;
+        std::vector<double> _gradient;
+        std::vector<double> _dfdp;
         std::vector<int> _task_sizes_train;
         std::vector<int> _task_sizes_test;
 
@@ -121,7 +127,7 @@ namespace
 
         try
         {
-            _exp_test = std::make_shared<NegExpParamNode>(_phi[1], feat_ind, 1e-50, 1e-40, _optimizer);
+            _neg_exp_test = std::make_shared<NegExpParamNode>(_phi[1], feat_ind, 1e-50, 1e-40, _optimizer);
             EXPECT_TRUE(false) << " (NegExpParamNode created with an absolute value above the upper bound)";
         }
         catch(const InvalidFeatureException& e)
@@ -129,7 +135,7 @@ namespace
 
         try
         {
-            _exp_test = std::make_shared<NegExpParamNode>(_phi[1], feat_ind, 1e49, 1e50, _optimizer);
+            _neg_exp_test = std::make_shared<NegExpParamNode>(_phi[1], feat_ind, 1e49, 1e50, _optimizer);
             EXPECT_TRUE(false) << " (NegExpParamNode created with an absolute value below the lower bound)";
         }
         catch(const InvalidFeatureException& e)
@@ -137,7 +143,7 @@ namespace
 
         try
         {
-            _exp_test = std::make_shared<NegExpParamNode>(_phi[3], feat_ind, 1e-50, 1e50, _optimizer);
+            _neg_exp_test = std::make_shared<NegExpParamNode>(_phi[3], feat_ind, 1e-50, 1e50, _optimizer);
             EXPECT_TRUE(false) << " (NegExpParamNode created from ExpNode)";
         }
         catch(const InvalidFeatureException& e)
@@ -145,7 +151,7 @@ namespace
 
         try
         {
-            _exp_test = std::make_shared<NegExpParamNode>(_phi[4], feat_ind, 1e-50, 1e50, _optimizer);
+            _neg_exp_test = std::make_shared<NegExpParamNode>(_phi[4], feat_ind, 1e-50, 1e50, _optimizer);
             EXPECT_TRUE(false) << " (NegExpParamNode created from LogNode)";
         }
         catch(const InvalidFeatureException& e)
@@ -153,7 +159,7 @@ namespace
 
         try
         {
-            _exp_test = std::make_shared<NegExpParamNode>(_phi[5], feat_ind, 1e-50, 1e50, _optimizer);
+            _neg_exp_test = std::make_shared<NegExpParamNode>(_phi[5], feat_ind, 1e-50, 1e50, _optimizer);
             EXPECT_TRUE(false) << " (NegExpParamNode created from NegExpNode)";
         }
         catch(const InvalidFeatureException& e)
@@ -161,8 +167,8 @@ namespace
 
         try
         {
-            _exp_test = std::make_shared<NegExpParamNode>(_phi[0], feat_ind, 1e-50, 1e50, _optimizer);
-            EXPECT_LT(1.0 - util_funcs::r2(_prop.data(), _exp_test->value_ptr(), 900), 1e-5);
+            _neg_exp_test = std::make_shared<NegExpParamNode>(_phi[0], feat_ind, 1e-50, 1e50, _optimizer);
+            EXPECT_LT(1.0 - util_funcs::r2(_prop.data(), _neg_exp_test->value_ptr(), 900), 1e-5);
         }
         catch(const InvalidFeatureException& e)
         {
@@ -170,27 +176,62 @@ namespace
         }
     }
 
+    TEST_F(NegExpParamNodeTest, HardCopyTest)
+    {
+        unsigned long int feat_ind = _phi.size();
+        _neg_exp_test = std::make_shared<NegExpParamNode>(_phi[0], feat_ind, 1e-50, 1e50, _optimizer);
+
+        node_ptr copy_test = _neg_exp_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        std::vector<double> expected_val(900, 0.0);
+
+        allowed_op_funcs::neg_exp(900, _phi[0]->value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->value_ptr()[0] - expected_val[0]), 1e-5);
+        EXPECT_LT(std::abs(copy_test->value()[0] - expected_val[0]), 1e-5);
+
+        allowed_op_funcs::neg_exp(10, _phi[0]->test_value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->test_value_ptr()[0] - expected_val[0]), 1e-5);
+        EXPECT_LT(std::abs(copy_test->test_value()[0] - expected_val[0]), 1e-5);
+
+        std::stringstream postfix;
+        postfix << "0|nexp: " << std::setprecision(13) << std::scientific <<copy_test->parameters()[0] << ',' << copy_test->parameters()[1];
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "Unitless");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), postfix.str().c_str());
+
+        copy_test->gradient(_gradient.data(), _dfdp.data());
+        double v1 = copy_test->feat(0)->value_ptr()[0];
+
+        double alpha = copy_test->parameters()[0];
+        double a = copy_test->parameters()[1];
+        double df_dp = -1.0 * std::exp(-1.0 * alpha * v1 + a); // NOTE(review): confirm sign convention — allowed_op_funcs::neg_exp may compute exp(-(alpha * v1 + a)), which would also negate a
+
+        EXPECT_DOUBLE_EQ(_gradient[0], df_dp * v1);
+        EXPECT_DOUBLE_EQ(_gradient[_task_sizes_train[0]], df_dp);
+    }
+
     TEST_F(NegExpParamNodeTest, AttributesTest)
     {
         unsigned long int feat_ind = _phi.size();
-        _exp_test = std::make_shared<NegExpParamNode>(_phi[0], feat_ind, 1e-50, 1e50, _optimizer);
+        _neg_exp_test = std::make_shared<NegExpParamNode>(_phi[0], feat_ind, 1e-50, 1e50, _optimizer);
 
-        EXPECT_EQ(_exp_test->rung(), 1);
+        EXPECT_EQ(_neg_exp_test->rung(), 1);
 
         std::vector<double> expected_val(900, 0.0);
 
-        allowed_op_funcs::neg_exp(900, _phi[0]->value_ptr(), _exp_test->parameters()[0], _exp_test->parameters()[1], expected_val.data());
-        EXPECT_LT(std::abs(_exp_test->value_ptr()[0] - expected_val[0]), 1e-5);
-        EXPECT_LT(std::abs(_exp_test->value()[0] - expected_val[0]), 1e-5);
+        allowed_op_funcs::neg_exp(900, _phi[0]->value_ptr(), _neg_exp_test->parameters()[0], _neg_exp_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(_neg_exp_test->value_ptr()[0] - expected_val[0]), 1e-5);
+        EXPECT_LT(std::abs(_neg_exp_test->value()[0] - expected_val[0]), 1e-5);
 
-        allowed_op_funcs::neg_exp(10, _phi[0]->test_value_ptr(), _exp_test->parameters()[0], _exp_test->parameters()[1], expected_val.data());
-        EXPECT_LT(std::abs(_exp_test->test_value_ptr()[0] - expected_val[0]), 1e-5);
-        EXPECT_LT(std::abs(_exp_test->test_value()[0] - expected_val[0]), 1e-5);
+        allowed_op_funcs::neg_exp(10, _phi[0]->test_value_ptr(), _neg_exp_test->parameters()[0], _neg_exp_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(_neg_exp_test->test_value_ptr()[0] - expected_val[0]), 1e-5);
+        EXPECT_LT(std::abs(_neg_exp_test->test_value()[0] - expected_val[0]), 1e-5);
 
         std::stringstream postfix;
-        postfix << "0|nexp: " << std::setprecision(13) << std::scientific <<_exp_test->parameters()[0] << ',' << _exp_test->parameters()[1];
-        EXPECT_STREQ(_exp_test->unit().toString().c_str(), "Unitless");
-        EXPECT_STREQ(_exp_test->postfix_expr().c_str(), postfix.str().c_str());
+        postfix << "0|nexp: " << std::setprecision(13) << std::scientific <<_neg_exp_test->parameters()[0] << ',' << _neg_exp_test->parameters()[1];
+        EXPECT_STREQ(_neg_exp_test->unit().toString().c_str(), "Unitless");
+        EXPECT_STREQ(_neg_exp_test->postfix_expr().c_str(), postfix.str().c_str());
     }
 }
 #endif
diff --git a/tests/googletest/feature_creation/parameterization/test_sin_node.cc b/tests/googletest/feature_creation/parameterization/test_sin_node.cc
index dd19a4e34221adc5fdc36f089b3527d7312fde76..3e3e8f500f2aef3aa2afb2c8b4766c9d0be57b90 100644
--- a/tests/googletest/feature_creation/parameterization/test_sin_node.cc
+++ b/tests/googletest/feature_creation/parameterization/test_sin_node.cc
@@ -66,7 +66,11 @@ namespace
 
             _a = 0.143;
             _alpha = 1.05;
+
             _prop = std::vector<double>(_task_sizes_train[0], 0.0);
+            _gradient.resize(_task_sizes_train[0] * 2, 1.0);
+            _dfdp.resize(_task_sizes_train[0]);
+
             allowed_op_funcs::sin(_task_sizes_train[0], _phi[0]->value_ptr(), _alpha, _a, _prop.data());
 
             _optimizer = nlopt_wrapper::get_optimizer("regression",_task_sizes_train, _prop, 1);
@@ -79,6 +83,8 @@ namespace
 
         std::vector<node_ptr> _phi;
         std::vector<double> _prop;
+        std::vector<double> _gradient;
+        std::vector<double> _dfdp;
         std::vector<int> _task_sizes_train;
         std::vector<int> _task_sizes_test;
 
@@ -156,6 +162,41 @@ namespace
         }
     }
 
+    TEST_F(SinParamNodeTest, HardCopyTest)
+    {
+        unsigned long int feat_ind = _phi.size();
+        _sin_test = std::make_shared<SinParamNode>(_phi[0], feat_ind, 1e-50, 1e50, _optimizer);
+
+        node_ptr copy_test = _sin_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        std::vector<double> expected_val(900, 0.0);
+
+        allowed_op_funcs::sin(900, _phi[0]->value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->value()[0] - expected_val[0]), 1e-10);
+
+        allowed_op_funcs::sin(10, _phi[0]->test_value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->test_value()[0] - expected_val[0]), 1e-10);
+
+        std::stringstream postfix;
+        postfix << "0|sin: " << std::setprecision(13) << std::scientific <<copy_test->parameters()[0] << ',' << copy_test->parameters()[1];
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "Unitless");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), postfix.str().c_str());
+
+        copy_test->gradient(_gradient.data(), _dfdp.data());
+        double v1 = copy_test->feat(0)->value_ptr()[0];
+
+        double alpha = copy_test->parameters()[0];
+        double a = copy_test->parameters()[1];
+        double df_dp = std::cos(alpha * v1 + a);
+
+        EXPECT_DOUBLE_EQ(_gradient[0], df_dp * v1);
+        EXPECT_DOUBLE_EQ(_gradient[_task_sizes_train[0]], df_dp);
+    }
+
     TEST_F(SinParamNodeTest, AttributesTest)
     {
         unsigned long int feat_ind = _phi.size();
diff --git a/tests/googletest/feature_creation/parameterization/test_six_pow_node.cc b/tests/googletest/feature_creation/parameterization/test_six_pow_node.cc
index 25369ffdc5080cf5d8b2d82cd0f35d93e7a1cc90..ca42f461e0f0fae29715f4f69b5d88b33150ac76 100644
--- a/tests/googletest/feature_creation/parameterization/test_six_pow_node.cc
+++ b/tests/googletest/feature_creation/parameterization/test_six_pow_node.cc
@@ -61,7 +61,11 @@ namespace
             _phi = {_feat_1, _feat_2};
             _a = distribution_params(generator);
             _alpha = distribution_params(generator);
+
             _prop = std::vector<double>(_task_sizes_train[0], 0.0);
+            _gradient.resize(_task_sizes_train[0] * 2, 1.0);
+            _dfdp.resize(_task_sizes_train[0]);
+
             allowed_op_funcs::sixth_pow(_task_sizes_train[0], _phi[0]->value_ptr(), _alpha, _a, _prop.data());
 
             _optimizer = nlopt_wrapper::get_optimizer("regression",_task_sizes_train, _prop, 1);
@@ -69,10 +73,12 @@ namespace
 
         node_ptr _feat_1;
         node_ptr _feat_2;
-        node_ptr _exp_test;
+        node_ptr _six_pow_test;
 
         std::vector<node_ptr> _phi;
         std::vector<double> _prop;
+        std::vector<double> _gradient;
+        std::vector<double> _dfdp;
         std::vector<int> _task_sizes_train;
         std::vector<int> _task_sizes_test;
 
@@ -102,7 +108,7 @@ namespace
 
         try
         {
-            _exp_test = std::make_shared<SixPowParamNode>(_phi[0], feat_ind, 1e-50, 1e-40, _optimizer);
+            _six_pow_test = std::make_shared<SixPowParamNode>(_phi[0], feat_ind, 1e-50, 1e-40, _optimizer);
             EXPECT_TRUE(false) << " (SixPowParamNode created with an absolute value above the upper bound)";
         }
         catch(const InvalidFeatureException& e)
@@ -110,7 +116,7 @@ namespace
 
         try
         {
-            _exp_test = std::make_shared<SixPowParamNode>(_phi[0], feat_ind, 1e49, 1e50, _optimizer);
+            _six_pow_test = std::make_shared<SixPowParamNode>(_phi[0], feat_ind, 1e49, 1e50, _optimizer);
             EXPECT_TRUE(false) << " (SixPowParamNode created with an absolute value below the lower bound)";
         }
         catch(const InvalidFeatureException& e)
@@ -118,8 +124,8 @@ namespace
 
         try
         {
-            _exp_test = std::make_shared<SixPowParamNode>(_phi[0], feat_ind, 1e-50, 1e50, _optimizer);
-            EXPECT_LT(1.0 - util_funcs::r2(_prop.data(), _exp_test->value_ptr(), 900), 1e-4);
+            _six_pow_test = std::make_shared<SixPowParamNode>(_phi[0], feat_ind, 1e-50, 1e50, _optimizer);
+            EXPECT_LT(1.0 - util_funcs::r2(_prop.data(), _six_pow_test->value_ptr(), 900), 1e-4);
         }
         catch(const InvalidFeatureException& e)
         {
@@ -127,27 +133,62 @@ namespace
         }
     }
 
+    TEST_F(SixPowParamNodeTest, HardCopyTest)
+    {
+        unsigned long int feat_ind = _phi.size();
+        _six_pow_test = std::make_shared<SixPowParamNode>(_phi[0], feat_ind, 1e-50, 1e50, _optimizer);
+
+        node_ptr copy_test = _six_pow_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        std::vector<double> expected_val(900, 0.0);
+
+        allowed_op_funcs::sixth_pow(900, _phi[0]->value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->value()[0] - expected_val[0]), 1e-10);
+
+        allowed_op_funcs::sixth_pow(10, _phi[0]->test_value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->test_value()[0] - expected_val[0]), 1e-10);
+
+        std::stringstream postfix;
+        postfix << "0|sp: " << std::setprecision(13) << std::scientific <<copy_test->parameters()[0] << ',' << copy_test->parameters()[1];
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "m^6");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), postfix.str().c_str());
+
+        copy_test->gradient(_gradient.data(), _dfdp.data());
+        double v1 = copy_test->feat(0)->value_ptr()[0];
+
+        double alpha = copy_test->parameters()[0];
+        double a = copy_test->parameters()[1];
+        double df_dp = 6.0 * std::pow(alpha * v1 + a, 5.0);
+
+        EXPECT_DOUBLE_EQ(_gradient[0], df_dp * v1);
+        EXPECT_DOUBLE_EQ(_gradient[_task_sizes_train[0]], df_dp);
+    }
+
     TEST_F(SixPowParamNodeTest, AttributesTest)
     {
         unsigned long int feat_ind = _phi.size();
-        _exp_test = std::make_shared<SixPowParamNode>(_phi[0], feat_ind, 1e-50, 1e50, _optimizer);
+        _six_pow_test = std::make_shared<SixPowParamNode>(_phi[0], feat_ind, 1e-50, 1e50, _optimizer);
 
-        EXPECT_EQ(_exp_test->rung(), 1);
+        EXPECT_EQ(_six_pow_test->rung(), 1);
 
         std::vector<double> expected_val(900, 0.0);
 
-        allowed_op_funcs::sixth_pow(900, _phi[0]->value_ptr(), _exp_test->parameters()[0], _exp_test->parameters()[1], expected_val.data());
-        EXPECT_LT(std::abs(_exp_test->value_ptr()[0] - expected_val[0]), 1e-10);
-        EXPECT_LT(std::abs(_exp_test->value()[0] - expected_val[0]), 1e-10);
+        allowed_op_funcs::sixth_pow(900, _phi[0]->value_ptr(), _six_pow_test->parameters()[0], _six_pow_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(_six_pow_test->value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(_six_pow_test->value()[0] - expected_val[0]), 1e-10);
 
-        allowed_op_funcs::sixth_pow(10, _phi[0]->test_value_ptr(), _exp_test->parameters()[0], _exp_test->parameters()[1], expected_val.data());
-        EXPECT_LT(std::abs(_exp_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
-        EXPECT_LT(std::abs(_exp_test->test_value()[0] - expected_val[0]), 1e-10);
+        allowed_op_funcs::sixth_pow(10, _phi[0]->test_value_ptr(), _six_pow_test->parameters()[0], _six_pow_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(_six_pow_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(_six_pow_test->test_value()[0] - expected_val[0]), 1e-10);
 
         std::stringstream postfix;
-        postfix << "0|sp: " << std::setprecision(13) << std::scientific <<_exp_test->parameters()[0] << ',' << _exp_test->parameters()[1];
-        EXPECT_STREQ(_exp_test->unit().toString().c_str(), "m^6");
-        EXPECT_STREQ(_exp_test->postfix_expr().c_str(), postfix.str().c_str());
+        postfix << "0|sp: " << std::setprecision(13) << std::scientific <<_six_pow_test->parameters()[0] << ',' << _six_pow_test->parameters()[1];
+        EXPECT_STREQ(_six_pow_test->unit().toString().c_str(), "m^6");
+        EXPECT_STREQ(_six_pow_test->postfix_expr().c_str(), postfix.str().c_str());
     }
 }
 #endif
diff --git a/tests/googletest/feature_creation/parameterization/test_sq_node.cc b/tests/googletest/feature_creation/parameterization/test_sq_node.cc
index 63c3f241d17622f30cee468b7df39825bcb1d17f..770a055302e35a690894770c5a3339c84d2dc799 100644
--- a/tests/googletest/feature_creation/parameterization/test_sq_node.cc
+++ b/tests/googletest/feature_creation/parameterization/test_sq_node.cc
@@ -61,7 +61,11 @@ namespace
             _phi = {_feat_1, _feat_2};
             _a = distribution_params(generator);
             _alpha = distribution_params(generator);
+
             _prop = std::vector<double>(_task_sizes_train[0], 0.0);
+            _gradient.resize(_task_sizes_train[0] * 2, 1.0);
+            _dfdp.resize(_task_sizes_train[0]);
+
             allowed_op_funcs::sq(_task_sizes_train[0], _phi[1]->value_ptr(), _alpha, _a, _prop.data());
 
             _optimizer = nlopt_wrapper::get_optimizer("regression",_task_sizes_train, _prop, 1);
@@ -69,10 +73,12 @@ namespace
 
         node_ptr _feat_1;
         node_ptr _feat_2;
-        node_ptr _exp_test;
+        node_ptr _sq_test;
 
         std::vector<node_ptr> _phi;
         std::vector<double> _prop;
+        std::vector<double> _gradient;
+        std::vector<double> _dfdp;
         std::vector<int> _task_sizes_train;
         std::vector<int> _task_sizes_test;
 
@@ -102,7 +108,7 @@ namespace
 
         try
         {
-            _exp_test = std::make_shared<SqParamNode>(_phi[1], feat_ind, 1e-50, 1e-40, _optimizer);
+            _sq_test = std::make_shared<SqParamNode>(_phi[1], feat_ind, 1e-50, 1e-40, _optimizer);
             EXPECT_TRUE(false) << " (SqParamNode created with an absolute value above the upper bound)";
         }
         catch(const InvalidFeatureException& e)
@@ -110,7 +116,7 @@ namespace
 
         try
         {
-            _exp_test = std::make_shared<SqParamNode>(_phi[1], feat_ind, 1e49, 1e50, _optimizer);
+            _sq_test = std::make_shared<SqParamNode>(_phi[1], feat_ind, 1e49, 1e50, _optimizer);
             EXPECT_TRUE(false) << " (SqParamNode created with an absolute value below the lower bound)";
         }
         catch(const InvalidFeatureException& e)
@@ -118,8 +124,8 @@ namespace
 
         try
         {
-            _exp_test = std::make_shared<SqParamNode>(_phi[1], feat_ind, 1e-50, 1e50, _optimizer);
-            EXPECT_LT(1.0 - util_funcs::r2(_prop.data(), _exp_test->value_ptr(), 90), 1e-4);
+            _sq_test = std::make_shared<SqParamNode>(_phi[1], feat_ind, 1e-50, 1e50, _optimizer);
+            EXPECT_LT(1.0 - util_funcs::r2(_prop.data(), _sq_test->value_ptr(), 90), 1e-4);
         }
         catch(const InvalidFeatureException& e)
         {
@@ -127,27 +133,62 @@ namespace
         }
     }
 
+    TEST_F(SqParamNodeTest, HardCopyTest)
+    {
+        unsigned long int feat_ind = _phi.size();
+        _sq_test = std::make_shared<SqParamNode>(_phi[1], feat_ind, 1e-50, 1e50, _optimizer);
+
+        node_ptr copy_test = _sq_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        std::vector<double> expected_val(90, 0.0);
+
+        allowed_op_funcs::sq(90, _phi[1]->value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->value()[0] - expected_val[0]), 1e-10);
+
+        allowed_op_funcs::sq(10, _phi[1]->test_value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->test_value()[0] - expected_val[0]), 1e-10);
+
+        std::stringstream postfix;
+        postfix << "1|sq: " << std::setprecision(13) << std::scientific <<copy_test->parameters()[0] << ',' << copy_test->parameters()[1];
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "s^2");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), postfix.str().c_str());
+
+        copy_test->gradient(_gradient.data(), _dfdp.data());
+        double v1 = copy_test->feat(0)->value_ptr()[0];
+
+        double alpha = copy_test->parameters()[0];
+        double a = copy_test->parameters()[1];
+        double df_dp = 2.0 * (alpha * v1 + a);
+
+        EXPECT_EQ(_gradient[0], df_dp * v1);
+        EXPECT_EQ(_gradient[_task_sizes_train[0]], df_dp);
+    }
+
     TEST_F(SqParamNodeTest, AttributesTest)
     {
         unsigned long int feat_ind = _phi.size();
-        _exp_test = std::make_shared<SqParamNode>(_phi[1], feat_ind, 1e-50, 1e50, _optimizer);
+        _sq_test = std::make_shared<SqParamNode>(_phi[1], feat_ind, 1e-50, 1e50, _optimizer);
 
-        EXPECT_EQ(_exp_test->rung(), 1);
+        EXPECT_EQ(_sq_test->rung(), 1);
 
         std::vector<double> expected_val(90, 0.0);
 
-        allowed_op_funcs::sq(90, _phi[1]->value_ptr(), _exp_test->parameters()[0], _exp_test->parameters()[1], expected_val.data());
-        EXPECT_LT(std::abs(_exp_test->value_ptr()[0] - expected_val[0]), 1e-10);
-        EXPECT_LT(std::abs(_exp_test->value()[0] - expected_val[0]), 1e-10);
+        allowed_op_funcs::sq(90, _phi[1]->value_ptr(), _sq_test->parameters()[0], _sq_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(_sq_test->value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(_sq_test->value()[0] - expected_val[0]), 1e-10);
 
-        allowed_op_funcs::sq(10, _phi[1]->test_value_ptr(), _exp_test->parameters()[0], _exp_test->parameters()[1], expected_val.data());
-        EXPECT_LT(std::abs(_exp_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
-        EXPECT_LT(std::abs(_exp_test->test_value()[0] - expected_val[0]), 1e-10);
+        allowed_op_funcs::sq(10, _phi[1]->test_value_ptr(), _sq_test->parameters()[0], _sq_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(_sq_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(_sq_test->test_value()[0] - expected_val[0]), 1e-10);
 
         std::stringstream postfix;
-        postfix << "1|sq: " << std::setprecision(13) << std::scientific <<_exp_test->parameters()[0] << ',' << _exp_test->parameters()[1];
-        EXPECT_STREQ(_exp_test->unit().toString().c_str(), "s^2");
-        EXPECT_STREQ(_exp_test->postfix_expr().c_str(), postfix.str().c_str());
+        postfix << "1|sq: " << std::setprecision(13) << std::scientific <<_sq_test->parameters()[0] << ',' << _sq_test->parameters()[1];
+        EXPECT_STREQ(_sq_test->unit().toString().c_str(), "s^2");
+        EXPECT_STREQ(_sq_test->postfix_expr().c_str(), postfix.str().c_str());
     }
 }
 #endif
diff --git a/tests/googletest/feature_creation/parameterization/test_sqrt_node.cc b/tests/googletest/feature_creation/parameterization/test_sqrt_node.cc
index 6b2dc24f2665650d0e9b7fb6bc9f6faf91154393..91a13a8a33277716c3da4f4d30e9a76b091b5c8d 100644
--- a/tests/googletest/feature_creation/parameterization/test_sqrt_node.cc
+++ b/tests/googletest/feature_creation/parameterization/test_sqrt_node.cc
@@ -61,7 +61,11 @@ namespace
             _phi = {_feat_1, _feat_2};
             _a = distribution_params(generator);
             _alpha = std::pow(distribution_params(generator), 2.0);
+
             _prop = std::vector<double>(_task_sizes_train[0], 0.0);
+            _gradient.resize(_task_sizes_train[0] * 2, 1.0);
+            _dfdp.resize(_task_sizes_train[0]);
+
             allowed_op_funcs::sqrt(_task_sizes_train[0], _phi[1]->value_ptr(), _alpha, _a, _prop.data());
 
             _optimizer = nlopt_wrapper::get_optimizer("regression",_task_sizes_train, _prop, 1);
@@ -73,6 +77,8 @@ namespace
 
         std::vector<node_ptr> _phi;
         std::vector<double> _prop;
+        std::vector<double> _gradient;
+        std::vector<double> _dfdp;
         std::vector<int> _task_sizes_train;
         std::vector<int> _task_sizes_test;
 
@@ -127,6 +133,41 @@ namespace
         }
     }
 
+    TEST_F(SqrtParamNodeTest, HardCopyTest)
+    {
+        unsigned long int feat_ind = _phi.size();
+        _sqrt_test = std::make_shared<SqrtParamNode>(_phi[1], feat_ind, 1e-50, 1e50, _optimizer);
+
+        node_ptr copy_test = _sqrt_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        std::vector<double> expected_val(900, 0.0);
+
+        allowed_op_funcs::sqrt(900, _phi[1]->value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->value()[0] - expected_val[0]), 1e-10);
+
+        allowed_op_funcs::sqrt(10, _phi[1]->test_value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->test_value()[0] - expected_val[0]), 1e-10);
+
+        std::stringstream postfix;
+        postfix << "1|sqrt: " << std::setprecision(13) << std::scientific <<copy_test->parameters()[0] << ',' << copy_test->parameters()[1];
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "s^0.5");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), postfix.str().c_str());
+
+        copy_test->gradient(_gradient.data(), _dfdp.data());
+        double v1 = copy_test->feat(0)->value_ptr()[0];
+
+        double alpha = copy_test->parameters()[0];
+        double a = copy_test->parameters()[1];
+        double df_dp = 0.5 * std::pow(alpha * v1 + a, -0.5);
+
+        EXPECT_EQ(_gradient[0], df_dp * v1);
+        EXPECT_EQ(_gradient[_task_sizes_train[0]], df_dp);
+    }
+
     TEST_F(SqrtParamNodeTest, AttributesTest)
     {
         unsigned long int feat_ind = _phi.size();
diff --git a/tests/googletest/feature_creation/parameterization/test_sub_node.cc b/tests/googletest/feature_creation/parameterization/test_sub_node.cc
index b7da5988ab2550bf168995587b33f7ca1b970bbb..cb936433f63ffb3baa77aa0720067c1ca02d512b 100644
--- a/tests/googletest/feature_creation/parameterization/test_sub_node.cc
+++ b/tests/googletest/feature_creation/parameterization/test_sub_node.cc
@@ -61,7 +61,11 @@ namespace
             _phi = {_feat_1, _feat_2};
             _a = distribution_params(generator);
             _alpha = distribution_params(generator);
+
             _prop = std::vector<double>(_task_sizes_train[0], 0.0);
+            _gradient.resize(_task_sizes_train[0] * 2, 1.0);
+            _dfdp.resize(_task_sizes_train[0]);
+
             allowed_op_funcs::sub(_task_sizes_train[0], _phi[0]->value_ptr(), _phi[1]->value_ptr(), _alpha, _a, _prop.data());
 
             _optimizer = nlopt_wrapper::get_optimizer("regression",_task_sizes_train, _prop, 1);
@@ -73,6 +77,8 @@ namespace
 
         std::vector<node_ptr> _phi;
         std::vector<double> _prop;
+        std::vector<double> _gradient;
+        std::vector<double> _dfdp;
         std::vector<int> _task_sizes_train;
         std::vector<int> _task_sizes_test;
 
@@ -128,6 +134,42 @@ namespace
         }
     }
 
+    TEST_F(SubParamNodeTest, HardCopyTest)
+    {
+        unsigned long int feat_ind = _phi.size();
+        _sub_test = std::make_shared<SubParamNode>(_phi[0], _phi[1], feat_ind, 1e-50, 1e50, _optimizer);
+
+        node_ptr copy_test = _sub_test->hard_copy();
+
+        EXPECT_EQ(copy_test->rung(), 1);
+
+        std::vector<double> expected_val(90, 0.0);
+
+        allowed_op_funcs::sub(90, _phi[0]->value_ptr(), _phi[1]->value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->value()[0] - expected_val[0]), 1e-10);
+
+        allowed_op_funcs::sub(10, _phi[0]->test_value_ptr(), _phi[1]->test_value_ptr(), copy_test->parameters()[0], copy_test->parameters()[1], expected_val.data());
+        EXPECT_LT(std::abs(copy_test->test_value_ptr()[0] - expected_val[0]), 1e-10);
+        EXPECT_LT(std::abs(copy_test->test_value()[0] - expected_val[0]), 1e-10);
+
+        std::stringstream postfix;
+        postfix << "0|1|sub: " << std::setprecision(13) << std::scientific <<copy_test->parameters()[0] << ',' << copy_test->parameters()[1];
+        EXPECT_STREQ(copy_test->unit().toString().c_str(), "m");
+        EXPECT_STREQ(copy_test->postfix_expr().c_str(), postfix.str().c_str());
+
+        copy_test->gradient(_gradient.data(), _dfdp.data());
+        double v1 = copy_test->feat(0)->value_ptr()[0];
+        double v2 = copy_test->feat(1)->value_ptr()[0];
+
+        double alpha = copy_test->parameters()[0];
+        double a = copy_test->parameters()[1];
+        double df_dp = -1.0;
+
+        EXPECT_EQ(_gradient[0], df_dp * v2);
+        EXPECT_EQ(_gradient[_task_sizes_train[0]], df_dp);
+    }
+
     TEST_F(SubParamNodeTest, AttributesTest)
     {
         unsigned long int feat_ind = _phi.size();
diff --git a/tests/googletest/utils/test_math_utils.cc b/tests/googletest/utils/test_math_utils.cc
index 5ec9b8d0aa82cf999b0c45a73cbbbbdd9cc974be..0e7a91ce475c41dbf472bc0fc2b2f7f9535aba48 100644
--- a/tests/googletest/utils/test_math_utils.cc
+++ b/tests/googletest/utils/test_math_utils.cc
@@ -86,6 +86,34 @@ namespace {
         EXPECT_FALSE(std::isfinite(util_funcs::r(dVec2.data(), dVec2.data(), szs.data(), 2)));
 
         szs = {8, 8};
+        double mean_a = util_funcs::mean(dVec2);
+        double std_a = util_funcs::stand_dev(dVec2);
+
+        double mean_b = util_funcs::mean(dNeg2);
+        double std_b = util_funcs::stand_dev(dNeg2);
+
+        std::vector<double> mean_a_vec = {util_funcs::mean(dVec2.data(), 8), util_funcs::mean(dVec2.data() + 8, 8)};
+        std::vector<double> std_a_vec = {util_funcs::stand_dev(dVec2.data(), 8, mean_a_vec[0]), util_funcs::stand_dev(dVec2.data() + 8, 8, mean_a_vec[1])};
+
+        std::vector<double> mean_b_vec = {util_funcs::mean(dNeg2.data(), 8), util_funcs::mean(dNeg2.data() + 8, 8)};
+        std::vector<double> std_b_vec = {util_funcs::stand_dev(dNeg2.data(), 8, mean_b_vec[0]), util_funcs::stand_dev(dNeg2.data() + 8, 8, mean_b_vec[1])};
+
+        EXPECT_LT(std::abs(1.0 - util_funcs::r(dVec2.data(), dVec2.data(), 16, mean_a, std_a, mean_a, std_a)), 1.0e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::r(dVec2.data(), dVec2.data(), szs, mean_a_vec, std_a_vec, mean_a_vec, std_a_vec)), 1.0e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::r(dVec2.data(), dVec2.data(), szs.data(), mean_a_vec.data(), std_a_vec.data(), mean_a_vec.data(), std_a_vec.data(), 2)), 1.0e-10);
+
+        EXPECT_LT(std::abs(1.0 + util_funcs::r(dVec2.data(), dNeg2.data(), 16, mean_a, std_a, mean_b, std_b)), 1e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::r(dVec2.data(), dNeg2.data(), szs, mean_a_vec, std_a_vec, mean_b_vec, std_b_vec)), 1e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::r(dVec2.data(), dNeg2.data(), szs.data(), mean_a_vec.data(), std_a_vec.data(), mean_b_vec.data(), std_b_vec.data(), 2)), 1e-10);
+
+        EXPECT_LT(std::abs(1.0 - util_funcs::r(dVec2.data(), dVec2.data(), 16, mean_a, std_a)), 1.0e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::r(dVec2.data(), dVec2.data(), szs, mean_a_vec, std_a_vec)), 1.0e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::r(dVec2.data(), dVec2.data(), szs.data(), mean_a_vec.data(), std_a_vec.data(), 2)), 1.0e-10);
+
+        EXPECT_LT(std::abs(1.0 + util_funcs::r(dVec2.data(), dNeg2.data(), 16, mean_a, std_a)), 1e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::r(dVec2.data(), dNeg2.data(), szs, mean_a_vec, std_a_vec)), 1e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::r(dVec2.data(), dNeg2.data(), szs.data(), mean_a_vec.data(), std_a_vec.data(), 2)), 1e-10);
+
         EXPECT_LT(std::abs(1.0 - util_funcs::r(dVec2.data(), dVec2.data(), 16)), 1.0e-10);
         EXPECT_LT(std::abs(1.0 - util_funcs::r(dVec2.data(), dVec2.data(), szs)), 1.0e-10);
         EXPECT_LT(std::abs(1.0 - util_funcs::r(dVec2.data(), dVec2.data(), szs.data(), 2)), 1.0e-10);
@@ -111,13 +139,41 @@ namespace {
         EXPECT_FALSE(std::isfinite(util_funcs::r2(dVec2.data(), dVec2.data(), szs.data(), 2)));
 
         szs = {8, 8};
+        double mean_a = util_funcs::mean(dVec2);
+        double std_a = util_funcs::stand_dev(dVec2, mean_a);
+
+        double mean_b = util_funcs::mean(dNeg2);
+        double std_b = util_funcs::stand_dev(dNeg2, mean_b);
+
+        std::vector<double> mean_a_vec = {util_funcs::mean(dVec2.data(), 8), util_funcs::mean(dVec2.data() + 8, 8)};
+        std::vector<double> std_a_vec = {util_funcs::stand_dev(dVec2.data(), 8, mean_a_vec[0]), util_funcs::stand_dev(dVec2.data() + 8, 8, mean_a_vec[1])};
+
+        std::vector<double> mean_b_vec = {util_funcs::mean(dNeg2.data(), 8), util_funcs::mean(dNeg2.data() + 8, 8)};
+        std::vector<double> std_b_vec = {util_funcs::stand_dev(dNeg2.data(), 8, mean_b_vec[0]), util_funcs::stand_dev(dNeg2.data() + 8, 8, mean_b_vec[1])};
+
+        EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dVec2.data(), 16, mean_a, std_a, mean_a, std_a)), 1.0e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dVec2.data(), szs, mean_a_vec, std_a_vec, mean_a_vec, std_a_vec)), 1.0e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dVec2.data(), szs.data(), mean_a_vec.data(), std_a_vec.data(), mean_a_vec.data(), std_a_vec.data(), 2)), 1.0e-10);
+
+        EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dNeg2.data(), 16, mean_a, std_a, mean_b, std_b)), 1e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dNeg2.data(), szs, mean_a_vec, std_a_vec, mean_b_vec, std_b_vec)), 1e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dNeg2.data(), szs.data(), mean_a_vec.data(), std_a_vec.data(), mean_b_vec.data(), std_b_vec.data(), 2)), 1e-10);
+
+        EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dVec2.data(), 16, mean_a, std_a)), 1.0e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dVec2.data(), szs, mean_a_vec, std_a_vec)), 1.0e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dVec2.data(), szs.data(), mean_a_vec.data(), std_a_vec.data(), 2)), 1.0e-10);
+
+        EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dNeg2.data(), 16, mean_a, std_a)), 1e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dNeg2.data(), szs, mean_a_vec, std_a_vec)), 1e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dNeg2.data(), szs.data(), mean_a_vec.data(), std_a_vec.data(), 2)), 1e-10);
+
         EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dVec2.data(), 16)), 1.0e-10);
         EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dVec2.data(), szs)), 1.0e-10);
         EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dVec2.data(), szs.data(), 2)), 1.0e-10);
 
-        EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dNeg2.data(), 16)), 1.0e-10);
-        EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dNeg2.data(), szs)), 1.0e-10);
-        EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dNeg2.data(), szs.data(), 2)), 1.0e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dNeg2.data(), 16)), 1e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dNeg2.data(), szs)), 1e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::r2(dVec2.data(), dNeg2.data(), szs.data(), 2)), 1e-10);
     }
 
     //test log_r^2
@@ -139,10 +195,94 @@ namespace {
         szs = {2, 2};
         std::vector<double> x = {1, 10, 1000, 10000};
         std::vector<double> y = {0, 1, 3, 4};
-        EXPECT_LT(std::abs(1.0 - util_funcs::log_r2(x.data(), y.data(), log_x.data(), 4)), 1.0);
-        EXPECT_LT(std::abs(1.0 - util_funcs::log_r2(x.data(), y.data(), log_x.data(), szs)), 1.0);
-        EXPECT_LT(std::abs(1.0 - util_funcs::log_r2(x.data(), y.data(), log_x.data(), szs.data(), 2)), 1.0);
+        EXPECT_LT(std::abs(1.0 - util_funcs::log_r2(x.data(), y.data(), log_x.data(), 4)), 1.0e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::log_r2(x.data(), y.data(), log_x.data(), szs)), 1.0e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::log_r2(x.data(), y.data(), log_x.data(), szs.data(), 2)), 1.0e-10);
+
+        double mean_b = util_funcs::mean(y);
+        double std_b = util_funcs::stand_dev(y, mean_b);
+
+        std::vector<double> mean_b_vec = {util_funcs::mean(y.data(), 2), util_funcs::mean(y.data() + 2, 2)};
+        std::vector<double> std_b_vec = {util_funcs::stand_dev(y.data(), 2), util_funcs::stand_dev(y.data() + 2, 2)};
 
+        EXPECT_LT(std::abs(1.0 - util_funcs::log_r2(x.data(), y.data(), log_x.data(), 4, mean_b, std_b)), 1.0e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::log_r2(x.data(), y.data(), log_x.data(), szs, mean_b_vec, std_b_vec)), 1.0e-10);
+        EXPECT_LT(std::abs(1.0 - util_funcs::log_r2(x.data(), y.data(), log_x.data(), szs.data(), mean_b_vec.data(), std_b_vec.data(), 2)), 1.0e-10);
+    }
+
+    //test Spearman correlation
+    TEST(MathUtils, SpearmanRTest)
+    {
+        std::vector<double> dNeg2(16, 0);
+
+        std::vector<double> rank_a(16, 0);
+        std::vector<double> rank_b(16, 0);
+        std::vector<int> index(16, 0);
+
+        std::transform(dVec2.begin(), dVec2.end(), dNeg2.begin(), [](double dd){return -1.0 * dd;});
+
+        std::vector<int> szs = {2, 14};
+        EXPECT_FALSE(std::isfinite(util_funcs::r(dVec1.data(), dVec2.data(), 16)));
+        EXPECT_FALSE(std::isfinite(util_funcs::r(dVec1.data(), dVec2.data(), szs)));
+        EXPECT_FALSE(std::isfinite(util_funcs::r(dVec1.data(), dVec2.data(), szs.data(), 2)));
+
+        EXPECT_TRUE(std::isfinite(util_funcs::r(dVec2.data(), dVec2.data(), 16)));
+        EXPECT_FALSE(std::isfinite(util_funcs::r(dVec2.data(), dVec2.data(), szs)));
+        EXPECT_FALSE(std::isfinite(util_funcs::r(dVec2.data(), dVec2.data(), szs.data(), 2)));
+
+        szs = {8, 8};
+        double mean_a = util_funcs::mean(dVec2);
+        double std_a = util_funcs::stand_dev(dVec2);
+
+        double mean_b = util_funcs::mean(dNeg2);
+        double std_b = util_funcs::stand_dev(dNeg2);
+
+        std::vector<double> mean_a_vec = {util_funcs::mean(dVec2.data(), 8), util_funcs::mean(dVec2.data() + 8, 8)};
+        std::vector<double> std_a_vec = {util_funcs::stand_dev(dVec2.data(), 8, mean_a_vec[0]), util_funcs::stand_dev(dVec2.data() + 8, 8, mean_a_vec[1])};
+
+        std::vector<double> mean_b_vec = {util_funcs::mean(dNeg2.data(), 8), util_funcs::mean(dNeg2.data() + 8, 8)};
+        std::vector<double> std_b_vec = {util_funcs::stand_dev(dNeg2.data(), 8, mean_b_vec[0]), util_funcs::stand_dev(dNeg2.data() + 8, 8, mean_b_vec[1])};
+
+        EXPECT_LT(
+            std::abs(
+                1.0 - util_funcs::spearman_r(
+                    dVec2.data(),
+                    dVec2.data(),
+                    rank_a.data(),
+                    rank_b.data(),
+                    index.data(),
+                    16
+                )
+            ),
+            1.0e-10
+        );
+        EXPECT_LT(
+            std::abs(
+                1.0 - util_funcs::spearman_r(
+                    dVec2.data(),
+                    dVec2.data(),
+                    rank_a.data(),
+                    rank_b.data(),
+                    index.data(),
+                    szs
+                )
+            ),
+            1.0e-10
+        );
+        EXPECT_LT(
+            std::abs(
+                1.0 - util_funcs::spearman_r(
+                    dVec2.data(),
+                    dVec2.data(),
+                    rank_a.data(),
+                    rank_b.data(),
+                    index.data(),
+                    szs.data(),
+                    2
+                )
+            ),
+            1.0e-10
+        );
     }
 
     //test argsort
diff --git a/tests/pytest/test_descriptor_identifier/model_classifierr.m b/tests/pytest/test_descriptor_identifier/model_classifierr.m
deleted file mode 100644
index 17fd119259e2e42b97e90a2e636bf11cb9460b6c..0000000000000000000000000000000000000000
--- a/tests/pytest/test_descriptor_identifier/model_classifierr.m
+++ /dev/null
@@ -1,27 +0,0 @@
-function P = model_classifierr(X)
-% Returns the value of $Class$ = [(feat_9 - feat_8), (feat_1 * feat_0)]
-%
-% X = [
-%     feat_9,
-%     feat_8,
-%     feat_1,
-%     feat_0,
-% ]
-
-if(size(X, 2) ~= 4)
-    error("ERROR: X must have a size of 4 in the second dimension.")
-end
-feat_9 = reshape(X(:, 1), 1, []);
-feat_8 = reshape(X(:, 2), 1, []);
-feat_1 = reshape(X(:, 3), 1, []);
-feat_0 = reshape(X(:, 4), 1, []);
-
-f0 = (feat_9 - feat_8);
-f1 = (feat_1 .* feat_0);
-
-c0 = 9.0759507278e-01;
-a0 = 1.3262056497e+00;
-a1 = -1.7442399997e+00;
-
-P = reshape(c0 + a0 * f0 + a1 * f1, [], 1);
-end
diff --git a/tests/pytest/test_descriptor_identifier/test_class_model_retrain_svm.py b/tests/pytest/test_descriptor_identifier/test_class_model_retrain_svm.py
new file mode 100644
index 0000000000000000000000000000000000000000..0e8fe26965f6ad507243452694ece4c1271f7578
--- /dev/null
+++ b/tests/pytest/test_descriptor_identifier/test_class_model_retrain_svm.py
@@ -0,0 +1,52 @@
+# Copyright 2021 Thomas A. R. Purcell
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from sissopp import ModelClassifier
+from sissopp.postprocess.load_models import load_model
+from sissopp.postprocess.classification import update_model_svm
+from pathlib import Path
+
+import numpy as np
+
+
+class InvalidFeatureMade(Exception):
+    pass
+
+
+parent = Path(__file__).parent
+
+
+def test_class_model_retrain_svm():
+    model = load_model(
+        str(parent / "model_files/train_classifier.dat"),
+        str(parent / "model_files/test_classifier.dat"),
+    )
+    updated_model_np = update_model_svm(model, c=1.0, max_iter=1000000)
+    updated_model_list = ModelClassifier(
+        model,
+        [list(coefs) for coefs in updated_model_np.coefs],
+        updated_model_np.fit,
+        updated_model_np.predict,
+    )
+    assert np.all(
+        [
+            abs(c_list - c_np) < 1e-8
+            for c_list, c_np in zip(
+                updated_model_np.coefs[0], updated_model_list.coefs[0]
+            )
+        ]
+    )
+
+
+if __name__ == "__main__":
+    test_class_model_retrain_svm()
diff --git a/tests/pytest/test_descriptor_identifier/test_class_model_train_from_file.py b/tests/pytest/test_descriptor_identifier/test_class_model_train_from_file.py
new file mode 100644
index 0000000000000000000000000000000000000000..69789dae0d77152dff61581ff316fd9e3524d66c
--- /dev/null
+++ b/tests/pytest/test_descriptor_identifier/test_class_model_train_from_file.py
@@ -0,0 +1,58 @@
+# Copyright 2021 Thomas A. R. Purcell
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from sissopp.postprocess.load_models import load_model
+from pathlib import Path
+
+import numpy as np
+
+
+class InvalidFeatureMade(Exception):
+    pass
+
+
+parent = Path(__file__).parent
+
+
+def test_class_model_train_from_file():
+    model = load_model(str(parent / "model_files/train_classifier.dat"))
+
+    assert np.all(np.abs(model.fit - model.prop_train) < 1e-7)
+
+    assert np.sum(model.train_error) + 80 < 1e-7
+
+    assert model.task_sizes_train == [80]
+    assert model.task_sizes_test == [0]
+    assert model.leave_out_inds == []
+
+    assert model.feats[0].postfix_expr == "9|8|sub"
+    assert model.feats[1].postfix_expr == "1|0|mult"
+
+    actual_coefs = [
+        [1.326205649731981, -1.744239999671528, 0.9075950727790907],
+    ]
+
+    assert np.all(
+        [
+            abs(coef - actual) < 1e-8
+            for coef, actual in zip(model.coefs[0], actual_coefs[0])
+        ]
+    )
+    assert (
+        model.latex_str
+        == "[$\\left(feat_{9} - feat_{8}\\right)$, $\\left(feat_{1} feat_{0}\\right)$]"
+    )
+
+
+if __name__ == "__main__":
+    test_class_model_train_from_file()
diff --git a/tests/pytest/test_descriptor_identifier/test_log_reg_train_model_from_file.py b/tests/pytest/test_descriptor_identifier/test_log_reg_train_model_from_file.py
new file mode 100644
index 0000000000000000000000000000000000000000..fdb945401e9182dc1fb3a66a855ded363eef5ba6
--- /dev/null
+++ b/tests/pytest/test_descriptor_identifier/test_log_reg_train_model_from_file.py
@@ -0,0 +1,68 @@
+# Copyright 2021 Thomas A. R. Purcell
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from sissopp.postprocess.load_models import load_model
+from pathlib import Path
+
+import numpy as np
+
+
+class InvalidFeatureMade(Exception):
+    pass
+
+
+parent = Path(__file__).parent
+
+
+def test_log_reg_model_from_file():
+    model = load_model(
+        str(parent / "model_files/train_log_regressor.dat"),
+    )
+
+    assert np.all(np.abs(model.fit - model.prop_train) < 1e-7)
+    assert np.all(np.abs(model.train_error) < 1e-7)
+
+    assert model.task_sizes_train == [95]
+    assert model.task_sizes_test == [0]
+    assert model.leave_out_inds == []
+
+    assert model.feats[0].postfix_expr == "1|0|add"
+    assert model.feats[1].postfix_expr == "3|1|abd"
+
+    actual_coefs = [
+        [1.20, -1.95, 2.194569927587456e-13],
+    ]
+
+    assert np.all(
+        [
+            abs(coef - actual) < 1e-8
+            for coef, actual in zip(model.coefs[0], actual_coefs[0])
+        ]
+    )
+
+    assert model.rmse < 1e-7
+    assert model.max_ae < 1e-7
+    assert model.mae < 1e-7
+    assert model.mape < 1e-7
+    assert model.percentile_25_ae < 1e-7
+    assert model.percentile_50_ae < 1e-7
+    assert model.percentile_75_ae < 1e-7
+    assert model.percentile_95_ae < 1e-7
+    assert (
+        model.latex_str
+        == "$\\exp\\left(c_0\\right)\\left(\\left(B + A\\right)\\right)^{a_0}\\left(\\left(\\left|D - B\\right|\\right)\\right)^{a_1}$"
+    )
+
+
+if __name__ == "__main__":
+    test_log_reg_model_from_file()
diff --git a/tests/pytest/test_descriptor_identifier/test_reg_model_train_from_file.py b/tests/pytest/test_descriptor_identifier/test_reg_model_train_from_file.py
new file mode 100644
index 0000000000000000000000000000000000000000..64fd05fdfbe70b8a900772b74ab62d6f135410c3
--- /dev/null
+++ b/tests/pytest/test_descriptor_identifier/test_reg_model_train_from_file.py
@@ -0,0 +1,76 @@
+# Copyright 2021 Thomas A. R. Purcell
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from sissopp.postprocess.load_models import load_model
+from pathlib import Path
+
+import numpy as np
+
+
+class InvalidFeatureMade(Exception):
+    pass
+
+
+parent = Path(__file__).parent
+
+
+def test_reg_model_from_file():
+    model = load_model(
+        str(parent / "model_files/train_regressor.dat"),
+    )
+
+    assert np.all(np.abs(model.fit - model.prop_train) < 1e-7)
+    assert np.all(np.abs(model.train_error) < 1e-7)
+
+    assert model.task_sizes_train == [57, 38]
+    assert model.task_sizes_test == [0, 0]
+    assert model.leave_out_inds == []
+
+    assert model.feats[0].postfix_expr == "1|0|div|0|div"
+    assert model.feats[1].postfix_expr == "3|2|add|3|abs|add"
+
+    actual_coefs = [
+        [-7.215478485316414, 82.2718000000000, 3.141589999617774],
+        [99.14521320736678, -142.365900000000, -5.254860000134174],
+    ]
+
+    assert np.all(
+        [
+            abs(coef - actual) < 1e-8
+            for coef, actual in zip(model.coefs[0], actual_coefs[0])
+        ]
+    )
+    assert np.all(
+        [
+            abs(coef - actual) < 1e-8
+            for coef, actual in zip(model.coefs[1], actual_coefs[1])
+        ]
+    )
+    print(model.r2)
+    assert model.r2 > 0.999
+    assert model.rmse < 1e-7
+    assert model.max_ae < 1e-7
+    assert model.mae < 1e-7
+    assert model.mape < 1e-7
+    assert model.percentile_25_ae < 1e-7
+    assert model.percentile_50_ae < 1e-7
+    assert model.percentile_75_ae < 1e-7
+    assert model.percentile_95_ae < 1e-7
+    assert (
+        model.latex_str
+        == "$c_0 + a_0\\left(\\frac{ \\left(\\frac{ B }{ A } \\right) }{ A } \\right) + a_1\\left(\\left(D + C\\right) + \\left(\\left|D\\right|\\right)\\right)$"
+    )
+
+
+if __name__ == "__main__":
+    test_reg_model_from_file()
diff --git a/tests/pytest/test_feature_creation/test_feat_generation/test_abs_diff_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_abs_diff_node.py
index 5a57f62d58acc3e7086acd68c112f62f840ca7a7..f06158a16dfaac3c5c3e2825bf82f80ab06fa83d 100644
--- a/tests/pytest/test_feature_creation/test_feat_generation/test_abs_diff_node.py
+++ b/tests/pytest/test_feature_creation/test_feat_generation/test_abs_diff_node.py
@@ -96,6 +96,19 @@ def test_abs_diff_node():
         pass
 
     decomp = feats[-1].primary_feat_decomp
+    assert len(decomp.keys()) == 2
+    assert decomp["t_a"] == 1
+    assert decomp["t_b"] == 1
+
+    assert feats[-1].n_leaves == 2
+    assert feats[-1].n_feats == 2
+    assert feats[-1].feat(0).expr == "t_b"
+    try:
+        feats[-1].feat(2)
+        raise ValueError("Accessing feature that should throw an error")
+    except (RuntimeError, IndexError):
+        pass
+    assert feats[-1].matlab_fxn_expr == "abs(t_b - t_a)"
 
 
 if __name__ == "__main__":
diff --git a/tests/pytest/test_feature_creation/test_feat_generation/test_abs_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_abs_node.py
index 925938a4a8b26746235bab3b1cbe34e182d29ebc..47ac337c70a8a9538543d36edb5c163d1c272846 100644
--- a/tests/pytest/test_feature_creation/test_feat_generation/test_abs_node.py
+++ b/tests/pytest/test_feature_creation/test_feat_generation/test_abs_node.py
@@ -64,21 +64,35 @@ def test_abs_node():
     except RuntimeError:
         pass
 
-    feats.append(AbsNode(feat_3, 6, 1e-50, 1e50))
     feats.append(AbsDiffNode(feat_2, feat_1, 4, 1e-50, 1e50))
+    feats.append(AbsNode(feat_3, 6, 1e-50, 1e50))
 
     try:
-        feats.append(AbsNode(feats[0], 5, 1e-50, 1e50))
+        feats.append(AbsNode(feats[1], 5, 1e-50, 1e50))
         raise InvalidFeatureMade("Taking the absolute value of an absolute value")
     except RuntimeError:
         pass
 
     try:
-        feats.append(AbsNode(feats[1], 5, 1e-50, 1e50))
+        feats.append(AbsNode(feats[0], 5, 1e-50, 1e50))
         raise InvalidFeatureMade("Taking the absolute value of an absolute difference")
     except RuntimeError:
         pass
 
+    decomp = feats[-1].primary_feat_decomp
+    assert len(decomp.keys()) == 1
+    assert decomp["v_a"] == 1
+
+    assert feats[-1].n_leaves == 1
+    assert feats[-1].n_feats == 1
+    assert feats[-1].feat(0).expr == "v_a"
+    try:
+        feats[-1].feat(2)
+        raise ValueError("Accessing feature that should throw an error")
+    except (RuntimeError, IndexError):
+        pass
+    assert feats[-1].matlab_fxn_expr == "abs(v_a)"
+
 
 if __name__ == "__main__":
     test_abs_node()
diff --git a/tests/pytest/test_feature_creation/test_feat_generation/test_add_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_add_node.py
index de115c458f77f3c912d6e363572eb4189cc38a99..a56cd2f13ff8e924e06be2df9c6cc7b333fe8aa2 100644
--- a/tests/pytest/test_feature_creation/test_feat_generation/test_add_node.py
+++ b/tests/pytest/test_feature_creation/test_feat_generation/test_add_node.py
@@ -67,6 +67,7 @@ def test_add_node():
 
     feats.append(AddNode(feat_1, feat_3, 4, 1e-50, 1e50))
     feats.append(SubNode(feat_1, feat_3, 5, 1e-50, 1e50))
+    feats.append(AddNode(feats[0], feat_3, 6, 1e-50, 1e50))
 
     try:
         feats.append(AddNode(feats[0], feats[1], 6, 1e-50, 1e50))
@@ -86,6 +87,21 @@ def test_add_node():
     except RuntimeError:
         pass
 
+    decomp = feats[-1].primary_feat_decomp
+    assert len(decomp.keys()) == 2
+    assert decomp["t_a"] == 1
+    assert decomp["t_b"] == 2
+
+    assert feats[-1].n_leaves == 3
+    assert feats[-1].n_feats == 2
+    assert feats[-1].feat(1).expr == "t_b"
+    try:
+        feats[-1].feat(2)
+        raise ValueError("Accessing feature that should throw an error")
+    except (RuntimeError, IndexError):
+        pass
+    assert feats[0].matlab_fxn_expr == "(t_a + t_b)"
+
 
 if __name__ == "__main__":
     test_add_node()
diff --git a/tests/pytest/test_feature_creation/test_feat_generation/test_cb_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_cb_node.py
index 81863cf910069cf43b281f303bb34b348a589809..84fc8d783266728bfe66e6624e2986ddd07c0780 100644
--- a/tests/pytest/test_feature_creation/test_feat_generation/test_cb_node.py
+++ b/tests/pytest/test_feature_creation/test_feat_generation/test_cb_node.py
@@ -63,6 +63,7 @@ def test_cube_node():
     feats.append(InvNode(feat_1, 5, 1e-50, 1e50))
     feats.append(SqNode(feat_2, 6, 1e-50, 1e50))
     feats.append(CbrtNode(feat_1, 7, 1e-50, 1e50))
+    feats.append(CbNode(feat_1, 8, 1e-50, 1e50))
 
     try:
         feats.append(CbNode(feats[0], 13, 1e-50, 1e50))
@@ -82,6 +83,20 @@ def test_cube_node():
     except RuntimeError:
         pass
 
+    decomp = feats[-1].primary_feat_decomp
+    assert len(decomp.keys()) == 1
+    assert decomp["t_a"] == 1
+
+    assert feats[-1].n_leaves == 1
+    assert feats[-1].n_feats == 1
+    assert feats[-1].feat(0).expr == "t_a"
+    try:
+        feats[-1].feat(2)
+        raise ValueError("Accessing feature that should throw an error")
+    except (RuntimeError, IndexError):
+        pass
+    assert feats[-1].matlab_fxn_expr == "(t_a).^3"
+
 
 if __name__ == "__main__":
     test_cube_node()
diff --git a/tests/pytest/test_feature_creation/test_feat_generation/test_cbrt_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_cbrt_node.py
index 0c0b2e2ff39eb5d18c7d02334b20481fddf13f86..e404bf20837213ae5c997f68294f3c1fddc778a8 100644
--- a/tests/pytest/test_feature_creation/test_feat_generation/test_cbrt_node.py
+++ b/tests/pytest/test_feature_creation/test_feat_generation/test_cbrt_node.py
@@ -66,6 +66,7 @@ def test_cbrt_node():
     feats.append(SqNode(feat_2, 6, 1e-50, 1e50))
     feats.append(CbNode(feat_1, 7, 1e-50, 1e50))
     feats.append(SixPowNode(feat_2, 8, 1e-50, 1e50))
+    feats.append(CbrtNode(feat_1, 9, 1e-50, 1e50))
 
     try:
         feats.append(CbrtNode(feats[0], 13, 1e-50, 1e50))
@@ -91,6 +92,20 @@ def test_cbrt_node():
     except RuntimeError:
         pass
 
+    decomp = feats[-1].primary_feat_decomp
+    assert len(decomp.keys()) == 1
+    assert decomp["t_a"] == 1
+
+    assert feats[-1].n_leaves == 1
+    assert feats[-1].n_feats == 1
+    assert feats[-1].feat(0).expr == "t_a"
+    try:
+        feats[-1].feat(2)
+        raise ValueError("Accessing feature that should throw an error")
+    except (RuntimeError, IndexError):
+        pass
+    assert feats[-1].matlab_fxn_expr == "nthroot(t_a, 3)"
+
 
 if __name__ == "__main__":
     test_cbrt_node()
diff --git a/tests/pytest/test_feature_creation/test_feat_generation/test_cos_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_cos_node.py
index da510ead267f0ed2c6681c49c188609ca10d8209..e0ac340aeb794a3cfe87a183ce107f46e2fbb080 100644
--- a/tests/pytest/test_feature_creation/test_feat_generation/test_cos_node.py
+++ b/tests/pytest/test_feature_creation/test_feat_generation/test_cos_node.py
@@ -80,6 +80,20 @@ def test_cos_node():
     except RuntimeError:
         pass
 
+    decomp = feats[-1].primary_feat_decomp
+    assert len(decomp.keys()) == 1
+    assert decomp["x_a"] == 1
+
+    assert feats[-1].n_leaves == 1
+    assert feats[-1].n_feats == 1
+    assert feats[-1].feat(0).expr == "x_a"
+    try:
+        feats[-1].feat(2)
+        raise ValueError("Accessing feature that should throw an error")
+    except (RuntimeError, IndexError):
+        pass
+    assert feats[-1].matlab_fxn_expr == "cos(x_a)"
+
 
 if __name__ == "__main__":
     test_cos_node()
diff --git a/tests/pytest/test_feature_creation/test_feat_generation/test_div_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_div_node.py
index 07f10a89cccc713fc1c958c31122a186e98142a8..3a338a52244c00024ca89914debe403c70fc20d3 100644
--- a/tests/pytest/test_feature_creation/test_feat_generation/test_div_node.py
+++ b/tests/pytest/test_feature_creation/test_feat_generation/test_div_node.py
@@ -109,6 +109,22 @@ def test_div_node():
 
     feats.append(DivNode(feat_3, feat_5, 9, 1e-50, 1e50))
 
+    decomp = feats[-1].primary_feat_decomp
+    assert len(decomp.keys()) == 2
+    assert decomp["t_b"] == 1
+    assert decomp["x_b"] == 1
+
+    assert feats[-1].n_leaves == 2
+    assert feats[-1].n_feats == 2
+    assert feats[-1].feat(0).expr == "t_b"
+    try:
+        feats[-1].feat(2)
+        raise ValueError("Accessing feature that should throw an error")
+    except (RuntimeError, IndexError):
+        pass
+
+    assert feats[-1].matlab_fxn_expr == "(t_b ./ x_b)"
+
 
 if __name__ == "__main__":
     test_div_node()
diff --git a/tests/pytest/test_feature_creation/test_feat_generation/test_exp_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_exp_node.py
index 670687bffdc5c7d07b6dcceb2a3c565a6459beb6..070dbdcadb1b79442f771d69b86af0bed54756b7 100644
--- a/tests/pytest/test_feature_creation/test_feat_generation/test_exp_node.py
+++ b/tests/pytest/test_feature_creation/test_feat_generation/test_exp_node.py
@@ -102,6 +102,20 @@ def test_exp_node():
     except RuntimeError:
         pass
 
+    decomp = feats[0].primary_feat_decomp
+    assert len(decomp.keys()) == 1
+    assert decomp["x_a"] == 1
+
+    assert feats[0].n_leaves == 1
+    assert feats[0].n_feats == 1
+    assert feats[0].feat(0).expr == "x_a"
+    try:
+        feats[0].feat(2)
+        raise ValueError("Accessing feature that should throw an error")
+    except (RuntimeError, IndexError):
+        pass
+    assert feats[0].matlab_fxn_expr == "exp(x_a)"
+
 
 if __name__ == "__main__":
     test_exp_node()
diff --git a/tests/pytest/test_feature_creation/test_feat_generation/test_feat_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_feat_node.py
new file mode 100644
index 0000000000000000000000000000000000000000..e214930fc4f36596279ee9dca943872670db6f0c
--- /dev/null
+++ b/tests/pytest/test_feature_creation/test_feat_generation/test_feat_node.py
@@ -0,0 +1,68 @@
+# Copyright 2021 Thomas A. R. Purcell
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from sissopp import (
+    FeatureNode,
+    AddNode,
+    SubNode,
+    AbsDiffNode,
+    Unit,
+    initialize_values_arr,
+)
+
+import numpy as np
+
+
+class InvalidFeatureMade(Exception):
+    pass
+
+
+def test_feat_node():
+    task_sizes_train = [10]
+    task_sizes_test = [10]
+
+    initialize_values_arr(task_sizes_train, task_sizes_test, 1, 0)
+
+    data_1 = np.random.random(task_sizes_train[0]) * 1e10 + 1e-10
+    test_data_1 = np.random.random(task_sizes_test[0]) * 1e10 + 1e-10
+
+    data_2 = np.random.random(task_sizes_train[0] + 1) * 2e4 - 1e4
+    test_data_2 = np.random.random(task_sizes_test[0] + 1) * 2e4 - 1e4
+
+    feat_1 = FeatureNode(0, "t_a", data_1, test_data_1, Unit("s"))
+    feat_2 = FeatureNode(1, "t_a", data_1, test_data_1, Unit("s"))
+
+    assert feat_1.n_leaves == 1
+
+    try:
+        feat_3 = FeatureNode(1, "t_b", data_2, test_data_2, Unit("s"))
+        raise InvalidFeatureMade("FeatureNode created with wrong number of samples.")
+    except RuntimeError:
+        pass
+
+    decomp = feat_1.primary_feat_decomp
+    assert len(decomp.keys()) == 1
+    assert decomp["t_a"] == 1
+
+    assert feat_1.n_leaves == 1
+
+    try:
+        feat_1.feat(2)
+        raise ValueError("Accessing feature that should throw an error")
+    except (RuntimeError, IndexError):
+        pass
+    assert feat_1.matlab_fxn_expr == "t_a"
+
+
+if __name__ == "__main__":
+    test_feat_node()
diff --git a/tests/pytest/test_feature_creation/test_feat_generation/test_inv_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_inv_node.py
index 01ae9dc5d3044c6d09ed5b2b3373b51b5b6fc39a..646bb223b576212e5de36a7e3421fedd9dbd912d 100644
--- a/tests/pytest/test_feature_creation/test_feat_generation/test_inv_node.py
+++ b/tests/pytest/test_feature_creation/test_feat_generation/test_inv_node.py
@@ -89,6 +89,21 @@ def test_inv_node():
     except RuntimeError:
         pass
 
+    decomp = feats[0].primary_feat_decomp
+    assert len(decomp.keys()) == 1
+    assert decomp["x_b"] == 1
+
+    assert feats[0].n_leaves == 1
+    assert feats[0].n_feats == 1
+    assert feats[0].feat(0).expr == "x_b"
+    try:
+        feats[0].feat(2)
+        raise ValueError("Accessing feature that should throw an error")
+    except (RuntimeError, IndexError):
+        pass
+    print(feats[0].matlab_fxn_expr)
+    assert feats[0].matlab_fxn_expr == "(x_b).^(-1)"
+
 
 if __name__ == "__main__":
     test_inv_node()
diff --git a/tests/pytest/test_feature_creation/test_feat_generation/test_log_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_log_node.py
index f995a41116de109832c6eb03092142de47d74dac..4b484fd8d697eda1b6bf97fd0f8b19a54cbc5fd9 100644
--- a/tests/pytest/test_feature_creation/test_feat_generation/test_log_node.py
+++ b/tests/pytest/test_feature_creation/test_feat_generation/test_log_node.py
@@ -84,7 +84,7 @@ def test_log_node():
     except RuntimeError:
         pass
 
-    feats.append(ExpNode(feat_2, 3, 1e-50, 1e50))
+    feats.append(ExpNode(feat_2, 13, 1e-50, 1e50))
     feats.append(NegExpNode(feat_2, 4, 1e-50, 1e50))
     feats.append(LogNode(feat_1, 5, 1e-50, 1e50))
     feats.append(MultNode(feat_1, feats[0], 6, 1e-50, 1e50))
@@ -162,6 +162,20 @@ def test_log_node():
     except RuntimeError:
         pass
 
+    decomp = feats[2].primary_feat_decomp
+    assert len(decomp.keys()) == 1
+    assert decomp["t_a"] == 1
+
+    assert feats[2].n_leaves == 1
+    assert feats[2].n_feats == 1
+    assert feats[2].feat(0).expr == "t_a"
+    try:
+        feats[2].feat(2)
+        raise ValueError("Accessing feature that should throw an error")
+    except (RuntimeError, IndexError):
+        pass
+    assert feats[2].matlab_fxn_expr == "log(t_a)"
+
 
 if __name__ == "__main__":
     test_log_node()
diff --git a/tests/pytest/test_feature_creation/test_feat_generation/test_model_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_model_node.py
new file mode 100644
index 0000000000000000000000000000000000000000..4070244a4333cfc80298f8285e6c11ff226395d6
--- /dev/null
+++ b/tests/pytest/test_feature_creation/test_feat_generation/test_model_node.py
@@ -0,0 +1,65 @@
+# Copyright 2021 Thomas A. R. Purcell
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from sissopp import (
+    FeatureNode,
+    ModelNode,
+    AddNode,
+    Unit,
+    initialize_values_arr,
+)
+
+import numpy as np
+
+
+class InvalidFeatureMade(Exception):
+    pass
+
+
+def test_model_node():
+    task_sizes_train = [10]
+    task_sizes_test = [10]
+
+    initialize_values_arr(task_sizes_train, task_sizes_test, 1, 1)
+
+    data_1 = np.random.random(task_sizes_train[0]) * 1e10 + 1e-10
+    test_data_1 = np.random.random(task_sizes_test[0]) * 1e10 + 1e-10
+
+    data_2 = np.random.random(task_sizes_train[0]) * 1e10 + 1e-10
+    test_data_2 = np.random.random(task_sizes_test[0]) * 1e10 + 1e-10
+
+    feat_1 = FeatureNode(0, "t_a", data_1, test_data_1, Unit("s"))
+    feat_2 = FeatureNode(1, "t_b", data_2, test_data_2, Unit("s"))
+    feat_3 = AddNode(feat_1, feat_2, 2, 1e-50, 1e50)
+    model_node = ModelNode(feat_3)
+
+    assert model_node.n_leaves == 2
+
+    decomp = model_node.primary_feat_decomp
+    assert len(decomp.keys()) == 2
+    assert decomp["t_a"] == 1
+    assert decomp["t_b"] == 1
+    assert model_node.x_in_expr_list[0] == "t_a"
+    assert model_node.x_in_expr_list[1] == "t_b"
+
+    assert model_node.n_leaves == 2
+    try:
+        model_node.feat(2)
+        raise ValueError("Accessing feature that should throw an error")
+    except (RuntimeError, IndexError):
+        pass
+    assert model_node.matlab_fxn_expr == "(t_a + t_b)"
+
+
+if __name__ == "__main__":
+    test_model_node()
diff --git a/tests/pytest/test_feature_creation/test_feat_generation/test_mult_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_mult_node.py
index 8c7a9b2937cdf1ac4312b1d0fc6da32e33e5af81..cc11c78043c0df134f4dd9d3c56a9fc25f42883f 100644
--- a/tests/pytest/test_feature_creation/test_feat_generation/test_mult_node.py
+++ b/tests/pytest/test_feature_creation/test_feat_generation/test_mult_node.py
@@ -106,6 +106,22 @@ def test_mult_node():
     except RuntimeError:
         pass
 
+    decomp = feats[0].primary_feat_decomp
+    assert len(decomp.keys()) == 2
+    assert decomp["t_a"] == 1
+    assert decomp["x_a"] == 1
+
+    assert feats[0].n_leaves == 2
+    assert feats[0].n_feats == 2
+    assert feats[0].feat(0).expr == "t_a"
+    try:
+        feats[0].feat(2)
+        raise ValueError("Accessing feature that should throw an error")
+    except (RuntimeError, IndexError):
+        pass
+
+    assert feats[0].matlab_fxn_expr == "(t_a .* x_a)"
+
 
 if __name__ == "__main__":
     test_mult_node()
diff --git a/tests/pytest/test_feature_creation/test_feat_generation/test_neg_exp_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_neg_exp_node.py
index 2af614882498399f85612db44a9b0d05e8d8b2fd..faa761a5490326ea47ea5668c9aa19a37ea93d84 100644
--- a/tests/pytest/test_feature_creation/test_feat_generation/test_neg_exp_node.py
+++ b/tests/pytest/test_feature_creation/test_feat_generation/test_neg_exp_node.py
@@ -70,7 +70,7 @@ def test_neg_exp_node():
     except RuntimeError:
         pass
 
-    feats.append(ExpNode(feat_2, 3, 1e-50, 1e50))
+    feats.append(NegExpNode(feat_2, 3, 1e-50, 1e50))
     feats.append(ExpNode(feat_2, 4, 1e-50, 1e50))
     feats.append(LogNode(feat_1, 5, 1e-50, 1e50))
     feats.append(AddNode(feat_1, feat_2, 6, 1e-50, 1e50))
@@ -78,13 +78,13 @@ def test_neg_exp_node():
 
     try:
         feats.append(NegExpNode(feats[0], 8, 1e-50, 1e50))
-        raise InvalidFeatureMade("Negative exponentiating an ExpNode")
+        raise InvalidFeatureMade("Negative exponentiating a NegExpNode")
     except RuntimeError:
         pass
 
     try:
         feats.append(NegExpNode(feats[1], 8, 1e-50, 1e50))
-        raise InvalidFeatureMade("Negative exponentiating a NegExpNode")
+        raise InvalidFeatureMade("Negative exponentiating an ExpNode")
     except RuntimeError:
         pass
 
@@ -106,6 +106,21 @@ def test_neg_exp_node():
     except RuntimeError:
         pass
 
+    decomp = feats[0].primary_feat_decomp
+    assert len(decomp.keys()) == 1
+    assert decomp["x_a"] == 1
+
+    assert feats[0].n_leaves == 1
+    assert feats[0].n_feats == 1
+    assert feats[0].feat(0).expr == "x_a"
+    try:
+        feats[0].feat(2)
+        raise ValueError("Accessing feature that should throw an error")
+    except (RuntimeError, IndexError):
+        pass
+    print(feats[0].matlab_fxn_expr)
+    assert feats[0].matlab_fxn_expr == "exp(-1.0 * (x_a))"
+
 
 if __name__ == "__main__":
     test_neg_exp_node()
diff --git a/tests/pytest/test_feature_creation/test_feat_generation/test_sin_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_sin_node.py
index 9cef2b48ffcbae0f63e8812deba9b45c35c7ff4d..017ba8947254b1799bfd382c0744e75884d7c695 100644
--- a/tests/pytest/test_feature_creation/test_feat_generation/test_sin_node.py
+++ b/tests/pytest/test_feature_creation/test_feat_generation/test_sin_node.py
@@ -83,6 +83,20 @@ def test_sin_node():
     except RuntimeError:
         pass
 
+    decomp = feats[0].primary_feat_decomp
+    assert len(decomp.keys()) == 1
+    assert decomp["x_a"] == 1
+
+    assert feats[0].n_leaves == 1
+    assert feats[0].n_feats == 1
+    assert feats[0].feat(0).expr == "x_a"
+    try:
+        feats[0].feat(2)
+        raise ValueError("Accessing feature that should throw an error")
+    except (RuntimeError, IndexError):
+        pass
+    assert feats[0].matlab_fxn_expr == "sin(x_a)"
+
 
 if __name__ == "__main__":
     test_sin_node()
diff --git a/tests/pytest/test_feature_creation/test_feat_generation/test_six_pow_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_six_pow_node.py
index 203022a2f73df921a9ba83ece0663c0a402aac1d..e2baacf405dc6d8ac4fbdd7775cd03b4e5bf128a 100644
--- a/tests/pytest/test_feature_creation/test_feat_generation/test_six_pow_node.py
+++ b/tests/pytest/test_feature_creation/test_feat_generation/test_six_pow_node.py
@@ -46,7 +46,7 @@ def test_six_pow_node():
 
     feats = []
     try:
-        feats.append(SixPowNode(feat_1, 3, 1e-50, 1e-10))
+        feats.append(SixPowNode(feat_1, 2, 1e-50, 1e-10))
         raise InvalidFeatureMade(
             "Taking the sixth power of the feature leads to values outside of user specified bounds"
         )
@@ -54,7 +54,7 @@ def test_six_pow_node():
         pass
 
     try:
-        feats.append(SixPowNode(feat_1, 3, 1e7, 1e50))
+        feats.append(SixPowNode(feat_1, 2, 1e7, 1e50))
         raise InvalidFeatureMade(
             "Taking the sixth power of the feature leads to values outside of user specified bounds"
         )
@@ -74,6 +74,7 @@ def test_six_pow_node():
     feats.append(SqNode(feat_1, 6, 1e-50, 1e50))
     feats.append(CbNode(feat_1, 7, 1e-50, 1e50))
     feats.append(SqrtNode(feat_1, 8, 1e-50, 1e50))
+    feats.append(SixPowNode(feat_1, 3, 1e-50, 1e50))
 
     try:
         feats.append(SixPowNode(feats[0], 13, 1e-50, 1e50))
@@ -105,6 +106,20 @@ def test_six_pow_node():
     except RuntimeError:
         pass
 
+    decomp = feats[-1].primary_feat_decomp
+    assert len(decomp.keys()) == 1
+    assert decomp["t_a"] == 1
+
+    assert feats[-1].n_leaves == 1
+    assert feats[-1].n_feats == 1
+    assert feats[-1].feat(0).expr == "t_a"
+    try:
+        feats[-1].feat(2)
+        raise ValueError("Accessing feature that should throw an error")
+    except (RuntimeError, IndexError):
+        pass
+    assert feats[-1].matlab_fxn_expr == "(t_a).^6"
+
 
 if __name__ == "__main__":
     test_six_pow_node()
diff --git a/tests/pytest/test_feature_creation/test_feat_generation/test_sq_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_sq_node.py
index 3c1fcf245505e46fd82e7d62dc2cd09ad484228f..434380402f24551daf32f8172d1d72e6c2d245f4 100644
--- a/tests/pytest/test_feature_creation/test_feat_generation/test_sq_node.py
+++ b/tests/pytest/test_feature_creation/test_feat_generation/test_sq_node.py
@@ -43,7 +43,7 @@ def test_square_node():
 
     feats = []
     try:
-        feats.append(SqNode(feat_1, 3, 1e-50, 1e-10))
+        feats.append(SqNode(feat_1, 2, 1e-50, 1e-10))
         raise InvalidFeatureMade(
             "Taking the cube of the feature leads to values outside of user specified bounds"
         )
@@ -51,7 +51,7 @@ def test_square_node():
         pass
 
     try:
-        feats.append(SqNode(feat_1, 3, 1e8, 1e50))
+        feats.append(SqNode(feat_1, 2, 1e8, 1e50))
         raise InvalidFeatureMade(
             "Taking the cube of the feature leads to values outside of user specified bounds"
         )
@@ -68,6 +68,7 @@ def test_square_node():
 
     feats.append(InvNode(feat_1, 5, 1e-50, 1e50))
     feats.append(SqrtNode(feat_1, 6, 1e-50, 1e50))
+    feats.append(SqNode(feat_1, 4, 1e-50, 1e50))
 
     try:
         feats.append(SqNode(feats[0], 13, 1e-50, 1e50))
@@ -81,6 +82,20 @@ def test_square_node():
     except RuntimeError:
         pass
 
+    decomp = feats[-1].primary_feat_decomp
+    assert len(decomp.keys()) == 1
+    assert decomp["t_a"] == 1
+
+    assert feats[-1].n_leaves == 1
+    assert feats[-1].n_feats == 1
+    assert feats[-1].feat(0).expr == "t_a"
+    try:
+        feats[-1].feat(2)
+        raise ValueError("Accessing feature that should throw an error")
+    except (RuntimeError, IndexError):
+        pass
+    assert feats[-1].matlab_fxn_expr == "(t_a).^2"
+
 
 if __name__ == "__main__":
     test_square_node()
diff --git a/tests/pytest/test_feature_creation/test_feat_generation/test_sqrt_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_sqrt_node.py
index 0b225cd01d80f752f08a7ec5c4c66cf5792ae5a0..9d302fc113525a0857793222ccababef5d208918 100644
--- a/tests/pytest/test_feature_creation/test_feat_generation/test_sqrt_node.py
+++ b/tests/pytest/test_feature_creation/test_feat_generation/test_sqrt_node.py
@@ -46,7 +46,7 @@ def test_sqrt_node():
 
     feats = []
     try:
-        feats.append(SqrtNode(feat_1, 3, 1e-50, 1e-10))
+        feats.append(SqrtNode(feat_1, 2, 1e-50, 1e-10))
         raise InvalidFeatureMade(
             "Taking the sqrt of the feature leads to values outside of user specified bounds"
         )
@@ -54,7 +54,7 @@ def test_sqrt_node():
         pass
 
     try:
-        feats.append(SqrtNode(feat_1, 3, 1e4, 1e50))
+        feats.append(SqrtNode(feat_1, 2, 1e4, 1e50))
         raise InvalidFeatureMade(
             "Taking the sqrt of the feature leads to values outside of user specified bounds"
         )
@@ -62,7 +62,7 @@ def test_sqrt_node():
         pass
 
     try:
-        feats.append(SqrtNode(feat_2, 3, 1e-50, 1e50))
+        feats.append(SqrtNode(feat_2, 2, 1e-50, 1e50))
         raise InvalidFeatureMade(
             "Taking the sqrt of a feature whose domain contains values < 0"
         )
@@ -74,6 +74,7 @@ def test_sqrt_node():
     feats.append(SqNode(feat_2, 6, 1e-50, 1e50))
     feats.append(CbNode(feat_1, 7, 1e-50, 1e50))
     feats.append(SixPowNode(feat_2, 8, 1e-50, 1e50))
+    feats.append(SqrtNode(feat_1, 3, 1e-50, 1e50))
 
     try:
         feats.append(SqrtNode(feats[0], 13, 1e-50, 1e50))
@@ -105,6 +106,20 @@ def test_sqrt_node():
     except RuntimeError:
         pass
 
+    decomp = feats[-1].primary_feat_decomp
+    assert len(decomp.keys()) == 1
+    assert decomp["t_a"] == 1
+
+    assert feats[-1].n_leaves == 1
+    assert feats[-1].n_feats == 1
+    assert feats[-1].feat(0).expr == "t_a"
+    try:
+        feats[-1].feat(2)
+        raise ValueError("Accessing feature that should throw an error")
+    except (RuntimeError, IndexError):
+        pass
+    assert feats[-1].matlab_fxn_expr == "sqrt(t_a)"
+
 
 if __name__ == "__main__":
     test_sqrt_node()
diff --git a/tests/pytest/test_feature_creation/test_feat_generation/test_sub_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_sub_node.py
index 46ed37ff561f85d52f950570d74b65b6b0f62454..d76bd8c9ed77786a168ecdd90aa0be4aed5d77c7 100644
--- a/tests/pytest/test_feature_creation/test_feat_generation/test_sub_node.py
+++ b/tests/pytest/test_feature_creation/test_feat_generation/test_sub_node.py
@@ -86,6 +86,21 @@ def test_sub_node():
     except RuntimeError:
         pass
 
+    decomp = feats[1].primary_feat_decomp
+    assert len(decomp.keys()) == 2
+    assert decomp["t_a"] == 1
+    assert decomp["t_b"] == 1
+
+    assert feats[1].n_leaves == 2
+    assert feats[1].n_feats == 2
+    assert feats[1].feat(0).expr == "t_a"
+    try:
+        feats[1].feat(2)
+        raise ValueError("Accessing feature that should throw an error")
+    except (RuntimeError, IndexError):
+        pass
+    assert feats[1].matlab_fxn_expr == "(t_a - t_b)"
+
 
 if __name__ == "__main__":
     test_sub_node()
diff --git a/tests/pytest/test_feature_creation/test_feature_space/phi.txt b/tests/pytest/test_feature_creation/test_feature_space/phi.txt
index de2d862c88f533bbd62d444635bd18ced1efdf11..0d07afd07f1ba7704fccdfc794995d39597a5738 100644
--- a/tests/pytest/test_feature_creation/test_feature_space/phi.txt
+++ b/tests/pytest/test_feature_creation/test_feature_space/phi.txt
@@ -8,6 +8,34 @@
 7
 8
 9
+1|0|abd
+0|cbrt
+0|sqrt
+0|sp
+0|exp
+0|nexp
+0|log
+0|sin
+0|cos
+0|inv
+0|abs
+1|0|add: 2.0000000000000e+00,0.0000000000000e+00
+1|0|sub: 2.0000000000000e+00,0.0000000000000e+00
+1|0|abd: 2.0000000000000e+00,1.0000000000000e+00
+1|0|mult: 1.0000000000000e+00,2.0000000000000e+00
+1|0|div: 1.0000000000000e+00,2.0000000000000e+00
+0|inv: 1.0000000000000e+00,2.0000000000000e+00
+0|abs: 1.0000000000000e+00,2.0000000000000e+00
+0|log: 1.0000000000000e+00,2.0000000000000e+00
+0|exp: 2.0000000000000e+00,0.0000000000000e+00
+0|nexp: 2.0000000000000e+00,0.0000000000000e+00
+0|sin: 2.0000000000000e+00,1.0000000000000e+00
+0|cos: 2.0000000000000e+00,1.0000000000000e+00
+0|cbrt: 1.0000000000000e+00,2.0000000000000e+00
+0|sqrt: 1.0000000000000e+00,2.0000000000000e+00
+0|sq: 1.0000000000000e+00,2.0000000000000e+00
+0|cb: 1.0000000000000e+00,2.0000000000000e+00
+0|sp: 1.0000000000000e+00,2.0000000000000e+00
 0|sq
 0|cb
 8|sq
diff --git a/tests/pytest/test_feature_creation/test_feature_space/test_gen_feature_space_from_file.py b/tests/pytest/test_feature_creation/test_feature_space/test_gen_feature_space_from_file.py
index af20963482a010354636060cc3847ef33ecd4b73..bfe269a4055a56eca65c6e75f22b2510a7f0097c 100644
--- a/tests/pytest/test_feature_creation/test_feature_space/test_gen_feature_space_from_file.py
+++ b/tests/pytest/test_feature_creation/test_feature_space/test_gen_feature_space_from_file.py
@@ -35,8 +35,8 @@ def test_gen_feature_space_from_file():
         FeatureNode(
             ff,
             f"feat_{ff}",
-            np.random.random(task_sizes_train[0]) * 1e2 - 50,
-            np.random.random(task_sizes_test[0]) * 1e2 - 50,
+            np.random.uniform(1e-5, 100.0, task_sizes_train[0]),
+            np.random.uniform(1e-5, 100.0, task_sizes_test[0]),
             Unit(),
         )
         for ff in range(10)
diff --git a/tests/pytest/test_feature_creation/test_parameterize/test_param_abs.py b/tests/pytest/test_feature_creation/test_parameterize/test_param_abs.py
index c7ee38f203cac3d797e28f7b3fc0568b09428951..5a0d89bc6b955c45840962e5ddeb06cf6dbd58e0 100644
--- a/tests/pytest/test_feature_creation/test_parameterize/test_param_abs.py
+++ b/tests/pytest/test_feature_creation/test_parameterize/test_param_abs.py
@@ -49,9 +49,12 @@ def test_param_abs_node():
 
     feat_node = AbsParamNode(feat_1, 2, 1e-50, 1e50)
     feat_node.get_parameters(optimizer)
-    print(feat_node.parameters)
+
     assert check_feat_parmeters(feat_node, prop)
 
+    p_0, p_1 = feat_node.parameters
+    assert feat_node.matlab_fxn_expr == f"abs({p_0:.1f}.*t_a{p_1:+11.6e})"
+
 
 if __name__ == "__main__":
     test_param_abs_node()
diff --git a/tests/pytest/test_feature_creation/test_parameterize/test_param_abs_diff.py b/tests/pytest/test_feature_creation/test_parameterize/test_param_abs_diff.py
index 0e27d3541acdb53bb5ce0bab12b5df525167d968..ee7a65c1d378aa66bcf68ca88fed8c4e8f71d355 100644
--- a/tests/pytest/test_feature_creation/test_parameterize/test_param_abs_diff.py
+++ b/tests/pytest/test_feature_creation/test_parameterize/test_param_abs_diff.py
@@ -55,6 +55,9 @@ def test_param_abs_diff_node():
 
     assert check_feat_parmeters(feat_node, prop)
 
+    p_0, p_1 = feat_node.parameters
+    assert feat_node.matlab_fxn_expr == f"abs(t_a - ({p_0:.6e}.*x_a{p_1:+11.6e}))"
+
 
 if __name__ == "__main__":
     test_param_abs_diff_node()
diff --git a/tests/pytest/test_feature_creation/test_parameterize/test_param_add.py b/tests/pytest/test_feature_creation/test_parameterize/test_param_add.py
index 6d7d35ced976f031e53d077ba3d59e22f025824e..0c9a7dada5b8e4db05229074dc14617f28816440 100644
--- a/tests/pytest/test_feature_creation/test_parameterize/test_param_add.py
+++ b/tests/pytest/test_feature_creation/test_parameterize/test_param_add.py
@@ -55,6 +55,9 @@ def test_param_add_node():
 
     assert check_feat_parmeters(feat_node, prop)
 
+    p_0, p_1 = feat_node.parameters
+    assert feat_node.matlab_fxn_expr == f"(t_a + ({p_0:.6e}.*x_a))"
+
 
 if __name__ == "__main__":
     test_param_add_node()
diff --git a/tests/pytest/test_feature_creation/test_parameterize/test_param_cb.py b/tests/pytest/test_feature_creation/test_parameterize/test_param_cb.py
index 7bd015a0213388f23d347e426205a96d254eaf1c..3904b9c3852851acfec6929b2fc53740e817cc5d 100644
--- a/tests/pytest/test_feature_creation/test_parameterize/test_param_cb.py
+++ b/tests/pytest/test_feature_creation/test_parameterize/test_param_cb.py
@@ -49,9 +49,12 @@ def test_param_cb_node():
 
     feat_node = CbParamNode(feat_1, 2, 1e-50, 1e50)
     feat_node.get_parameters(optimizer)
-    print(feat_node.parameters)
+
     assert check_feat_parmeters(feat_node, prop)
 
+    p_0, p_1 = feat_node.parameters
+    assert feat_node.matlab_fxn_expr == f"(t_a{p_1:+11.6e}).^3"
+
 
 if __name__ == "__main__":
     test_param_cb_node()
diff --git a/tests/pytest/test_feature_creation/test_parameterize/test_param_cbrt.py b/tests/pytest/test_feature_creation/test_parameterize/test_param_cbrt.py
index 861920f9c4d511011a480ae05cb2d2d612576364..93a00b5dc6bd67c29bfb4ac2e8c04987d7e6b9db 100644
--- a/tests/pytest/test_feature_creation/test_parameterize/test_param_cbrt.py
+++ b/tests/pytest/test_feature_creation/test_parameterize/test_param_cbrt.py
@@ -52,6 +52,9 @@ def test_param_cbrt_node():
 
     assert check_feat_parmeters(feat_node, prop)
 
+    p_0, p_1 = feat_node.parameters
+    assert feat_node.matlab_fxn_expr == f"nthroot(t_a{p_1:+11.6e}, 3)"
+
 
 if __name__ == "__main__":
     test_param_cbrt_node()
diff --git a/tests/pytest/test_feature_creation/test_parameterize/test_param_cos.py b/tests/pytest/test_feature_creation/test_parameterize/test_param_cos.py
index 0e57422251122c8609fc9853eac10a0370a00d94..f89c86caa02602d81f9e407b316c72959bcf0c27 100644
--- a/tests/pytest/test_feature_creation/test_parameterize/test_param_cos.py
+++ b/tests/pytest/test_feature_creation/test_parameterize/test_param_cos.py
@@ -52,6 +52,9 @@ def test_param_cos_node():
 
     assert check_feat_parmeters(feat_node, prop)
 
+    p_0, p_1 = feat_node.parameters
+    assert feat_node.matlab_fxn_expr == f"cos({p_0:.6e}.*t_a{p_1:+11.6e})"
+
 
 if __name__ == "__main__":
     test_param_cos_node()
diff --git a/tests/pytest/test_feature_creation/test_parameterize/test_param_div.py b/tests/pytest/test_feature_creation/test_parameterize/test_param_div.py
index be4dbc1c320dbef67732f2b227a74456bc246e66..d9fbf1bda547cf83489cc8180bad95ddc2b8449b 100644
--- a/tests/pytest/test_feature_creation/test_parameterize/test_param_div.py
+++ b/tests/pytest/test_feature_creation/test_parameterize/test_param_div.py
@@ -56,6 +56,9 @@ def test_param_div_node():
 
     assert check_feat_parmeters(feat_node, prop)
 
+    p_0, p_1 = feat_node.parameters
+    assert feat_node.matlab_fxn_expr == f"(t_a ./ (x_a{p_1:+11.6e}))"
+
 
 if __name__ == "__main__":
     test_param_div_node()
diff --git a/tests/pytest/test_feature_creation/test_parameterize/test_param_exp.py b/tests/pytest/test_feature_creation/test_parameterize/test_param_exp.py
index 61c17fdab5950caa1ebd8799aa66d82aeabfd95e..6e130bb3540fe566da7005726c0b18f5cca0c6a1 100644
--- a/tests/pytest/test_feature_creation/test_parameterize/test_param_exp.py
+++ b/tests/pytest/test_feature_creation/test_parameterize/test_param_exp.py
@@ -52,6 +52,10 @@ def test_param_exp_node():
 
     assert check_feat_parmeters(feat_node, prop)
 
+    p_0, p_1 = feat_node.parameters
+
+    assert feat_node.matlab_fxn_expr == f"exp({p_0:.6e}.*t_a)"
+
 
 if __name__ == "__main__":
     test_param_exp_node()
diff --git a/tests/pytest/test_feature_creation/test_parameterize/test_param_inv.py b/tests/pytest/test_feature_creation/test_parameterize/test_param_inv.py
index 492984bcc725b0927e37ddc7d17f381dcb8cf9ce..743933ff3d55444077477987555858d3ad9c9351 100644
--- a/tests/pytest/test_feature_creation/test_parameterize/test_param_inv.py
+++ b/tests/pytest/test_feature_creation/test_parameterize/test_param_inv.py
@@ -52,6 +52,9 @@ def test_param_inv_node():
 
     assert check_feat_parmeters(feat_node, prop)
 
+    p_0, p_1 = feat_node.parameters
+    assert feat_node.matlab_fxn_expr == f"(t_a{p_1:+11.6e}).^(-1)"
+
 
 if __name__ == "__main__":
     test_param_inv_node()
diff --git a/tests/pytest/test_feature_creation/test_parameterize/test_param_log.py b/tests/pytest/test_feature_creation/test_parameterize/test_param_log.py
index 33195bcbef5f5e492ddb477b0bdfbddac53cc95d..13c594c6be73a4bea014c86c4fa828dc77e63bf4 100644
--- a/tests/pytest/test_feature_creation/test_parameterize/test_param_log.py
+++ b/tests/pytest/test_feature_creation/test_parameterize/test_param_log.py
@@ -52,6 +52,9 @@ def test_param_log_node():
 
     assert check_feat_parmeters(feat_node, prop)
 
+    p_0, p_1 = feat_node.parameters
+    assert feat_node.matlab_fxn_expr == f"log({p_0:.1f}*t_a{p_1:+11.6e})"
+
 
 if __name__ == "__main__":
     test_param_log_node()
diff --git a/tests/pytest/test_feature_creation/test_parameterize/test_param_neg_exp.py b/tests/pytest/test_feature_creation/test_parameterize/test_param_neg_exp.py
index 6d8fcda8af0d1cd5cd28c07c9c87e7bdac144b55..0b9c5834ce502949b98db76bca78374681cb4bdf 100644
--- a/tests/pytest/test_feature_creation/test_parameterize/test_param_neg_exp.py
+++ b/tests/pytest/test_feature_creation/test_parameterize/test_param_neg_exp.py
@@ -49,9 +49,12 @@ def test_param_neg_exp_node():
 
     feat_node = NegExpParamNode(feat_1, 2, 1e-50, 1e50)
     feat_node.get_parameters(optimizer)
-    print(feat_node.parameters)
+
     assert check_feat_parmeters(feat_node, prop)
 
+    p_0, p_1 = feat_node.parameters
+    assert feat_node.matlab_fxn_expr == f"exp(-1.0 .* ({p_0:.6e}.*t_a))"
+
 
 if __name__ == "__main__":
     test_param_neg_exp_node()
diff --git a/tests/pytest/test_feature_creation/test_parameterize/test_param_sin.py b/tests/pytest/test_feature_creation/test_parameterize/test_param_sin.py
index 5de305a9db4b5d88365194643d36df5e9f8c5e9f..2d5280a442d8d9b8acaff8577303919850503e69 100644
--- a/tests/pytest/test_feature_creation/test_parameterize/test_param_sin.py
+++ b/tests/pytest/test_feature_creation/test_parameterize/test_param_sin.py
@@ -52,6 +52,9 @@ def test_param_sin_node():
 
     assert check_feat_parmeters(feat_node, prop)
 
+    p_0, p_1 = feat_node.parameters
+    assert feat_node.matlab_fxn_expr == f"sin({p_0:.6e}.*t_a{p_1:+11.6e})"
+
 
 if __name__ == "__main__":
     test_param_sin_node()
diff --git a/tests/pytest/test_feature_creation/test_parameterize/test_param_six_pow.py b/tests/pytest/test_feature_creation/test_parameterize/test_param_six_pow.py
index 2a2376dc24fd122c2a769c5a3e3ed089df0ed7cd..c5aaee568ec7c6e0fba046838271bb12af798959 100644
--- a/tests/pytest/test_feature_creation/test_parameterize/test_param_six_pow.py
+++ b/tests/pytest/test_feature_creation/test_parameterize/test_param_six_pow.py
@@ -52,6 +52,9 @@ def test_param_six_pow_node():
 
     assert check_feat_parmeters(feat_node, prop)
 
+    p_0, p_1 = feat_node.parameters
+    assert feat_node.matlab_fxn_expr == f"(t_a{p_1:+11.6e}).^6"
+
 
 if __name__ == "__main__":
     test_param_six_pow_node()
diff --git a/tests/pytest/test_feature_creation/test_parameterize/test_param_sq.py b/tests/pytest/test_feature_creation/test_parameterize/test_param_sq.py
index 50102b38df4c0f171ae46748ae929026bfce9dde..9e7a7363820d5334d7af21f818d9131036c019bf 100644
--- a/tests/pytest/test_feature_creation/test_parameterize/test_param_sq.py
+++ b/tests/pytest/test_feature_creation/test_parameterize/test_param_sq.py
@@ -52,6 +52,9 @@ def test_param_sq_node():
 
     assert check_feat_parmeters(feat_node, prop)
 
+    p_0, p_1 = feat_node.parameters
+    assert feat_node.matlab_fxn_expr == f"(t_a{p_1:+11.6e}).^2"
+
 
 if __name__ == "__main__":
     test_param_sq_node()
diff --git a/tests/pytest/test_feature_creation/test_parameterize/test_param_sqrt.py b/tests/pytest/test_feature_creation/test_parameterize/test_param_sqrt.py
index f5144c7c3c81cf0332cd5593d2875b254b448c69..a03af2e5d49f37c126af25a66cf5bfd9141eaf25 100644
--- a/tests/pytest/test_feature_creation/test_parameterize/test_param_sqrt.py
+++ b/tests/pytest/test_feature_creation/test_parameterize/test_param_sqrt.py
@@ -52,6 +52,9 @@ def test_param_sqrt_node():
 
     assert check_feat_parmeters(feat_node, prop)
 
+    p_0, p_1 = feat_node.parameters
+    assert feat_node.matlab_fxn_expr == f"sqrt({p_0:.1f}.*t_a{p_1:+11.6e})"
+
 
 if __name__ == "__main__":
     test_param_sqrt_node()
diff --git a/tests/pytest/test_feature_creation/test_parameterize/test_param_sub.py b/tests/pytest/test_feature_creation/test_parameterize/test_param_sub.py
index 615a76cf6a4f3ab0e2e79d9a38b14bf4aa154640..3ac9aab86157c856363d7c22a77ab9e5870c37df 100644
--- a/tests/pytest/test_feature_creation/test_parameterize/test_param_sub.py
+++ b/tests/pytest/test_feature_creation/test_parameterize/test_param_sub.py
@@ -55,6 +55,9 @@ def test_param_sub_node():
 
     assert check_feat_parmeters(feat_node, prop)
 
+    p_0, p_1 = feat_node.parameters
+    assert feat_node.matlab_fxn_expr == f"(t_a - ({p_0:.6e}.*x_a))"
+
 
 if __name__ == "__main__":
     test_param_sub_node()