Commit c0396b20 authored by Thomas Purcell's avatar Thomas Purcell
Browse files

Remove need for making new feature nodes for selected features

-Saves memory
- Should allow for larger SIS selections to get past MPI send size limits

Made Model Nodes

Models now do not connect to the main value arrays, so they will be easier to serialize in the future
parent c86927c0
......@@ -10,6 +10,7 @@ __top_builddir__sisso_cpp_SOURCES = \
feature_creation/units/Unit.cpp \
feature_creation/node/Node.cpp \
feature_creation/node/FeatureNode.cpp \
feature_creation/node/ModelNode.cpp \
feature_creation/node/operator_nodes/OperatorNode.cpp \
feature_creation/node/operator_nodes/allowed_operator_nodes/add.cpp \
feature_creation/node/operator_nodes/allowed_operator_nodes/subtract.cpp \
......
#include <descriptor_identifier/Model/Model.hpp>
Model::Model(std::vector<double> prop_train, std::vector<double> prop_test, std::vector<node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test) :
Model::Model(std::vector<double> prop_train, std::vector<double> prop_test, std::vector<model_node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test) :
_n_samp_train(feats[0]->n_samp()),
_n_samp_test(feats[0]->n_test_samp()),
_n_dim(feats.size() + 1),
......
......@@ -7,8 +7,9 @@
#include<fstream>
#include<iostream>
#include <feature_creation/node/FeatureNode.hpp>
#include <feature_creation/node/ModelNode.hpp>
typedef std::shared_ptr<ModelNode> model_node_ptr;
/**
* @brief Class to store the models found from SISSO
*
......@@ -19,7 +20,7 @@ class Model
int _n_samp_test; //!< The number of test samples per feature
int _n_dim; //!< Dimension of the model
std::vector<node_ptr> _feats; //!< List of features in the model
std::vector<model_node_ptr> _feats; //!< List of features in the model
std::vector<std::vector<double>> _coefs; //!< Coefficients for the features
std::vector<double> _prop_train; //!< The property to be modeled
......@@ -41,7 +42,7 @@ public:
* @param prop The property
* @param feats The features for the model
*/
Model(std::vector<double> prop_train, std::vector<double> prop_test, std::vector<node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test);
Model(std::vector<double> prop_train, std::vector<double> prop_test, std::vector<model_node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test);
/**
......
......@@ -97,7 +97,8 @@ void SISSORegressor::fit()
std::vector<Model> models;
for(int rr = 0; rr < _n_residual; ++rr)
{
models.push_back(Model(_prop, _prop_test, {_feat_space->phi_selected()[rr]}, _task_sizes_train, _task_sizes_test));
model_node_ptr model_feat = std::make_shared<ModelNode>(_feat_space->phi_selected()[rr]->arr_ind(), _feat_space->phi_selected()[rr]->rung(), _feat_space->phi_selected()[rr]->expr(), _feat_space->phi_selected()[rr]->value(), _feat_space->phi_selected()[rr]->test_value(), _feat_space->phi_selected()[rr]->unit());
models.push_back(Model(_prop, _prop_test, {model_feat}, _task_sizes_train, _task_sizes_test));
models.back().copy_error(&residual[rr * _n_samp]);
}
_models.push_back(models);
......@@ -176,13 +177,17 @@ void SISSORegressor::l0_norm(std::vector<double>& prop, int n_dim)
inds = util_funcs::argsort(all_min_error);
std::vector<node_ptr> min_nodes(n_dim);
std::vector<model_node_ptr> min_nodes(n_dim);
std::vector<Model> models;
for(int rr = 0; rr < _n_residual; ++rr)
{
for(int ii = 0; ii < n_dim; ++ii)
min_nodes[ii] = _feat_space->phi_selected()[all_inds_min[inds[rr] * n_dim + ii]];
{
int index = all_inds_min[inds[rr] * n_dim + ii];
min_nodes[ii] = std::make_shared<ModelNode>(_feat_space->phi_selected()[index]->arr_ind(), _feat_space->phi_selected()[index]->rung(), _feat_space->phi_selected()[index]->expr(), _feat_space->phi_selected()[index]->value(), _feat_space->phi_selected()[index]->test_value(), _feat_space->phi_selected()[index]->unit());
// min_nodes[ii] = _feat_space->phi_selected()[all_inds_min[inds[rr] * n_dim + ii]];
}
models.push_back(Model(_prop, _prop_test, min_nodes, _task_sizes_train, _task_sizes_test));
}
......
......@@ -368,12 +368,21 @@ void FeatureSpace::project_generated(double* prop, int size, std::vector<node_pt
if(is_valid)
{
node_ptr new_feat = std::make_shared<FeatureNode>(node_value_arrs::N_SELECTED - _n_sis_select + end_check, generated_phi[inds[ii]]->expr(), generated_phi[inds[ii]]->value(), generated_phi[inds[ii]]->test_value(), generated_phi[inds[ii]]->unit(), true);
phi_sel.insert(phi_sel.begin() + end_check, new_feat);
// node_ptr new_feat = std::make_shared<FeatureNode>(node_value_arrs::N_SELECTED - _n_sis_select + end_check, generated_phi[inds[ii]]->expr(), generated_phi[inds[ii]]->value(), generated_phi[inds[ii]]->test_value(), generated_phi[inds[ii]]->unit(), true);
generated_phi[inds[ii]]->selected() = true;
generated_phi[inds[ii]]->set_dmat_ind(node_value_arrs::N_SELECTED - _n_sis_select + end_check);
generated_phi[inds[ii]]->reindex(_phi.size() + node_value_arrs::N_SELECTED - _n_sis_select + end_check);
generated_phi[inds[ii]]->set_value();
phi_sel.insert(phi_sel.begin() + end_check, generated_phi[inds[ii]]);
// std::cout << phi_sel[end_check]->expr() << '\t' << phi_sel[end_check]->test_value()[0] << std::endl;
scores_sel.insert(scores_sel.begin() + end_check, cur_score);
for(int jj = end_check + 1; jj < _n_sis_select; ++jj)
{
phi_sel[jj]->reindex(node_value_arrs::N_SELECTED - _n_sis_select + jj);
phi_sel[jj]->set_dmat_ind(node_value_arrs::N_SELECTED - _n_sis_select + jj);
if(phi_sel[jj]->rung() == _max_phi)
phi_sel[jj]->reindex(_phi.size() + node_value_arrs::N_SELECTED - _n_sis_select + jj);
phi_sel[jj]->set_value();
}
}
......@@ -382,6 +391,11 @@ void FeatureSpace::project_generated(double* prop, int size, std::vector<node_pt
if(scores_sel.size() > _n_sis_select)
{
scores_sel.erase(scores_sel.begin() + _n_sis_select, scores_sel.end());
for(int ii = _n_sis_select; ii < phi_sel.size(); ++ii)
{
phi_sel[ii]->selected() = false;
phi_sel[ii]->set_dmat_ind(-1);
}
phi_sel.erase(phi_sel.begin() + _n_sis_select, phi_sel.end());
}
}
......@@ -421,6 +435,8 @@ void FeatureSpace::sis(std::vector<double>& prop)
std::vector<double> scores_sel(_n_sis_select, 0.0);
std::vector<node_ptr> phi_sel;
int feat_num_width = 4 + static_cast<int>(std::ceil(std::log10(_n_sis_select * _max_phi)));
phi_sel.reserve(_n_sis_select);
int cur_feat = node_value_arrs::N_SELECTED;
......@@ -453,7 +469,11 @@ void FeatureSpace::sis(std::vector<double>& prop)
if(is_valid)
{
scores_sel[cur_feat_local] = _scores[inds[ii]];
phi_sel.push_back(std::make_shared<FeatureNode>(cur_feat + cur_feat_local, _phi[inds[ii]]->expr(), _phi[inds[ii]]->value(), _phi[inds[ii]]->test_value(), _phi[inds[ii]]->unit(), true));
// phi_sel.push_back(std::make_shared<FeatureNode>(cur_feat + cur_feat_local, _phi[inds[ii]]->expr(), _phi[inds[ii]]->value(), _phi[inds[ii]]->test_value(), _phi[inds[ii]]->unit(), true));
phi_sel.push_back(_phi[inds[ii]]);
phi_sel.back()->selected() = true;
phi_sel.back()->set_dmat_ind(cur_feat + cur_feat_local);
phi_sel.back()->set_value();
++cur_feat_local;
}
++ii;
......@@ -464,6 +484,7 @@ void FeatureSpace::sis(std::vector<double>& prop)
phi_sel.resize(cur_feat_local);
scores_sel.resize(cur_feat_local);
project_generated(prop.data(), prop.size(), phi_sel, scores_sel, scores_comp);
std::fill_n(node_value_arrs::TEMP_STORAGE_TEST_REG.data(), node_value_arrs::TEMP_STORAGE_TEST_REG.size(), -1);
}
phi_sel.resize(_n_sis_select);
......@@ -510,9 +531,14 @@ void FeatureSpace::sis(std::vector<double>& prop)
if(std::none_of(inds.begin(), inds.end(), [&compare_ind](int i1){return i1 == compare_ind;}))
{
scores_sel.erase(scores_sel.begin() + ii);
phi_sel.back()->selected() = false;
phi_sel.back()->set_dmat_ind(-1);
phi_sel.erase(phi_sel.begin() + ii);
}
}
for(auto& feat : phi_sel)
feat->selected() = false;
scores_sel.resize(_n_sis_select, 0.0);
phi_sel.resize(_n_sis_select, nullptr);
......@@ -535,9 +561,10 @@ void FeatureSpace::sis(std::vector<double>& prop)
inds = util_funcs::argsort(sent_scores);
for(int ii = 0; ii < _n_sis_select; ++ii)
{
std::cout << std::setw(22) << std::setprecision(18) << std::left << sent_scores[inds[ii]] << sent_phi[inds[ii]]->expr() << std::endl;
std::cout << std::setw(feat_num_width) <<std::left << cur_feat << std::setw(24) << std::setprecision(18) << std::left << sent_scores[inds[ii]] << sent_phi[inds[ii]]->expr() << std::endl;
_phi_selected.push_back(sent_phi[inds[ii]]);
_phi_selected.back()->reindex(cur_feat);
_phi_selected.back()->selected() = true;
_phi_selected.back()->set_dmat_ind(cur_feat);
++cur_feat;
}
}
......@@ -557,14 +584,32 @@ void FeatureSpace::sis(std::vector<double>& prop)
{
if(valid_score_against_current(cur_feat_local, sent_phi[inds[ii]]->value().data(), sent_scores[inds[ii]], scores_sel, scores_comp))
{
std::cout << std::setw(22) << std::setprecision(18) << std::left << sent_scores[inds[ii]] << sent_phi[inds[ii]]->expr() << std::endl;
std::cout << std::setw(feat_num_width) <<std::left << cur_feat << std::setw(24) << std::setprecision(18) << std::left << sent_scores[inds[ii]] << sent_phi[inds[ii]]->expr() << std::endl;
_phi_selected.push_back(sent_phi[inds[ii]]);
_phi_selected.back()->reindex(cur_feat);
_phi_selected.back()->selected() = true;
_phi_selected.back()->set_dmat_ind(cur_feat);
_phi_selected.back()->set_value();
scores_sel[cur_feat_local] = sent_scores[inds[ii]];
++cur_feat_local;
++cur_feat;
}
else
{
sent_phi[inds[ii]]->selected() = false;
sent_phi[inds[ii]]->set_dmat_ind(-1);
}
++ii;
}
while(ii < sent_phi.size())
{
if(sent_phi[inds[ii]])
{
sent_phi[inds[ii]]->selected() = false;
sent_phi[inds[ii]]->set_dmat_ind(-1);
}
++ii;
}
}
......@@ -577,6 +622,14 @@ void FeatureSpace::sis(std::vector<double>& prop)
_mpi_comm->send(0, _mpi_comm->cantorTagGen(_mpi_comm->rank(), 0, 2, 0), scores_sel.data(), _n_sis_select);
_mpi_comm->send(0, _mpi_comm->cantorTagGen(_mpi_comm->rank(), 0, 2, 1), phi_sel.data(), _n_sis_select);
_phi_selected.resize(node_value_arrs::N_SELECTED);
for(auto& feat : phi_sel)
{
if(feat)
{
feat->selected() = false;
feat->set_dmat_ind(-1);
}
}
}
mpi::broadcast(*_mpi_comm, &_phi_selected[_phi_selected.size() - _n_sis_select], _n_sis_select, 0);
for(int ii = _phi_selected.size() - _n_sis_select; ii < _phi_selected.size(); ++ii)
......@@ -590,14 +643,15 @@ void FeatureSpace::sis(std::vector<double>& prop)
cur_feat_local = 0;
for(auto& feat : phi_sel)
{
std::cout << std::setw(22) << std::setprecision(18) << std::left << scores_sel[cur_feat_local] << phi_sel[cur_feat_local]->expr() << std::endl;
std::cout << std::setw(feat_num_width) <<std::left << cur_feat << std::setw(24) << std::setprecision(18) << std::left << scores_sel[cur_feat_local] << phi_sel[cur_feat_local]->expr() << std::endl;
_phi_selected.push_back(feat);
_phi_selected.back()->reindex(cur_feat);
_phi_selected.back()->set_dmat_ind(cur_feat);
_phi_selected.back()->set_value();
++cur_feat;
++cur_feat_local;
}
}
if(cur_feat != node_value_arrs::N_SELECTED)
throw std::logic_error("SIS went through all features and did not select enough.");
......
......@@ -3,13 +3,12 @@
FeatureNode::FeatureNode()
{}
FeatureNode::FeatureNode(int feat_ind, std::string expr, std::vector<double> value, std::vector<double> test_value, Unit unit, bool selected) :
FeatureNode::FeatureNode(int feat_ind, std::string expr, std::vector<double> value, std::vector<double> test_value, Unit unit) :
Node(feat_ind, value.size(), test_value.size()),
_value(value),
_test_value(test_value),
_unit(unit),
_expr(expr),
_selected(selected)
_expr(expr)
{
set_value();
set_test_value();
......
......@@ -26,7 +26,6 @@ class FeatureNode: public Node
void serialize(Archive& ar, const unsigned int version)
{
ar & boost::serialization::base_object<Node>(*this);
ar & _selected;
ar & _expr;
ar & _unit;
ar & _value;
......@@ -38,7 +37,7 @@ protected:
std::vector<double> _test_value; //!< test values for the feature
Unit _unit; //!< Unit for the feature
std::string _expr; //!< Expression of the feature
bool _selected; //!< True if the features was selected
public:
/**
* @brief Base Constructor
......@@ -55,7 +54,7 @@ public:
* @param value Value of the feature for each test sample
* @param unit Unit of the feature
*/
FeatureNode(int feat_ind, std::string expr, std::vector<double> value, std::vector<double> test_value, Unit unit, bool selected=false);
FeatureNode(int feat_ind, std::string expr, std::vector<double> value, std::vector<double> test_value, Unit unit);
FeatureNode(const FeatureNode&) = default;
FeatureNode(FeatureNode&&) = default;
......@@ -118,7 +117,7 @@ public:
/**
* @brief Accessor function to the value of the feature
*/
inline double* value_ptr(int offset = 0){return _selected ? node_value_arrs::get_d_matrix_ptr(_arr_ind) : node_value_arrs::get_value_ptr(_arr_ind, offset);}
inline double* value_ptr(int offset = 0){return _selected ? node_value_arrs::get_d_matrix_ptr(_d_mat_ind) : node_value_arrs::get_value_ptr(_arr_ind, offset);}
/**
* @brief Accessor function to the value of the feature's test set
......
#include <feature_creation/node/ModelNode.hpp>
ModelNode::ModelNode()
{}
ModelNode::ModelNode(int feat_ind, int rung, std::string expr, std::vector<double> value, std::vector<double> test_value, Unit unit) :
FeatureNode(feat_ind, expr, value, test_value, unit),
_rung(rung)
{}
ModelNode::~ModelNode()
{}
void ModelNode::update_add_sub_leaves(std::map<std::string, int>& add_sub_leaves, int pl_mn, int& expected_abs_tot)
{
if(add_sub_leaves.count(_expr) > 0)
add_sub_leaves[_expr] += pl_mn;
else
add_sub_leaves[_expr] = pl_mn;
++expected_abs_tot;
}
void ModelNode::update_div_mult_leaves(std::map<std::string, double>& div_mult_leaves, double fact, double& expected_abs_tot)
{
if(div_mult_leaves.count(_expr) > 0)
div_mult_leaves[_expr] += fact;
else
div_mult_leaves[_expr] = fact;
expected_abs_tot += std::abs(fact);
}
// BOOST_CLASS_EXPORT(ModelNode)
#ifndef MODEL_NODE
#define MODEL_NODE
#include <feature_creation/node/FeatureNode.hpp>
/**
* @brief Node that describe the leaves of the operator graph (Initial features in Phi_0)
*/
class ModelNode: public FeatureNode
{
friend class boost::serialization::access;
/**
* @brief Serialization function to send over MPI
*
* @param ar Archive representation of node
*/
template <typename Archive>
void serialize(Archive& ar, const unsigned int version)
{
ar & boost::serialization::base_object<FeatureNode>(*this);
ar & _rung;
}
protected:
int _rung;
public:
/**
* @brief Base Constructor
* @details This is only used for serialization
*/
ModelNode();
/**
* @brief Constructs a feature node
*
* @param feat_ind index of the feature
* @param expr Expression for the feature
* @param value Value of the feature for each sample
* @param value Value of the feature for each test sample
* @param unit Unit of the feature
*/
ModelNode(int feat_ind, int rung, std::string expr, std::vector<double> value, std::vector<double> test_value, Unit unit);
ModelNode(const ModelNode&) = default;
ModelNode(ModelNode&&) = default;
ModelNode& operator=(const ModelNode&) = default;
ModelNode& operator=(ModelNode&&) = default;
~ModelNode();
/**
* @brief Set the value for the feature
*/
inline void set_value(int offset = -1){return;}
/**
* @brief Set the test value for the feature
*/
inline void set_test_value(int offset = -1){return;}
/**
* @brief Check if the feature contains NaN
*/
inline bool is_nan(){return false;}
/**
* @brief Check if feature is constant
*/
inline bool is_const(){return false;}
/**
* @brief Returns the type of node this is
*/
inline NODE_TYPE type(){return NODE_TYPE::MODEL_FEATURE;}
/**
* @brief Accessor function to the value of the feature
*/
inline double* value_ptr(int offset = 0){return _value.data();}
/**
* @brief Accessor function to the value of the feature's test set
*/
inline double* test_value_ptr(int offset = 0){return _test_value.data();}
/**
* @brief return the rung of the feature
*/
inline int rung(int cur_rung = 0){return _rung;}
/**
* @brief update the dictionary used to check if an Add/Sub node is valid
*
* @param add_sub_leaves the dictionary used to check if an Add/Sub node is valid
* @param pl_mn if for an addition node: 1 if for a subtraction node: -1
* @param expected_abs_tot The expected absolute sum of all values in add_sub_leaves
*/
void update_add_sub_leaves(std::map<std::string, int>& add_sub_leaves, int pl_mn, int& expected_abs_tot);
/**
* @brief update the dictionary used to check if
* @details [long description]
*
* @param add_sub_leaves [description]
*/
void update_div_mult_leaves(std::map<std::string, double>& div_mult_leaves, double fact, double& expected_abs_tot);
};
#endif
......@@ -7,7 +7,9 @@ Node::Node(int feat_ind, int n_samp, int n_test_samp) :
_n_test_samp(n_test_samp),
_n_samp(n_samp),
_feat_ind(feat_ind),
_arr_ind(feat_ind)
_arr_ind(feat_ind),
_d_mat_ind(-1),
_selected(false)
{}
Node::~Node()
......
......@@ -35,6 +35,8 @@ class Node
ar & _n_samp;
ar & _feat_ind;
ar & _arr_ind;
ar & _d_mat_ind;
ar & _selected;
}
protected:
......@@ -42,7 +44,9 @@ protected:
int _n_samp; //!< Number of samples in the feature's training set
int _feat_ind; //!< Index of the feature
int _arr_ind; //!< Index of the feature for the value arrays
int _d_mat_ind; //!< Index for the descriptor matrix
bool _selected; //!< True if the feature is selected
public:
/**
* @brief Base Constructor
......@@ -104,6 +108,11 @@ public:
*/
inline int& arr_ind(){return _arr_ind;}
inline bool& selected(){return _selected;}
inline void set_dmat_ind(int ind){_d_mat_ind = ind;}
inline int d_mat_ind(){return _d_mat_ind;}
/**
* @brief Get the expression for the overall descriptor (From head node down)
*/
......
......@@ -94,6 +94,9 @@ public:
*/
double* value_ptr(int offset=-1)
{
if(_selected)
return node_value_arrs::get_d_matrix_ptr(_d_mat_ind);
offset = (offset == -1) ? rung() : offset;
if((rung() > node_value_arrs::N_RUNGS_STORED) && (node_value_arrs::temp_storage_reg(_arr_ind, offset) != _arr_ind))
{
......
......@@ -26,6 +26,9 @@ public:
inline void set_value(int offset = -1)
{
if(_selected)
allowed_op_funcs::abs_diff(_n_samp, _feats[0]->value_ptr(offset + 2), _feats[1]->value_ptr(offset + 1), node_value_arrs::get_d_matrix_ptr(_d_mat_ind));
offset = (offset == -1) ? rung() : offset;
allowed_op_funcs::abs_diff(_n_samp, _feats[0]->value_ptr(offset + 2), _feats[1]->value_ptr(offset + 1), node_value_arrs::get_value_ptr(_arr_ind, offset));
}
......
......@@ -25,6 +25,9 @@ public:
inline void set_value(int offset = -1)
{
if(_selected)
allowed_op_funcs::abs(_n_samp, _feats[0]->value_ptr(offset + 2), node_value_arrs::get_d_matrix_ptr(_d_mat_ind));
offset = (offset == -1) ? rung() : offset;
allowed_op_funcs::abs(_n_samp, _feats[0]->value_ptr(offset + 2), node_value_arrs::get_value_ptr(_arr_ind, offset));
}
......
......@@ -25,6 +25,9 @@ public:
inline void set_value(int offset = -1)
{
if(_selected)
allowed_op_funcs::add(_n_samp, _feats[0]->value_ptr(offset + 2), _feats[1]->value_ptr(offset + 1), node_value_arrs::get_d_matrix_ptr(_d_mat_ind));
offset = (offset == -1) ? rung() : offset;
allowed_op_funcs::add(_n_samp, _feats[0]->value_ptr(offset + 2), _feats[1]->value_ptr(offset + 1), node_value_arrs::get_value_ptr(_arr_ind, offset));
}
......
......@@ -25,6 +25,9 @@ public:
inline void set_value(int offset = -1)
{
if(_selected)
allowed_op_funcs::cos(_n_samp, _feats[0]->value_ptr(offset + 2), node_value_arrs::get_d_matrix_ptr(_d_mat_ind));
offset = (offset == -1) ? rung() : offset;
allowed_op_funcs::cos(_n_samp, _feats[0]->value_ptr(offset + 2), node_value_arrs::get_value_ptr(_arr_ind, offset));
}
......
......@@ -25,6 +25,9 @@ public:
inline void set_value(int offset = -1)
{
if(_selected)
allowed_op_funcs::cb(_n_samp, _feats[0]->value_ptr(offset + 2), node_value_arrs::get_d_matrix_ptr(_d_mat_ind));
offset = (offset == -1) ? rung() : offset;
allowed_op_funcs::cb(_n_samp, _feats[0]->value_ptr(offset + 2), node_value_arrs::get_value_ptr(_arr_ind, offset));
}
......
......@@ -25,6 +25,9 @@ public:
inline void set_value(int offset = -1)
{
if(_selected)
allowed_op_funcs::cbrt(_n_samp, _feats[0]->value_ptr(offset + 2), node_value_arrs::get_d_matrix_ptr(_d_mat_ind));
offset = (offset == -1) ? rung() : offset;
allowed_op_funcs::cbrt(_n_samp, _feats[0]->value_ptr(offset + 2), node_value_arrs::get_value_ptr(_arr_ind, offset));
}
......
......@@ -25,6 +25,9 @@ public:
inline void set_value(int offset = -1)
{
if(_selected)
allowed_op_funcs::div(_n_samp, _feats[0]->value_ptr(offset + 2), _feats[1]->value_ptr(offset + 1), node_value_arrs::get_d_matrix_ptr(_d_mat_ind));
offset = (offset == -1) ? rung() : offset;
allowed_op_funcs::div(_n_samp, _feats[0]->value_ptr(offset + 2), _feats[1]->value_ptr(offset + 1), node_value_arrs::get_value_ptr(_arr_ind, offset));
}
......
......@@ -25,6 +25,9 @@ public:
inline void set_value(int offset = -1)
{
if(_selected)
allowed_op_funcs::exp(_n_samp, _feats[0]->value_ptr(offset + 2), node_value_arrs::get_d_matrix_ptr(_d_mat_ind));