Commit 40385597 authored by Thomas Purcell

Fix all compiler errors from the merge

parent 27b898f5
@@ -187,16 +187,18 @@ ModelClassifier::ModelClassifier(std::string train_file)
     for(int ff = 0; ff < feature_expr_train.size(); ++ff)
     {
         split_str = str_utils::split_string_trim(feature_expr_train[ff]);
         int rung = std::stoi(split_str[0]);
-        std::string unit_str = split_str[1];
-        std::string postfix_expr = split_str[2];
-        std::string expr = split_str[3];
+        std::string domain_str = split_str[1];
+        std::string unit_str = split_str[2];
+        std::string postfix_expr = split_str[3];
+        std::string expr = split_str[4];
         std::vector<double> feat_val(_n_samp_train);
         std::vector<double> feat_test_val = {};
         std::copy_n(&_D_train[ff * _n_samp_train], _n_samp_train, feat_val.data());
-        model_node_ptr feat = std::make_shared<ModelNode>(ff, rung, expr, postfix_expr, feat_val, feat_test_val, Unit(unit_str));
+        model_node_ptr feat = std::make_shared<ModelNode>(ff, rung, expr, postfix_expr, feat_val, feat_test_val, Domain(domain_str), Unit(unit_str));
         _feats.push_back(feat);
     }
 }
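Note: the serialized feature line read by this constructor (and by the ModelRegressor counterpart below) now carries a domain field in column 2, shifting the unit, postfix expression, and expression one column to the right. A minimal, self-contained sketch of the new five-field split; the example line, the domain text, and the split_trim helper are illustrative stand-ins, since str_utils::split_string_trim and the exact serialized Domain format are not shown in this diff:

    #include <iostream>
    #include <sstream>
    #include <string>
    #include <vector>

    // Hypothetical stand-in for str_utils::split_string_trim: split on a
    // delimiter and trim surrounding whitespace. The real helper may differ.
    std::vector<std::string> split_trim(const std::string& line, char delim = ',')
    {
        std::vector<std::string> out;
        std::stringstream ss(line);
        std::string tok;
        while(std::getline(ss, tok, delim))
        {
            auto b = tok.find_first_not_of(" \t");
            auto e = tok.find_last_not_of(" \t");
            out.push_back(b == std::string::npos ? "" : tok.substr(b, e - b + 1));
        }
        return out;
    }

    int main()
    {
        // New five-field layout: rung, domain, unit, postfix expression, expression
        std::string feature_line = "1, [0.0:100.0], eV, r0 r1 add, (r0 + r1)";
        std::vector<std::string> split_str = split_trim(feature_line);

        int rung = std::stoi(split_str[0]);           // 1
        std::string domain_str = split_str[1];        // "[0.0:100.0]" (illustrative)
        std::string unit_str = split_str[2];          // "eV"
        std::string postfix_expr = split_str[3];      // "r0 r1 add"
        std::string expr = split_str[4];              // "(r0 + r1)"

        std::cout << rung << " | " << domain_str << " | " << unit_str << " | "
                  << postfix_expr << " | " << expr << std::endl;
    }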
@@ -215,9 +217,10 @@ ModelClassifier::ModelClassifier(std::string train_file, std::string test_file)
         split_str = str_utils::split_string_trim(feature_expr_train[ff]);
         int rung = std::stoi(split_str[0]);
-        std::string unit_str = split_str[1];
-        std::string postfix_expr = split_str[2];
-        std::string expr = split_str[3];
+        std::string domain_str = split_str[1];
+        std::string unit_str = split_str[2];
+        std::string postfix_expr = split_str[3];
+        std::string expr = split_str[4];
         std::vector<double> feat_val(_n_samp_train);
         std::vector<double> feat_test_val(_n_samp_test);
@@ -225,7 +228,7 @@ ModelClassifier::ModelClassifier(std::string train_file, std::string test_file)
         std::copy_n(&_D_train[ff * _n_samp_train], _n_samp_train, feat_val.data());
         std::copy_n(&_D_test[ff * _n_samp_test], _n_samp_test, feat_test_val.data());
-        _feats.push_back(std::make_shared<ModelNode>(ff, rung, expr, postfix_expr, feat_val, feat_test_val, Unit(unit_str)));
+        _feats.push_back(std::make_shared<ModelNode>(ff, rung, expr, postfix_expr, feat_val, feat_test_val, Domain(domain_str), Unit(unit_str)));
     }
 }
@@ -426,7 +429,7 @@ void ModelClassifier::to_file(std::string filename, bool train, std::vector<int>
     out_file_stream << "# Feature Rung, Domain, Units, and Expressions" << std::endl;
     for(int ff = 0; ff < _feats.size(); ++ff)
-        out_file_stream << std::setw(6) << std::left << "# " + std::to_string(ff) + ", " << std::to_string(_feats[ff]->rung()) + ", " << std::to_string(_feats[ff]->domain()) + ", " << std::setw(50) << _feats[ff]->unit().toString() + ", " << _feats[ff]->postfix_expr() + "," << _feats[ff]->expr() << std::endl;
+        out_file_stream << std::setw(6) << std::left << "# " + std::to_string(ff) + ", " << std::to_string(_feats[ff]->rung()) + ", " << _feats[ff]->domain().toString() + ", " << std::setw(50) << _feats[ff]->unit().toString() + ", " << _feats[ff]->postfix_expr() + "," << _feats[ff]->expr() << std::endl;
     out_file_stream << "# Number of Samples Per Task" << std::endl;
     if(train)
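Note: the domain is now an object rather than a number here, so the header writer calls domain().toString() in place of std::to_string(). A small sketch of the resulting "# index, rung, domain, unit, postfix, expr" header line, using placeholder values (the domain and unit text, and the real toString() output, are assumptions), just to show the std::setw/std::left layout:

    #include <iomanip>
    #include <iostream>
    #include <string>

    int main()
    {
        // Placeholder metadata standing in for one entry of _feats.
        int ff = 0;
        int rung = 1;
        std::string domain = "[0.0:100.0]";   // assumed Domain::toString() text
        std::string unit = "eV";
        std::string postfix = "r0 r1 add";
        std::string expr = "(r0 + r1)";

        // Index column padded to 6 characters, unit column padded to 50.
        std::cout << std::setw(6) << std::left << "# " + std::to_string(ff) + ", "
                  << std::to_string(rung) + ", "
                  << domain + ", "
                  << std::setw(50) << unit + ", "
                  << postfix + ","
                  << expr << std::endl;
    }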
@@ -67,16 +67,18 @@ ModelRegressor::ModelRegressor(std::string train_file)
     for(int ff = 0; ff < feature_expr_train.size(); ++ff)
     {
         split_str = str_utils::split_string_trim(feature_expr_train[ff]);
         int rung = std::stoi(split_str[0]);
-        std::string unit_str = split_str[1];
-        std::string postfix_expr = split_str[2];
-        std::string expr = split_str[3];
+        std::string domain_str = split_str[1];
+        std::string unit_str = split_str[2];
+        std::string postfix_expr = split_str[3];
+        std::string expr = split_str[4];
         std::vector<double> feat_val(_n_samp_train);
         std::vector<double> feat_test_val = {};
         std::copy_n(&_D_train[ff * _n_samp_train], _n_samp_train, feat_val.data());
-        model_node_ptr feat = std::make_shared<ModelNode>(ff, rung, expr, postfix_expr, feat_val, feat_test_val, Unit(unit_str));
+        model_node_ptr feat = std::make_shared<ModelNode>(ff, rung, expr, postfix_expr, feat_val, feat_test_val, Domain(domain_str), Unit(unit_str));
         _feats.push_back(feat);
     }
@@ -96,9 +98,10 @@ ModelRegressor::ModelRegressor(std::string train_file, std::string test_file)
         split_str = str_utils::split_string_trim(feature_expr_train[ff]);
         int rung = std::stoi(split_str[0]);
-        std::string unit_str = split_str[1];
-        std::string postfix_expr = split_str[2];
-        std::string expr = split_str[3];
+        std::string domain_str = split_str[1];
+        std::string unit_str = split_str[2];
+        std::string postfix_expr = split_str[3];
+        std::string expr = split_str[4];
         std::vector<double> feat_val(_n_samp_train);
         std::vector<double> feat_test_val(_n_samp_test);
@@ -106,7 +109,7 @@ ModelRegressor::ModelRegressor(std::string train_file, std::string test_file)
         std::copy_n(&_D_train[ff * _n_samp_train], _n_samp_train, feat_val.data());
         std::copy_n(&_D_test[ff * _n_samp_test], _n_samp_test, feat_test_val.data());
-        _feats.push_back(std::make_shared<ModelNode>(ff, rung, expr, postfix_expr, feat_val, feat_test_val, Unit(unit_str)));
+        _feats.push_back(std::make_shared<ModelNode>(ff, rung, expr, postfix_expr, feat_val, feat_test_val, Domain(domain_str), Unit(unit_str)));
     }
 }
@@ -305,7 +308,7 @@ void ModelRegressor::to_file(std::string filename, bool train, std::vector<int>
     out_file_stream << "# Feature Rung, Domain, Units, and Expressions" << std::endl;
     for(int ff = 0; ff < _feats.size(); ++ff)
-        out_file_stream << std::setw(6) << std::left << "# " + std::to_string(ff) + ", " << std::to_string(_feats[ff]->rung()) + ", " << std::to_string(_feats[ff]->domain()) + ", " << std::setw(50) << _feats[ff]->unit().toString() + ", " << _feats[ff]->postfix_expr() + "," << _feats[ff]->expr() << std::endl;
+        out_file_stream << std::setw(6) << std::left << "# " + std::to_string(ff) + ", " << std::to_string(_feats[ff]->rung()) + ", " << _feats[ff]->domain().toString() + ", " << std::setw(50) << _feats[ff]->unit().toString() + ", " << _feats[ff]->postfix_expr() + "," << _feats[ff]->expr() << std::endl;
     out_file_stream << "# Number of Samples Per Task" << std::endl;
     if(train)
@@ -367,7 +367,7 @@ void SISSOClassifier::l0_norm(std::vector<double>& prop, int n_dim)
         for(int ii = 0; ii < n_dim; ++ii)
         {
             int index = all_inds_min[rr * n_dim + ii];
-            min_nodes[ii] = std::make_shared<ModelNode>(_feat_space->phi_selected()[index]->arr_ind(), _feat_space->phi_selected()[index]->rung(), _feat_space->phi_selected()[index]->expr(), _feat_space->phi_selected()[index]->postfix_expr(), _feat_space->phi_selected()[index]->value(), _feat_space->phi_selected()[index]->test_value(), _feat_space->phi_selected()[index]->unit());
+            min_nodes[ii] = std::make_shared<ModelNode>(_feat_space->phi_selected()[index]->arr_ind(), _feat_space->phi_selected()[index]->rung(), _feat_space->phi_selected()[index]->expr(), _feat_space->phi_selected()[index]->postfix_expr(), _feat_space->phi_selected()[index]->value(), _feat_space->phi_selected()[index]->test_value(), _feat_space->phi_selected()[index]->domain(), _feat_space->phi_selected()[index]->unit());
         }
         models.push_back(ModelClassifier(_prop_unit, _prop, _prop_test, min_nodes, _task_sizes_train, _task_sizes_test, _fix_intercept));
     }
@@ -137,7 +137,7 @@ void SISSORegressor::l0_norm(std::vector<double>& prop, int n_dim)
         for(int ii = 0; ii < n_dim; ++ii)
         {
             int index = all_inds_min[inds[rr] * n_dim + ii];
-            min_nodes[ii] = std::make_shared<ModelNode>(_feat_space->phi_selected()[index]->arr_ind(), _feat_space->phi_selected()[index]->rung(), _feat_space->phi_selected()[index]->expr(), _feat_space->phi_selected()[index]->postfix_expr(), _feat_space->phi_selected()[index]->value(), _feat_space->phi_selected()[index]->test_value(), _feat_space->phi_selected()[index]->unit());
+            min_nodes[ii] = std::make_shared<ModelNode>(_feat_space->phi_selected()[index]->arr_ind(), _feat_space->phi_selected()[index]->rung(), _feat_space->phi_selected()[index]->expr(), _feat_space->phi_selected()[index]->postfix_expr(), _feat_space->phi_selected()[index]->value(), _feat_space->phi_selected()[index]->test_value(), _feat_space->phi_selected()[index]->domain(), _feat_space->phi_selected()[index]->unit());
         }
         models.push_back(ModelRegressor(_prop_unit, _prop, _prop_test, min_nodes, _task_sizes_train, _task_sizes_test, _fix_intercept));
     }
@@ -103,7 +103,7 @@ public:
        std::map<std::string, std::vector<std::string>> allowed_param_ops,
        std::vector<double> prop,
        std::vector<int> task_sizes,
-       std::string project_type="pearson",
+       std::string project_type="regression",
        int max_phi=1,
        int n_sis_select=1,
        int max_store_rung=-1,
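Note: the default projection type in this declaration moves from "pearson" to "regression". A trimmed-down, hypothetical illustration of what the new default means for callers that omit the argument (the function name and the alternative value are made up; the real constructor takes many more parameters):

    #include <iostream>
    #include <string>

    // Stand-in for the declaration above: only the defaulted parameter is kept.
    void build_feature_space(std::string project_type = "regression")
    {
        std::cout << "projection type: " << project_type << std::endl;
    }

    int main()
    {
        build_feature_space();                    // now prints "regression" (was "pearson")
        build_feature_space("classification");    // explicitly passed values are unaffected
    }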
@@ -6,19 +6,8 @@ ModelNode::ModelNode()
 ModelNode::ModelNode(int feat_ind, int rung, std::string expr, std::string post_fix_expr, std::vector<double> value, std::vector<double> test_value, Domain domain, Unit unit) :
     FeatureNode(feat_ind, expr, value, test_value, domain, unit, false),
     _expr_postfix(post_fix_expr),
-    _b_remap_svm(0.0),
-    _w_remap_svm(1.0),
     _rung(rung)
-{
-    double w_remap_svm_temp = 1.0 / (*std::max_element(_value.begin(), _value.end()) - *std::min_element(_value.begin(), _value.end()));
-    double b_remap_svm_temp = *std::min_element(_value.begin(), _value.end());
-    _w_remap_svm = w_remap_svm_temp;
-    _b_remap_svm = b_remap_svm_temp;
-    std::transform(_value.begin(), _value.end(), _value_svm.begin(), [w_remap_svm_temp, b_remap_svm_temp](double val){return (val - b_remap_svm_temp) * w_remap_svm_temp;});
-    std::transform(_test_value.begin(), _test_value.end(), _test_value_svm.begin(), [w_remap_svm_temp, b_remap_svm_temp](double val){return (val - b_remap_svm_temp) * w_remap_svm_temp;});
-}
+{}
 ModelNode::~ModelNode()
 {}
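Note: the constructor body that remapped _value and _test_value onto [0, 1] for the SVM, together with the _b_remap_svm/_w_remap_svm initializers, is dropped here. For reference, the removed mapping was val -> (val - min) / (max - min); a standalone restatement with illustrative names:

    #include <algorithm>
    #include <iostream>
    #include <vector>

    // Min-max remap used for the SVM copies of the feature values:
    // w = 1 / (max - min), b = min, remapped value = (val - b) * w,
    // so the training values span [0, 1].
    std::vector<double> remap_svm(const std::vector<double>& value)
    {
        const double b = *std::min_element(value.begin(), value.end());
        const double w = 1.0 / (*std::max_element(value.begin(), value.end()) - b);

        std::vector<double> remapped(value.size());
        std::transform(value.begin(), value.end(), remapped.begin(),
                       [w, b](double val){ return (val - b) * w; });
        return remapped;
    }

    int main()
    {
        for(double v : remap_svm({2.0, 4.0, 6.0}))
            std::cout << v << " ";    // prints: 0 0.5 1
        std::cout << std::endl;
    }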
@@ -320,7 +320,7 @@ void InputParser::generate_feature_space(std::shared_ptr<MPI_Interface> comm, st
         }
         phi_0.push_back(std::make_shared<FeatureNode>(ff, headers[ff], data[ff], test_data[ff], *domain_list[ff], units[ff]));
     }
-    _feat_space = std::make_shared<FeatureSpace>(comm, phi_0, _opset, _param_opset, _prop_train, _task_sizes_train, _max_rung, _n_sis_select, _max_store_rung, _n_rung_generate, _cross_cor_max, _l_bound, _u_bound);
+    _feat_space = std::make_shared<FeatureSpace>(comm, phi_0, _opset, _param_opset, _prop_train, _task_sizes_train, _calc_type, _max_rung, _n_sis_select, _max_store_rung, _n_rung_generate, _cross_cor_max, _l_bound, _u_bound);
 }
 void stripComments(std::string& filename)