Commit 62f9ac6a authored by Thomas Purcell's avatar Thomas Purcell
Browse files

Add functionality to set up the central storage piecewise

So that when the input setters are called, the central storage is updated accordingly
parent 48858ad5
......@@ -50,50 +50,37 @@ std::vector<double> node_value_arrs::TEMP_STORAGE_TEST_ARR;
// Initialize the central value storage for the primary features only.
//
// Sets the sample/feature bookkeeping variables and allocates the training and
// test value arrays. MAX_RUNG and the on-the-fly temporary storage are NOT
// touched here; call set_max_rung separately to size those (piecewise setup).
//
// @param n_samples The number of training samples for each feature
// @param n_samples_test The number of test samples for each feature
// @param n_primary_feat The number of primary features
void node_value_arrs::initialize_values_arr(
    const int n_samples,
    const int n_samples_test,
    const int n_primary_feat
)
{
    N_SAMPLES = n_samples;
    N_SAMPLES_TEST = n_samples_test;
    N_RUNGS_STORED = 0;
    N_STORE_FEATURES = n_primary_feat;
    N_PRIMARY_FEATURES = n_primary_feat;

    // One contiguous slab of n_samples (or n_samples_test) values per stored feature
    VALUES_ARR = std::vector<double>(N_STORE_FEATURES * N_SAMPLES);
    TEST_VALUES_ARR = std::vector<double>(N_STORE_FEATURES * N_SAMPLES_TEST);
}
// Initialize the central storage and set the maximum rung in a single call.
//
// Delegates to the three-argument overload for the base storage, optionally
// installs a single default task covering all samples, then sizes the
// rung-dependent temporary storage via set_max_rung.
//
// @param n_samples The number of training samples for each feature
// @param n_samples_test The number of test samples for each feature
// @param n_primary_feat The number of primary features
// @param max_rung The maximum rung for the calculation
// @param set_task_sz If true treat all samples as one task
// @param use_params If true also initialize the parameterized-node storage
void node_value_arrs::initialize_values_arr(
    const int n_samples,
    const int n_samples_test,
    const int n_primary_feat,
    const int max_rung,
    const bool set_task_sz,
    const bool use_params
)
{
    initialize_values_arr(n_samples, n_samples_test, n_primary_feat);
    if(set_task_sz)
    {
        // Default: a single task containing every sample
        set_task_sz_train({n_samples});
        set_task_sz_test({n_samples_test});
    }
    set_max_rung(max_rung, use_params);
}
void node_value_arrs::initialize_values_arr(
......@@ -104,19 +91,16 @@ void node_value_arrs::initialize_values_arr(
const bool use_params
)
{
TASK_SZ_TRAIN = task_sz_train;
TASK_SZ_TEST = task_sz_test;
TASK_START_TRAIN = std::vector<int>(TASK_SZ_TRAIN.size(), 0);
std::copy_n(TASK_SZ_TRAIN.begin(), TASK_SZ_TRAIN.size() - 1, &TASK_START_TRAIN[1]);
initialize_values_arr(
std::accumulate(task_sz_train.begin(), task_sz_train.end(), 0),
std::accumulate(task_sz_test.begin(), task_sz_test.end(), 0),
n_primary_feat,
max_rung,
false,
use_params
n_primary_feat
);
set_task_sz_train(task_sz_train);
set_task_sz_test(task_sz_test);
set_max_rung(max_rung, use_params);
}
void node_value_arrs::initialize_param_storage()
......@@ -127,13 +111,42 @@ void node_value_arrs::initialize_param_storage()
PARAM_STORAGE_TEST_ARR = std::vector<double>(N_SAMPLES_TEST * (N_PARAM_OP_SLOTS + 1) * MAX_N_THREADS);
}
// Set MAX_RUNG and (re)allocate the per-thread temporary storage arrays.
//
// @param max_rung The maximum rung for the calculation (must be >= 0)
// @param use_params If true also allocate the parameterized-node storage
void node_value_arrs::set_max_rung(const int max_rung, bool use_params)
{
    if(max_rung < 0)
    {
        throw std::logic_error("Maximum rung of the features is less than 0");
    }
    if(max_rung == 0)
    {
        std::cerr << "Warning requested calculation has a maximum rung of 0" << std::endl;
    }

    MAX_RUNG = max_rung;

    // A full binary expression tree of depth max_rung has 2^(max_rung) - 1
    // internal nodes; two value slots are kept per node
    N_OP_SLOTS = 2 * (static_cast<int>(std::pow(2, max_rung)) - 1);

    // Every thread gets its own register/value scratch region; -1 marks an
    // unused register slot
    const int n_slots = MAX_N_THREADS * (N_OP_SLOTS * N_PRIMARY_FEATURES + 1);
    TEMP_STORAGE_ARR = std::vector<double>(n_slots * N_SAMPLES);
    TEMP_STORAGE_REG = std::vector<int>(n_slots, -1);
    TEMP_STORAGE_TEST_ARR = std::vector<double>(n_slots * N_SAMPLES_TEST);
    TEMP_STORAGE_TEST_REG = std::vector<int>(n_slots, -1);

    if(use_params || (N_PARAM_OP_SLOTS > 0))
    {
        initialize_param_storage();
    }
}
// Set the training task sizes and recompute each task's starting index.
//
// @param task_sz_train Number of training samples per task; must sum to N_SAMPLES
// @throws std::logic_error if the total disagrees with the stored N_SAMPLES
void node_value_arrs::set_task_sz_train(const std::vector<int> task_sz_train)
{
    const int n_samp_new = std::accumulate(task_sz_train.begin(), task_sz_train.end(), 0);
    if(n_samp_new != N_SAMPLES)
    {
        throw std::logic_error("The total number of samples has changed from " + std::to_string(N_SAMPLES) + " to " + std::to_string(n_samp_new) + ", task_sz_train is wrong.");
    }

    TASK_SZ_TRAIN = task_sz_train;
    TASK_START_TRAIN = std::vector<int>(TASK_SZ_TRAIN.size(), 0);

    // Task i starts at the cumulative size of tasks 0..i-1. The previous
    // copy_n of the raw sizes was only correct for at most two tasks.
    if(TASK_SZ_TRAIN.size() > 1)
    {
        std::partial_sum(TASK_SZ_TRAIN.begin(), TASK_SZ_TRAIN.end() - 1, TASK_START_TRAIN.begin() + 1);
    }
}
......@@ -141,7 +154,8 @@ void node_value_arrs::set_task_sz_test(const std::vector<int> task_sz_test)
{
if(std::accumulate(task_sz_test.begin(), task_sz_test.end(), 0) != N_SAMPLES_TEST)
{
throw std::logic_error("The total number of test samples has changed, task_sz_test is wrong.");
int n_samp_new = std::accumulate(task_sz_test.begin(), task_sz_test.end(), 0);
throw std::logic_error("The total number of test samples has changed from " + std::to_string(N_SAMPLES_TEST) + " to " + std::to_string(n_samp_new) + ", task_sz_test is wrong.");
}
TASK_SZ_TEST = task_sz_test;
}
......
......@@ -71,6 +71,18 @@ namespace node_value_arrs
extern int MAX_N_THREADS; //!< Get the maximum number of threads possible for a calculation
extern int N_OP_SLOTS; //!< The number of possible nodes of the binary expression tree that maybe calculated on the fly
extern int N_PARAM_OP_SLOTS; //!< The number of possible non-leaf nodes of the binary expression tree
/**
* @brief Initialize all central storage vectors/descriptive variables without changing MAX_RUNG
*
* @param n_samples The number of training samples for each feature (Sum of all elements in TASK_SZ_TRAIN)
* @param n_samples_test The number of test samples for each feature (Sum of all elements in TASK_SZ_TEST)
* @param n_primary_feat The number of primary features
*/
void initialize_values_arr(
const int n_samples,
const int n_samples_test,
const int n_primary_feat
);
/**
* @brief Initialize all central storage vectors/descriptive variables
......@@ -170,6 +182,13 @@ namespace node_value_arrs
*/
void set_task_sz_test(const std::vector<int> task_sz_test);
/**
 * @brief Set max_rung and initialize the temporary storage arrays
 *
 * @param max_rung The maximum rung for the calculation
 * @param use_params If true also initialize the parameterized-node storage arrays
 */
void set_max_rung(const int max_rung, bool use_params=false);
/**
* @brief Get the operator slot associated with a given rung/offset
*
......
......@@ -513,6 +513,7 @@ void InputParser::set_task_sizes_train(std::vector<int> task_sizes_train)
{
throw std::logic_error("The total number of samples in the updated task size vector is not the same as the number of samples ids for the training set.");
}
node_value_arrs::set_task_sz_train(_task_sizes_train);
}
void InputParser::set_task_sizes_test(std::vector<int> task_sizes_test)
......@@ -558,6 +559,7 @@ void InputParser::set_task_sizes_test(std::vector<int> task_sizes_test)
{
throw std::logic_error("The total number of samples in the updated task size vector is not the same as the number of samples ids for the test set.");
}
node_value_arrs::set_task_sz_test(_task_sizes_test);
}
void InputParser::set_task_names(std::vector<std::string> task_names)
......@@ -770,6 +772,14 @@ void InputParser::set_phi_0(std::vector<FeatureNode> phi_0)
throw std::logic_error("The total number of samples in the updated primary feature set is not the same as the number of samples ids for the test set.");
}
_phi_0 = phi_0;
node_value_arrs::initialize_values_arr(
_phi_0[0].n_samp(), _phi_0[0].n_samp_test(), _phi_0.size()
);
for(auto& feat : _phi_0)
{
feat.set_value();
feat.set_test_value();
}
}
void strip_comments(std::string& filename)
......
......@@ -603,7 +603,11 @@ public:
/**
* @brief Set Maximum rung for the feature creation
*/
// Keep the local copy and the central storage in sync; enable parameterized
// storage whenever any parameterizable operators were requested.
inline void set_max_rung(const int max_rung)
{
    _max_rung = max_rung;
    node_value_arrs::set_max_rung(max_rung, _allowed_param_ops.size() > 0);
}
// DocString: inputs_get_n_rung_store
/**
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment