diff --git a/src/feature_creation/feature_space/FeatureSpace.hpp b/src/feature_creation/feature_space/FeatureSpace.hpp
index 49648876e1ed98e082a05f527aafab53670db16a..d1d3ae670b28201ec929fdca65ae2523d458a58d 100644
--- a/src/feature_creation/feature_space/FeatureSpace.hpp
+++ b/src/feature_creation/feature_space/FeatureSpace.hpp
@@ -255,13 +255,13 @@ public:
 
     // DocString: feat_space_n_rung_store
     /**
-     * @brief The number of rungs whose feature training data is stored in memory
+     * @brief The number of rungs whose feature's data is always stored in memory
      */
     inline int n_rung_store() const {return _n_rung_store;}
 
     // DocString: feat_space_n_rung_generate
     /**
-     * @brief The number of rungs to be generated on the fly during SIS
+     * @brief Either 0 or 1, and is the number of rungs to generate on the fly during SIS
      */
     inline int n_rung_generate() const {return _n_rung_generate;}
 
@@ -346,18 +346,16 @@ public:
     void project_generated(std::shared_ptr<LossFunction> loss, std::vector<node_ptr>& phi_selected, std::vector<double>& scores_selected);
 
     /**
-     * @brief Perform SIS on a feature set with a specified property
-     * @details Perform sure-independence screening with either the correct property or the error
+     * @brief Perform Sure-Independence Screening over the FeatureSpace. The features are ranked using a projection operator constructed using _project_type and the Property vector
      *
-     * @param prop The property to perform SIS over
+     * @param prop Vector containing the property vector (training data only)
      */
     void sis(const std::vector<double>& prop);
 
     /**
-     * @brief Perform SIS on a feature set with a specified loss function
-     * @details Perform sure-independence screening with either the correct property or the error
+     * @brief Perform Sure-Independence Screening over the FeatureSpace. The features are ranked using a projection operator defined in loss
      *
-     * @param loss The LossFunction to project over
+     * @param loss The LossFunction used to project over all of the features
      */
     void sis(std::shared_ptr<LossFunction> loss);
 
@@ -365,7 +363,7 @@ public:
     /**
      * @brief Is a feature in this process' _phi?
      *
-     * @param ind The index of the feature
+     * @param ind (int) The index of the feature
      *
      * @return True if feature is in this rank's _phi
      */
@@ -385,23 +383,22 @@ public:
 
     // DocString: feat_space_init_py_list
     /**
-     * @brief Constructor for the feature space that takes in python objects
-     * @details constructs the feature space from an initial set of features and a list of allowed operators (cpp definition in <python/feature_creation/FeatureSpace.cpp>)
+     * @brief FeatureSpace constructor given a set of primary features and operators
      *
-     * @param phi_0 (list) The initial set of features to combine
-     * @param allowed_ops (list) list of allowed operators
-     * @param allowed_param_ops (list) dictionary of the parameterizable operators and their associated free parameters
-     * @param prop (list) The property to be learned (training data)
-     * @param project_type (str) The projection operator to use
-     * @param max_phi (int) highest rung value for the calculation
-     * @param n_sis_select (int) number of features to select during each SIS step
-     * @param max_store_rung (int) number of rungs to calculate and store the value of the features for all samples
-     * @param n_rung_generate (int) number of rungs to generate on the fly during SIS (this must be 1 or 0 right now, possible to be higher with recursive algorithm)
-     * @param cross_corr_max (double) Maximum cross-correlation used for selecting features
-     * @param min_abs_feat_val (double) minimum absolute feature value
-     * @param max_abs_feat_val (double) maximum absolute feature value
-     * @param max_param_depth (int) the maximum paremterization depths for features
-     * @param reparam_residual (bool) If True then reparameterize using the residuals of each model
+     * @param phi_0 (list) The set of primary features
+     * @param allowed_ops (list) The list of allowed operators
+     * @param allowed_param_ops (list) The list of allowed operators to be used with non-linear optimization
+     * @param prop (list) List containing the property vector (training data only)
+     * @param project_type (str) The type of loss function/projection operator to use
+     * @param max_phi (int) The maximum rung of the feature (Height of the binary expression tree -1)
+     * @param n_sis_select (int) The number of features to select during each SIS step
+     * @param max_store_rung (int) The number of rungs whose feature's data is always stored in memory
+     * @param n_rung_generate (int) Either 0 or 1, and is the number of rungs to generate on the fly during SIS
+     * @param cross_corr_max (double) The maximum allowed cross-correlation value between selected features
+     * @param min_abs_feat_val (double) The minimum allowed absolute feature value for a feature
+     * @param max_abs_feat_val (double) The maximum allowed absolute feature value for a feature
+     * @param max_param_depth (int) The maximum depth in the binary expression tree to set non-linear optimization
+     * @param reparam_residual (bool) If True then reparameterize features using the residuals of each model
      */
     FeatureSpace(
         py::list phi_0,
@@ -422,23 +419,22 @@ public:
 
     // DocString: feat_space_init_np_array
     /**
-     * @brief Constructor for the feature space that takes in python and numpy objects
-     * @details constructs the feature space from an initial set of features and a list of allowed operators (cpp definition in <python/feature_creation/FeatureSpace.cpp>)
+     * @brief FeatureSpace constructor given a set of primary features and operators
      *
-     * @param phi_0 (list) The initial set of features to combine
-     * @param allowed_ops (list) list of allowed operators
-     * @param allowed_param_ops (list) dictionary of the parameterizable operators and their associated free parameters
-     * @param prop (np.ndarray) The property to be learned (training data)
-     * @param project_type (str) The projection operator to use
-     * @param max_phi (int) highest rung value for the calculation
-     * @param n_sis_select (int) number of features to select during each SIS step
-     * @param max_store_rung (int) number of rungs to calculate and store the value of the features for all samples
-     * @param n_rung_generate (int) number of rungs to generate on the fly during SIS (this must be 1 or 0 right now, possible to be higher with recursive algorithm)
-     * @param cross_corr_max (double) Maximum cross-correlation used for selecting features
-     * @param min_abs_feat_val (double) minimum absolute feature value
-     * @param max_abs_feat_val (double) maximum absolute feature value
-     * @param max_param_depth (int) the maximum paremterization depths for features
-     * @param reparam_residual (bool) If True then reparameterize using the residuals of each model
+     * @param phi_0 (list) The set of primary features
+     * @param allowed_ops (list) The list of allowed operators
+     * @param allowed_param_ops (list) The list of allowed operators to be used with non-linear optimization
+     * @param prop (np.ndarray) Array containing the property vector (training data only)
+     * @param project_type (str) The type of loss function/projection operator to use
+     * @param max_phi (int) The maximum rung of the feature (Height of the binary expression tree -1)
+     * @param n_sis_select (int) The number of features to select during each SIS step
+     * @param max_store_rung (int) The number of rungs whose feature's data is always stored in memory
+     * @param n_rung_generate (int) Either 0 or 1, and is the number of rungs to generate on the fly during SIS
+     * @param cross_corr_max (double) The maximum allowed cross-correlation value between selected features
+     * @param min_abs_feat_val (double) The minimum allowed absolute feature value for a feature
+     * @param max_abs_feat_val (double) The maximum allowed absolute feature value for a feature
+     * @param max_param_depth (int) The maximum depth in the binary expression tree to set non-linear optimization
+     * @param reparam_residual (bool) If True then reparameterize features using the residuals of each model
      */
     FeatureSpace(
         py::list phi_0,
@@ -461,20 +457,19 @@ public:
 
     // DocString: feat_space_ini_no_param_py_list
     /**
-     * @brief Constructor for the feature space that takes in python objects
-     * @details constructs the feature space from an initial set of features and a list of allowed operators (cpp definition in <python/feature_creation/FeatureSpace.cpp>)
+     * @brief FeatureSpace constructor given a set of primary features and operators
      *
-     * @param phi_0 (list) The initial set of features to combine
-     * @param allowed_ops (list) list of allowed operators
-     * @param prop (list) The property to be learned (training data)
-     * @param project_type (str) The projection operator to use
-     * @param max_phi (int) highest rung value for the calculation
-     * @param n_sis_select (int) number of features to select during each SIS step
-     * @param max_store_rung (int) number of rungs to calculate and store the value of the features for all samples
-     * @param n_rung_generate (int) number of rungs to generate on the fly during SIS (this must be 1 or 0 right now, possible to be higher with recursive algorithm)
-     * @param cross_corr_max (double) Maximum cross-correlation used for selecting features
-     * @param min_abs_feat_val (double) minimum absolute feature value
-     * @param max_abs_feat_val (double) maximum absolute feature value
+     * @param phi_0 (list) The set of primary features
+     * @param allowed_ops (list) The list of allowed operators
+     * @param prop (list) List containing the property vector (training data only)
+     * @param project_type (str) The type of loss function/projection operator to use
+     * @param max_phi (int) The maximum rung of the feature (Height of the binary expression tree -1)
+     * @param n_sis_select (int) The number of features to select during each SIS step
+     * @param max_store_rung (int) The number of rungs whose feature's data is always stored in memory
+     * @param n_rung_generate (int) Either 0 or 1, and is the number of rungs to generate on the fly during SIS
+     * @param cross_corr_max (double) The maximum allowed cross-correlation value between selected features
+     * @param min_abs_feat_val (double) The minimum allowed absolute feature value for a feature
+     * @param max_abs_feat_val (double) The maximum allowed absolute feature value for a feature
      */
     FeatureSpace(
         py::list phi_0,
@@ -492,20 +487,19 @@ public:
 
     // DocString: feat_space_init_no_param_np_array
     /**
-     * @brief Constructor for the feature space that takes in python and numpy objects
-     * @details constructs the feature space from an initial set of features and a list of allowed operators (cpp definition in <python/feature_creation/FeatureSpace.cpp>)
+     * @brief FeatureSpace constructor given a set of primary features and operators
      *
-     * @param phi_0 (list) The initial set of features to combine
-     * @param allowed_ops (list) list of allowed operators
-     * @param prop (np.ndarray) The property to be learned (training data)
-     * @param project_type (str) The projection operator to use
-     * @param max_phi (int) highest rung value for the calculation
-     * @param n_sis_select (int) number of features to select during each SIS step
-     * @param max_store_rung (int) number of rungs to calculate and store the value of the features for all samples
-     * @param n_rung_generate (int) number of rungs to generate on the fly during SIS (this must be 1 or 0 right now, possible to be higher with recursive algorithm)
-     * @param cross_corr_max (double) Maximum cross-correlation used for selecting features
-     * @param min_abs_feat_val (double) minimum absolute feature value
-     * @param max_abs_feat_val (double) maximum absolute feature value
+     * @param phi_0 (list) The set of primary features
+     * @param allowed_ops (list) The list of allowed operators
+     * @param prop (np.ndarray) Array containing the property vector (training data only)
+     * @param project_type (str) The type of loss function/projection operator to use
+     * @param max_phi (int) The maximum rung of the feature (Height of the binary expression tree -1)
+     * @param n_sis_select (int) The number of features to select during each SIS step
+     * @param max_store_rung (int) The number of rungs whose feature's data is always stored in memory
+     * @param n_rung_generate (int) Either 0 or 1, and is the number of rungs to generate on the fly during SIS
+     * @param cross_corr_max (double) The maximum allowed cross-correlation value between selected features
+     * @param min_abs_feat_val (double) The minimum allowed absolute feature value for a feature
+     * @param max_abs_feat_val (double) The maximum allowed absolute feature value for a feature
      */
     FeatureSpace(
         py::list phi_0,
@@ -522,18 +516,17 @@ public:
     );
     #endif
 
-    // DocString: feat_space_init_file_py_list
+    // DocString: feat_space_init_file_np_array
     /**
-     * @brief Constructor for the feature space that takes in python and numpy objects
-     * @details constructs the feature space from an initial set of features and a file containing postfix expressions for the features (cpp definition in <python/feature_creation/FeatureSpace.cpp>)
+     * @brief FeatureSpace constructor that uses a file containing postfix feature expressions to describe all features in Phi, and a primary feature set (cpp definition in <python/feature_creation/FeatureSpace.cpp>)
      *
-     * @param feature_file (str) The file with the postfix expressions for the feature space
-     * @param phi_0 (list) The initial set of features to combine
-     * @param prop (np.ndarray) The property to be learned (training data)
-     * @param task_sizes (list) The number of samples per task
-     * @param project_type (str) The projection operator to use
-     * @param n_sis_select (int) number of features to select during each SIS step
-     * @param cross_corr_max (double) Maximum cross-correlation used for selecting features
+     * @param feature_file (str) The file containing the postfix expressions of all features in the FeatureSpace
+     * @param phi_0 (list) The set of primary features
+     * @param prop (np.ndarray) Array containing the property vector (training data only)
+     * @param task_sizes (list) The number of samples in the training data per task
+     * @param project_type (str) The type of loss function/projection operator to use
+     * @param n_sis_select (int) The number of features to select during each SIS step
+     * @param cross_corr_max (double) The maximum allowed cross-correlation value between selected features
      */
     FeatureSpace(
         std::string feature_file,
@@ -545,18 +538,17 @@ public:
         double cross_corr_max=1.0
     );
 
-    // DocString: feat_space_init_file_np_array
+    // DocString: feat_space_init_file_py_list
     /**
-     * @brief Constructor for the feature space that takes in python and numpy objects
-     * @details constructs the feature space from an initial set of features and a file containing postfix expressions for the features (cpp definition in <python/feature_creation/FeatureSpace.cpp>)
+     * @brief FeatureSpace constructor that uses a file containing postfix feature expressions to describe all features in Phi, and a primary feature set (cpp definition in <python/feature_creation/FeatureSpace.cpp>)
      *
-     * @param feature_file (str) The file with the postfix expressions for the feature space
-     * @param phi_0 (list) The initial set of features to combine
-     * @param prop (list) The property to be learned (training data)
-     * @param task_sizes (list) The number of samples per task
-     * @param project_type (str) The projection operator to use
-     * @param n_sis_select (int) number of features to select during each SIS step
-     * @param cross_corr_max (double) Maximum cross-correlation used for selecting features
+     * @param feature_file (str) The file containing the postfix expressions of all features in the FeatureSpace
+     * @param phi_0 (list) The set of primary features
+     * @param prop (list) List containing the property vector (training data only)
+     * @param task_sizes (list) The number of samples in the training data per task
+     * @param project_type (str) The type of loss function/projection operator to use
+     * @param n_sis_select (int) The number of features to select during each SIS step
+     * @param cross_corr_max (double) The maximum allowed cross-correlation value between selected features
      */
     FeatureSpace(
         std::string feature_file,
@@ -570,9 +562,9 @@ public:
 
     // DocString: feat_space_sis_arr
     /**
-     * @brief Wrapper function for SIS using a numpy array
+     * @brief Perform Sure-Independence Screening over the FeatureSpace. The features are ranked using a projection operator constructed using _project_type and the Property vector
      *
-     * @param prop(np.ndarray) The property to perform SIS over as a numpy array
+     * @param prop (np.ndarray) Array containing the property vector (training data only)
      */
     inline void sis(np::ndarray prop)
     {
@@ -582,9 +574,9 @@ public:
 
     // DocString: feat_space_sis_list
     /**
-     * @brief Wrapper function for SIS using a python list
+     * @brief Perform Sure-Independence Screening over the FeatureSpace. The features are ranked using a projection operator constructed using _project_type and the Property vector
      *
-     * @param prop(list) The property to perform SIS over as a python list
+     * @param prop (list) List containing the property vector (training data only)
      */
     inline void sis(py::list prop)
     {
@@ -594,58 +586,51 @@ public:
 
     // DocString: feat_space_phi_selected_py
     /**
-     * @brief The selected feature space (cpp definition in <python/feature_creation/FeatureSpace.cpp>)
-     * @return _phi_selected as a python list
+     * @brief A list of all of the selected features
      */
     py::list phi_selected_py();
 
     // DocString: feat_space_phi0_py
     /**
-     * @brief The initial feature space (cpp definition in <python/feature_creation/FeatureSpace.cpp>)
-     * @return _phi0 as a python list
+     * @brief A list containing all of the primary features
      */
     py::list phi0_py();
 
     // DocString: feat_space_phi_py
     /**
-     * @brief The feature space (cpp definition in <python/feature_creation/FeatureSpace.cpp>)
-     * @return _phi as a python list
+     * @brief A list of all features in the FeatureSpace
      */
     py::list phi_py();
 
     // DocString: feat_space_scores_py
     /**
-     * @brief The vector of projection scores for SIS
-     * @return _scores as a numpy array
+     * @brief An array of all stored projection scores from SIS
      */
     inline np::ndarray scores_py(){return python_conv_utils::to_ndarray<double>(_scores);};
 
     // DocString: feat_space_task_sizes_py
     /**
-     * @brief The vector storing the number of samples in each task
-     * @return _task_sizes as a python list
+     * @brief A list of the number of samples in each task for the training data
      */
     inline py::list task_sizes_py(){return python_conv_utils::to_list<int>(_task_sizes);};
 
     // DocString: feat_space_allowed_ops_py
     /**
-     * @brief The list of allowed operator nodes
-     * @return _allowed_ops as a python list
+     * @brief The list of allowed operators
      */
     inline py::list allowed_ops_py(){return python_conv_utils::to_list<std::string>(_allowed_ops);}
 
     // DocString: feat_space_start_gen_py
     /**
-     * @brief The index in _phi where each generation starts
-     * @return _start_gen as a python list
+     * @brief A list containing the index of the first feature of each rung in the feature space.
      */
     inline py::list start_gen_py(){return python_conv_utils::to_list<int>(_start_gen);}
 
     // DocString: feat_space_get_feature
     /**
-     * @brief Return a feature at a specified index
+     * @brief Access the feature in _phi with an index ind
      *
-     * @param ind index of the feature to get
+     * @param ind (int) The index of the feature to get
      * @return A ModelNode of the feature at index ind
      */
     inline ModelNode get_feature(const int ind) const {return ModelNode(_phi[ind]);}
diff --git a/src/feature_creation/node/FeatureNode.hpp b/src/feature_creation/node/FeatureNode.hpp
index 0c1e8505046a53f95e328f5d576ba184b4025a9a..cd62424fe17ccfd757c7a9b9a68f5711485d94f4 100644
--- a/src/feature_creation/node/FeatureNode.hpp
+++ b/src/feature_creation/node/FeatureNode.hpp
@@ -74,27 +74,27 @@ public:
         const bool set_val=true
     );
     #ifdef PY_BINDINGS
-    // DocString: feat_node_init_list
+    // DocString: feat_node_init_arr
     /**
-     * @brief Constructs a feature node using numpy arrays (cpp definition in "python/feature_creation/FeatureNode.cpp)
+     * @brief Constructs a feature node using numpy arrays (cpp definition in python/feature_creation/FeatureNode.cpp)
      *
-     * @param feat_ind index of the feature
-     * @param expr Expression for the feature
-     * @param value Value of the feature for each sample
-     * @param test_value Value of the feature for each test sample
-     * @param unit Unit of the feature
+     * @param feat_ind (int) index of the feature
+     * @param expr (str) Expression for the feature
+     * @param value (np.ndarray) Value of the feature for each sample
+     * @param test_value (np.ndarray) Value of the feature for each test sample
+     * @param unit (Unit) Unit of the feature
      */
     FeatureNode(unsigned long int feat_ind, std::string expr, np::ndarray value, np::ndarray test_value, Unit unit);
 
-    // DocString: feat_node_init_arr
+    // DocString: feat_node_init_list
     /**
-     * @brief Constructs a feature node using Python lists (cpp definition in "python/feature_creation/FeatureNode.cpp)
+     * @brief Constructs a feature node using Python lists (cpp definition in python/feature_creation/FeatureNode.cpp)
      *
-     * @param feat_ind index of the feature
-     * @param expr Expression for the feature
-     * @param value Value of the feature for each sample
-     * @param test_value Value of the feature for each test sample
-     * @param unit Unit of the feature
+     * @param feat_ind (int) index of the feature
+     * @param expr (str) Expression for the feature
+     * @param value (list) Value of the feature for each sample
+     * @param test_value (list) Value of the feature for each test sample
+     * @param unit (Unit) Unit of the feature
      */
     FeatureNode(unsigned long int feat_ind, std::string expr, py::list value, py::list test_value, Unit unit);
     #endif
@@ -147,17 +147,13 @@ public:
 
     // DocString: feat_node_x_in_expr
     /**
-     * @brief Get the list of feature expressions
-     * @return vector storing the expressions for all primary features that show up in feature in the order they appear in the postfix notation
+     * @brief A vector storing the expressions for all primary features in the order they appear in the postfix expression
      */
     virtual inline std::vector<std::string> get_x_in_expr_list() const {return std::vector<std::string>(1, _expr);}
 
     // DocString: feat_node_n_leaves
     /**
-     * @brief return the number of leaves in the feature
-     *
-     * @param cur_n_leaves The current number of primary features that are inside the feature
-     * @return total number of leaves of the binary expression tree
+     * @brief Get the number of primary features (non-unique) this feature contains (The number of leaves of the Binary Expression Tree)
      */
     virtual inline int n_leaves(int cur_n_leaves = 0) const {return cur_n_leaves + 1;}
 
@@ -171,13 +167,13 @@ public:
 
     // DocString: feat_node_expr_const
     /**
-     * @brief Get the string expression used to represent the primary feature
+     * @brief Get the expression for the feature (From root node down)
      */
     inline std::string expr()const{return _expr;}
 
     // DocString: feat_node_latex_expr
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const {return str_utils::latexify(_expr);}
 
@@ -188,21 +184,23 @@ public:
     inline Unit unit() const {return _unit;}
 
     /**
-     * @brief Get the training data of the feature
+     * @brief An array containing the values of the training set samples for the feature
+
      */
     inline std::vector<double> value() const {return _value;}
 
     /**
-     * @brief Get the test data for the feature
+     * @brief An array containing the values of the test set samples for the feature
+
      */
     inline std::vector<double> test_value() const {return _test_value;}
 
     // DocString: feat_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_value(int offset=-1, const bool for_comp=false) const
     {
@@ -211,10 +209,10 @@ public:
 
     // DocString: feat_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_test_value(int offset=-1, const bool for_comp=false) const
     {
@@ -226,7 +224,9 @@ public:
 
     // DocString: feat_node_is_nan
     /**
-     * @brief Check if the feature contains NaN
+     * @brief Check if the feature has a NaN value in it
+     *
+     * @return True if one of the values of the feature is a NaN
      */
     inline bool is_nan() const
     {
@@ -235,7 +235,9 @@ public:
 
     // DocString: feat_node_is_const
     /**
-     * @brief Check if feature is constant
+     * @brief Check if feature is constant for one of the tasks
+     *
+     * @return True if the feature is constant for all samples in any task
      */
     bool is_const() const;
 
@@ -247,8 +249,8 @@ public:
     /**
      * @brief The pointer to where the feature's training data is stored
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual inline double* value_ptr(int offset=-1, const bool for_comp=false) const
     {
@@ -258,8 +260,8 @@ public:
     /**
      * @brief The pointer to where the feature's test data is stored
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual inline double* test_value_ptr(int offset=-1, const bool for_comp=false) const
     {
@@ -268,9 +270,8 @@ public:
 
     // DocString: feat_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief return the rung of the feature (Height of the binary expression tree + 1)
      *
-     * @param cur_rung The rung current rung of the feature tree (used to recursively calculate rung)
      */
     inline int rung(int cur_rung = 0) const {return cur_rung;}
 
@@ -318,18 +319,17 @@ public:
         return str_utils::matlabify(_expr);
     }
 
-    //DocString: feat_node_nfeats
+    // DocString: feat_node_nfeats
     /**
      * @brief Number of features used for an operator node
-     * @return the number of features for an operator node
      */
     inline int n_feats() const {return 0;}
 
-    //DocString: feat_node_feat
+    // DocString: feat_node_feat
     /**
      * @brief Return the ind node_ptr in the operator node's feat list
      *
-     * @param ind the index of the node to access
+     * @param ind (int) the index of the node to access
      * @return the ind feature in feature_list
      */
     inline node_ptr feat(const int ind) const
@@ -379,7 +379,7 @@ public:
      */
     inline void set_parameters(const double* params){};
 
-    //DocString: feat_node_get_params
+    // DocString: feat_node_get_params
     /**
      * @brief Solve the non-linear optimization to set the parameters
      * @details Fits the data points from _feats->value_ptr and prop to get the parameters for the feature
@@ -392,49 +392,49 @@ public:
      * @brief returns the number of parameters for this feature
      *
      * @param n_cur the current number of parameters for the feature
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the number of parameters (_params.size())
      */
     inline int n_params(int n_cur=0, int depth = 1) const {return n_cur;};
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param params pointer to the parameter values
-     * @param for_comp(bool) If true then the evaluation is for comparing features
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param params pointer to the non-linear parameter values
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
+     * @param depth the current depth of the node on the binary expression tree
      */
     inline void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const {set_value(offset);};
 
     /**
      * @brief The pointer to where the feature's training data is stored
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param params pointer to the parameter values
-     * @param for_comp(bool) If true then the evaluation is for comparing features
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param params pointer to the non-linear parameter values
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
+     * @param depth the current depth of the node on the binary expression tree
      * @returns the pointer to the feature's data
      */
     inline double* value_ptr(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const {return value_ptr(offset);};
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param params pointer to the parameter values
-     * @param for_comp(bool) If true then the evaluation is for comparing features
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param params pointer to the non-linear parameter values
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
+     * @param depth the current depth of the node on the binary expression tree
      */
     inline void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const {set_test_value(offset);};
 
     /**
      * @brief The pointer to where the feature's test data is stored
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param params pointer to the parameter values
-     * @param for_comp(bool) If true then the evaluation is for comparing features
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param params pointer to the non-linear parameter values
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
+     * @param depth the current depth of the node on the binary expression tree
      * @returns the pointer to the feature's data
      */
     inline double* test_value_ptr(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const {return test_value_ptr(offset);};
@@ -443,16 +443,16 @@ public:
      * @brief The expression of the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return feature expression
      */
     inline std::string expr(const double* params, const int depth = 1) const {return _expr;};
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the latexified expression
      */
     inline std::string get_latex_expr(const double* params, const int depth=1) const {return str_utils::latexify(_expr);}
@@ -462,7 +462,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     virtual inline std::string matlab_fxn_expr(const double* params, const int depth=1) const
@@ -476,7 +476,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     inline void set_bounds(double* lb, double* ub, const int depth=1) const {};
 
@@ -484,7 +484,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     virtual void initialize_params(double* params, const int depth = 1) const {};
 
diff --git a/src/feature_creation/node/ModelNode.hpp b/src/feature_creation/node/ModelNode.hpp
index 914496d95dfeed5569e67557282643040011bae6..33419865db2c51283c5dcac8363ea862c5c2fa9a 100644
--- a/src/feature_creation/node/ModelNode.hpp
+++ b/src/feature_creation/node/ModelNode.hpp
@@ -76,7 +76,7 @@ public:
      * @param matlab_fxn_expr The code to evaluate the feature in matlab
      * @param value Value of the feature for each sample
      * @param test_value Value of the feature for each test sample
-     * @param x_in_expr_list //!< vector storing the expressions for all primary features that show up in feature in the order they appear in the postfix notation
+     * @param x_in_expr_list vector storing the expressions for all primary features that show up in feature in the order they appear in the postfix notation
      * @param unit Unit of the feature
      */
     ModelNode(
@@ -96,7 +96,7 @@ public:
     /**
      * @brief Copy constructor from general node_ptr
      *
-     * @param in_node Node to be copied
+     * @param in_node (Node) Node to be copied
      */
     ModelNode(node_ptr in_node);
 
@@ -193,17 +193,14 @@ public:
 
     // DocString: model_node_x_in_expr
     /**
-     * @brief get the list of feature expressions for each of the leaves
-     * @return Vector containing the expression for each leaf in the binary expression tree
+     * @brief A vector storing the expressions for all primary features in the order they appear in the postfix expression
      */
     inline std::vector<std::string> get_x_in_expr_list() const {return _x_in_expr_list;}
 
     // DocString: model_node_n_leaves
     /**
-     * @brief return the number of leaves in the feature
+     * @brief The number of primary features (non-unique) this feature contains (The number of leaves of the Binary Expression Tree)
      *
-     * @param cur_n_leaves The current number of primary features that are inside the feature
-     * @return total number of leaves of the binary expression tree
      */
     inline int n_leaves(int n_cur_leaves = 0) const {return _n_leaves;}
 
@@ -239,31 +236,35 @@ public:
 
     // DocString: model_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_value(int offset=-1, const bool for_comp=false) const {return;}
 
     // DocString: model_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_test_value(int offset=-1, const bool for_comp=false) const {return;}
 
     // DocString: model_node_is_nan
     /**
-     * @brief Check if the feature contains NaN
+     * @brief Check if the feature has a NaN value in it
+     *
+     * @return True if one of the values of the feature is a NaN
      */
     inline bool is_nan() const {return false;}
 
     // DocString: model_node_is_const
     /**
-     * @brief Check if feature is constant
+     * @brief Check if feature is constant for one of the tasks
+     *
+     * @return True if the feature is constant for all samples in any task
      */
     inline bool is_const() const {return false;}
 
@@ -275,24 +276,24 @@ public:
     /**
      * @brief The pointer to where the feature's training data is stored
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline double* value_ptr(int offset=-1, const bool for_comp=false){return _value.data();}
 
     /**
      * @brief The pointer to where the feature's test data is stored
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline double* test_value_ptr(int offset=-1, const bool for_comp=false){return _test_value.data();}
 
     /**
      * @brief The pointer to where the feature's training data is stored
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline double* value_ptr(int offset=-1, const bool for_comp=false) const
     {
@@ -303,8 +304,8 @@ public:
     /**
      * @brief The pointer to where the feature's test data is stored
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline double* test_value_ptr(int offset=-1, const bool for_comp=false) const
     {
@@ -314,9 +315,8 @@ public:
 
     // DocString: model_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief return the rung of the feature (Height of the binary expression tree + 1)
      *
-     * @param cur_rung(int) The rung current rung of the feature tree (used to recursively calculate rung)
      */
     inline int rung(int cur_rung = 0) const {return _rung;}
 
@@ -341,7 +341,7 @@ public:
 
     // DocString: model_node_latex_expr
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const {return _latex_expr.substr(1, _latex_expr.size() - 2);}
 
@@ -376,11 +376,65 @@ public:
     void update_div_mult_leaves(std::map<std::string, double>& div_mult_leaves, const double fact, double& expected_abs_tot) const;
 
     #ifdef PY_BINDINGS
+    // DocString: model_node_init_list
+    /**
+     * @brief Constructs a feature node
+     *
+     * @param feat_ind (int) index of the feature
+     * @param rung (int) the rung of the feature
+     * @param expr (str) Expression for the feature
+     * @param latex_expr (str) The LaTeXified expression for the feature
+     * @param matlab_fxn_expr (str) The code to evaluate the feature in matlab
+     * @param value (list) Value of the feature for each sample
+     * @param test_value (list) Value of the feature for each test sample
+     * @param x_in_expr_list (list) vector storing the expressions for all primary features that show up in feature in the order they appear in the postfix notation
+     * @param unit (Unit) Unit of the feature
+     */
+    ModelNode(
+        const unsigned long int feat_ind,
+        const unsigned long int rung,
+        const std::string expr,
+        const std::string latex_expr,
+        const std::string expr_postfix,
+        const std::string matlab_fxn_expr,
+        const py::list value,
+        const py::list test_value,
+        const py::list x_in_expr_list,
+        const Unit unit
+    );
+
+    // DocString: model_node_init_arr
+    /**
+     * @brief Constructs a feature node
+     *
+     * @param feat_ind (int) index of the feature
+     * @param rung (int) the rung of the feature
+     * @param expr (str) Expression for the feature
+     * @param latex_expr (str) The LaTeXified expression for the feature
+     * @param matlab_fxn_expr (str) The code to evaluate the feature in matlab
+     * @param value (np.ndarray) Value of the feature for each sample
+     * @param test_value (np.ndarray) Value of the feature for each test sample
+     * @param x_in_expr_list (list) vector storing the expressions for all primary features that show up in feature in the order they appear in the postfix notation
+     * @param unit (Unit) Unit of the feature
+     */
+    ModelNode(
+        const unsigned long int feat_ind,
+        const unsigned long int rung,
+        const std::string expr,
+        const std::string latex_expr,
+        const std::string expr_postfix,
+        const std::string matlab_fxn_expr,
+        const np::ndarray value,
+        const np::ndarray test_value,
+        const py::list x_in_expr_list,
+        const Unit unit
+    );
+
     // DocString: model_node_eval_arr
     /**
      * @brief Evaluate the model for a new point
      *
-     * @param x_in The data point to evaluate the model (order the same as appending the results of _feats[nn]->get_x_in_expr_list() for all feature)
+     * @param x_in (np.ndarray) The data point to evaluate the model (order the same as appending the results of _feats[nn]->get_x_in_expr_list() for all features)
      * @return The prediction of the model for a given data point
      */
     inline double eval_py(np::ndarray x_in){return eval(python_conv_utils::from_ndarray<double>(x_in));}
@@ -389,7 +443,7 @@ public:
     /**
      * @brief Evaluate the model for a new point
      *
-     * @param x_in The data point to evaluate the model (order the same as appending the results of _feats[nn]->get_x_in_expr_list() for all feature)
+     * @param x_in (list) The data point to evaluate the model (order the same as appending the results of _feats[nn]->get_x_in_expr_list() for all feature)
      * @return The prediction of the model for a given data point
      */
     inline double eval_py(py::list x_in){return eval(python_conv_utils::from_list<double>(x_in));}
@@ -398,7 +452,7 @@ public:
     /**
      * @brief Evaluate the model for a new point
      *
-     * @param x_in_dct Dictionary describing the new point (\"feature expr\": value)
+     * @param x_in (dict) Dictionary describing the new point (\"feature expr\": value)
      * @return The prediction of the model for a given data point
      */
     inline double eval_py(py::dict x_in){return eval(python_conv_utils::from_dict<std::string, double>(x_in));}
@@ -407,7 +461,7 @@ public:
     /**
      * @brief Evaluate the model for a set of new points
      *
-     * @param x_in The set data for a set of new data points (size of n_feature x n_points, and order the same as appending the results of _feats[nn]->get_x_in_expr_list() for all feature)
+     * @param x_in (np.ndarray) The set data for a set of new data points (size of n_feature x n_points, and order the same as appending the results of _feats[nn]->get_x_in_expr_list() for all feature)
      * @return The prediction of the model for a given data point
      */
     np::ndarray eval_many_py(np::ndarray x_in);
@@ -416,7 +470,7 @@ public:
     /**
      * @brief Evaluate the model for a set of new points
      *
-     * @param x_in_dct The set of data points to evaluate the model. Keys must be strings representing feature expressions and vectors must be the same length
+     * @param x_in (dict) The set of data points to evaluate the model. Keys must be strings representing feature expressions and vectors must be the same length
      * @return The prediction of the model for a given data point
      */
     np::ndarray eval_many_py(py::dict x_in);
@@ -434,7 +488,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     inline std::string matlab_fxn_expr(const double* params, const int depth=1) const
diff --git a/src/feature_creation/node/Node.hpp b/src/feature_creation/node/Node.hpp
index 87bc48dc280ae433e4b05e8ceb818ff01bb4533a..037b3d31a92fb0b32bdcea121d71280422b8d480 100644
--- a/src/feature_creation/node/Node.hpp
+++ b/src/feature_creation/node/Node.hpp
@@ -126,35 +126,33 @@ public:
 
     // DocString: node_x_in_expr
     /**
-     * @brief Get the list of feature expressions
-     * @return vector storing the expressions for all primary features that show up in feature in the order they appear in the postfix notation
+     * @brief A vector storing the expressions for all primary features in the order they appear in the postfix expression
      */
     virtual std::vector<std::string> get_x_in_expr_list() const = 0;
 
     // DocString: node_n_leaves
     /**
-     * @brief return the number of leaves in the feature
+     * @brief The number of primary features (non-unique) this feature contains (The number of leaves of the Binary Expression Tree)
      *
-     * @param cur_n_leaves The current number of primary features that are inside the feature
-     * @return total number of leaves of the binary expression tree
+     * @param cur_n_leaves (int) A recursive counting variable
+     * @return The number of leaves of the Binary Expression Tree
      */
     virtual int n_leaves(const int cur_n_leaves = 0) const = 0;
 
     // DocString: node_reindex_1
     /**
-     * @brief Reindex the feature
-     * @details re-index the feature to be continuous
+     * @brief Reset _feat_ind and _arr_ind to ind
      *
-     * @param ind(int) the new feature and array index
+     * @param ind (int) the new value of _feat_ind and _arr_ind
      */
     inline void reindex(const unsigned long int ind){_feat_ind = ind; _arr_ind = ind;}
 
     // DocString: node_reindex_2
     /**
-     * @brief Reindex the feature
-     * @details re-index the feature to be continuous
+     * @brief Reset _feat_ind and _arr_ind to feat_ind and arr_ind, respectively
      *
-     * @param ind(int) the new feature and array index
+     * @param feat_ind (int) the new value of _feat_ind
+     * @param arr_ind (int) the new value of _arr_ind
      */
     inline void reindex(const unsigned long int feat_ind, const unsigned long int arr_ind){_feat_ind = feat_ind; _arr_ind = arr_ind;}
 
@@ -166,13 +164,13 @@ public:
      */
     virtual unsigned long long sort_score(unsigned int max_ind) const = 0;
 
-    // DocString: node_samp
+    // DocString: node_n_samp
     /**
-     * @brief The number of samples
+     * @brief The number of samples in the training set
      */
     inline int n_samp() const {return _n_samp;}
 
-    // DocString: node_test_samp
+    // DocString: node_n_test_samp
     /**
      * @brief The number of samples in the test set
      */
@@ -180,19 +178,19 @@ public:
 
     // DocString: node_feat_ind
     /**
-     * @brief The feature index
+     * @brief The unique index ID for the feature
      */
     inline unsigned long int feat_ind() const {return _feat_ind;}
 
     // DocString: node_arr_ind
     /**
-     * @brief The feature array index
+     * @brief The index that is used to access the data stored in the central data storage
      */
     inline unsigned long int arr_ind() const {return _arr_ind;}
 
     // DocString: node_selected
     /**
-     * @brief True if feature is selected
+     * @brief True if the feature is selected
      */
     inline bool selected() const {return _selected;}
 
@@ -203,7 +201,6 @@ public:
      */
     inline void set_selected(const bool sel){_selected = sel;}
 
-    // DocString: node_d_mat_ind
     /**
      * @brief  Setter function for _d_mat_ind
      *
@@ -211,89 +208,95 @@ public:
      */
     inline void set_d_mat_ind(const int ind){_d_mat_ind = ind;}
 
+    // DocString: node_d_mat_ind
     /**
-     * @brief The descriptor matrix index
+     * @brief The index used to access the data stored in the descriptor matrix
      */
     inline int d_mat_ind() const{return _d_mat_ind;}
 
     // DocString: node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     virtual std::string expr() const = 0;
 
-    // DocString: node_latex_expr
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     virtual std::string get_latex_expr() const = 0;
 
     // DocString: node_latex_expr
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief The valid LaTeX expression that represents the feature
      */
     inline std::string latex_expr() const {return "$" + get_latex_expr() + "$";}
 
     // DocString: node_unit
     /**
-     * @brief Get the unit for the overall feature (From root node down)
+     * @brief The unit of the feature (Derived recursively from the primary features and operators)
      */
     virtual Unit unit() const = 0;
 
     /**
-     * @brief Get the training data of the feature
+     * @brief An array containing the values of the training set samples for the feature
+
      */
     virtual std::vector<double> value() const = 0;
 
     /**
-     * @brief Get the test data for the feature
+     * @brief An array containing the values of the test set samples for the feature
+
      */
     virtual std::vector<double> test_value() const = 0;
 
 
     // DocString: node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_value(int offset=-1, const bool for_comp=false) const = 0;
 
     /**
      * @brief The pointer to where the feature's training data is stored
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual double* value_ptr(int offset=-1, const bool for_comp=false) const = 0;
 
     // DocString: node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_test_value(int offset=-1, const bool for_comp=false) const = 0;
 
     /**
      * @brief The pointer to where the feature's test data is stored
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual double* test_value_ptr(int offset=-1, const bool for_comp=false) const = 0;
 
     // DocString: node_is_nan
     /**
-     * @brief Check if the feature contains NaN
+     * @brief Check if the feature has a NaN value in it
+     *
+     * @return True if one of the values of the feature is a NaN
      */
     virtual bool is_nan() const = 0;
 
     // DocString: node_is_const
     /**
-     * @brief Check if feature is constant
+     * @brief Check if feature is constant for one of the tasks
+     *
+     * @return True if the feature is constant for all samples in any task
      */
     virtual bool is_const() const = 0;
 
@@ -304,9 +307,9 @@ public:
 
     // DocString: node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief return the rung of the feature (Height of the binary expression tree + 1)
      *
-     * @param cur_rung(int) The rung current rung of the feature tree (used to recursively calculate rung)
+     * @param cur_rung (int) A recursive helper counter for the rung
      */
     virtual int rung(const int cur_rung = 0) const = 0;
 
@@ -336,13 +339,12 @@ public:
 
     // DocString: node_postfix_expr
     /**
-     * @brief Get the postfix expression for the feature
-     * @return The postfix string for the expression
+     * @brief A computer readable representation of the feature. Primary features are represented by their index in phi_0, node types are represented by abbreviations, and the order is the same as the postfix representation of the expression
      */
     inline std::string postfix_expr() const {std::string cur_expr = ""; update_postfix(cur_expr); return cur_expr.substr(0, cur_expr.size() - 1);}
 
     /**
-     * @brief Get the string character representation of the node for the postfix expression
+     * @brief Get a computer readable representation of the feature. Primary features are represented by their index in phi_0, node types are represented by abbreviations, and the order is the same as the postfix representation of the expression
      * @return the string representation of the node for the postfix expression
      */
     virtual std::string get_postfix_term() const = 0;
@@ -392,7 +394,7 @@ public:
      */
     virtual void set_parameters(const double* params) = 0;
 
-    //DocString: node_get_params
+    // DocString: node_get_params
     /**
      * @brief Solve the non-linear optimization to set the parameters
      * @details Fits the data points from _feats->value_ptr and prop to get the parameters for the feature
@@ -405,17 +407,17 @@ public:
      * @brief returns the number of parameters for this feature
      *
      * @param n_cur the current number of parameters for the feature
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the number of parameters (_params.size())
      */
     virtual int n_params(const int n_cur = 0, const int depth = 1) const = 0;
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param params pointer to the parameter values
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param params pointer to the non-linear parameter values
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the current depth of the node in the binary expression tree (from the root)
      */
     virtual void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const = 0;
@@ -423,20 +425,20 @@ public:
     /**
      * @brief The pointer to where the feature's training data is stored
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param params pointer to the parameter values
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param params pointer to the non-linear parameter values
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the current depth of the node in the binary expression tree (from the root)
      * @returns the pointer to the feature's data
      */
     virtual double* value_ptr(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const = 0;
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param params pointer to the parameter values
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param params pointer to the non-linear parameter values
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the current depth of the node in the binary expression tree (from the root)
      */
     virtual void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const = 0;
@@ -444,9 +446,9 @@ public:
     /**
      * @brief The pointer to where the feature's test data is stored
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param params pointer to the parameter values
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param params pointer to the non-linear parameter values
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the current depth of the node in the binary expression tree (from the root)
      * @returns the pointer to the feature's data
      */
@@ -456,16 +458,16 @@ public:
      * @brief The expression of the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return feature expression
      */
     virtual std::string expr(const double* params, const int depth = 1) const = 0;
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get valid LaTeX equation that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the latexified expression
      */
     virtual std::string get_latex_expr(const double* params, const int depth=1) const = 0;
@@ -475,7 +477,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     virtual std::string matlab_fxn_expr(const double* params, const int depth=1) const = 0;
@@ -486,7 +488,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     virtual void set_bounds(double* lb, double* ub, const int depth=1) const = 0;
 
@@ -494,7 +496,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     virtual void initialize_params(double* params, const int depth = 1) const = 0;
 
@@ -524,19 +526,19 @@ public:
     virtual void gradient(double* grad, double* dfdp, const double* params) const = 0;
     #endif
 
-    //DocString: node_nfeats
+    // DocString: node_n_feats
     /**
-     * @brief Number of features used for an operator node
-     * @return the number of features for an operator node
+     * @brief The number of features used for an operator (Number of child nodes)
+     * @return The number of features the operator is acting on (Number of child nodes)
      */
     virtual int n_feats() const = 0;
 
-    //DocString: node_feat
+    // DocString: node_feat
     /**
-     * @brief Return the ind node_ptr in the operator node's feat list
+     * @brief Return the ind^th feature stored by an operator node
      *
-     * @param ind the index of the node to access
-     * @return the ind feature in feature_list
+     * @param ind (int) the index of the feats list to be accessed
+     * @return The feature stored in _feats[ind]
      */
     virtual std::shared_ptr<Node> feat(const int ind) const = 0;
 
@@ -544,31 +546,27 @@ public:
 
     // DocString: node_value_py
     /**
-     * @brief The training data of the feature
-     * @return The training data as a numpy array
+     * @brief An array containing the values of the training set samples for the feature
      */
     inline np::ndarray value_py(){return python_conv_utils::to_ndarray<double>(value());}
 
     // DocString: node_test_value_py
     /**
-     * @brief The test data of the feature
-     * @return The test data as a numpy array
+     * @brief An array containing the values of the test set samples for the feature
      */
     inline np::ndarray test_value_py(){return python_conv_utils::to_ndarray<double>(test_value());}
 
     #ifdef PARAMETERIZE
     // DocString: node_parameters_py
     /**
-     * @brief The parameters used for non-linear operator nodes
-     * @return The operator node parameters as a list [alpha, a]
+     * @brief The vector storing all non-linear parameters of the feature
      */
     inline py::list parameters_py(){return python_conv_utils::to_list<double>(parameters());}
     #endif
 
     // DocString: node_primary_feature_decomp
     /**
-     * @brief Get the primary feature decomposition of a feature
-     * @return A python dict representing the primary feature comprising a feature
+     * @brief The decomposition of the primary features and how often they appear in the feature
      */
     inline py::dict primary_feature_decomp_py(){return python_conv_utils::to_dict<int, int>(primary_feature_decomp());}
 
diff --git a/src/feature_creation/node/operator_nodes/OperatorNode.hpp b/src/feature_creation/node/operator_nodes/OperatorNode.hpp
index fce88709c27ce4149219e2cfdba70069315e203e..147607523722f8f7ef85b36ea58dd364497872f4 100644
--- a/src/feature_creation/node/operator_nodes/OperatorNode.hpp
+++ b/src/feature_creation/node/operator_nodes/OperatorNode.hpp
@@ -132,8 +132,7 @@ public:
     }
 
     /**
-     * @brief Get the list of feature expressions
-     * @return vector storing the expressions for all primary features that show up in feature in the order they appear in the postfix notation
+     * @brief A vector storing the expressions for all primary features in the order they appear in the postfix expression
      */
     std::vector<std::string> get_x_in_expr_list() const
     {
@@ -148,10 +147,8 @@ public:
 
     // DocString: op_node_n_leaves
     /**
-     * @brief return the number of leaves in the feature
+     * @brief The number of primary features (non-unique) this feature contains (The number of leaves of the binary expression tree)
      *
-     * @param cur_n_leaves The current number of primary features that are inside the feature
-     * @return total number of leaves of the binary expression tree
      */
     int n_leaves(int cur_n_leaves = 0) const
     {
@@ -184,14 +181,14 @@ public:
         );
     }
 
-    // DocString: node_latex_expr
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     virtual std::string get_latex_expr() const = 0;
 
     /**
-     * @brief Get the training data of the feature
+     * @brief An array containing the values of the training set samples for the feature
+
      */
     std::vector<double> value() const
     {
@@ -201,7 +198,8 @@ public:
     }
 
     /**
-     * @brief Get the test data for the feature
+     * @brief An array containing the values of the test set samples for the feature
+
      */
     std::vector<double> test_value() const
     {
@@ -212,19 +210,19 @@ public:
 
     // DocString: op_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_value(int offset=-1, const bool for_comp=false) const = 0;
 
     // DocString: op_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_test_value(int offset=-1, const bool for_comp=false) const = 0;
 
@@ -233,7 +231,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's training value
      */
@@ -264,7 +262,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -285,7 +283,9 @@ public:
 
     // DocString: op_node_is_nan
     /**
-     * @brief Check if the feature contains NaN
+     * @brief Check if the feature has a NaN value in it
+     *
+     * @return True if one of the values of the feature is a NaN
      */
     inline bool is_nan() const
     {
@@ -295,7 +295,9 @@ public:
 
     // DocString: op_node_is_const
     /**
-     * @brief Check if feature is constant
+     * @brief Check if feature is constant for one of the tasks
+     *
+     * @return True if the feature is constant for all samples in any task
      */
     bool is_const() const
     {
@@ -315,9 +317,9 @@ public:
 
     // DocString: op_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief return the rung of the feature (Height of the binary expression tree + 1)
      *
-     * @param cur_rung The rung current rung of the feature tree (used to recursively calculate rung)
+     * @param cur_rung (int) A recursive helper counter for the rung
      */
     virtual int rung(int cur_rung = 0) const = 0;
 
@@ -384,19 +386,18 @@ public:
      */
     virtual std::string matlab_fxn_expr() const = 0;
 
-    //DocString: op_node_nfeats
+    // DocString: op_node_n_feats
      /**
-     * @brief Number of features used for an operator node
-     * @return the number of features for an operator node
+     * @brief The number of features used for an operator (Number of child nodes)
      */
     inline int n_feats() const {return N;}
 
-    //DocString: op_node_feat
+    // DocString: op_node_feat
     /**
-     * @brief Return the ind node_ptr in the operator node's feat list
+     * @brief Return the ind^th feature stored by an operator node
      *
-     * @param ind the index of the node to access
-     * @return the ind feature in feature_list
+     * @param ind (int) the index of the feats list to be accessed
+     * @return The feature stored in _feats[ind]
      */
     inline node_ptr feat(const int ind) const
     {
@@ -432,12 +433,12 @@ public:
      */
     virtual std::vector<double> parameters() const = 0;
 
-    //DocString: op_node_get_params
+    // DocString: op_node_get_params
     /**
-     * @brief Solve the non-linear optimization to set the parameters
+     * @brief Optimize the non-linear parameters of the feature
      * @details Fits the data points from _feats->value_ptr and prop to get the parameters for the feature
      *
-     * @param optimizer The optimizer used to get the paremeters
+     * @param optimizer (NLOptimizer) The optimizer used to get the parameters
      */
     virtual void get_parameters(std::shared_ptr<NLOptimizer> optimizer) = 0;
 
@@ -473,11 +474,11 @@ public:
     }
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     virtual void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const = 0;
@@ -485,9 +486,9 @@ public:
     /**
      * @brief The pointer to where the feature's training data is stored
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      * @returns the pointer to the feature's data
      */
@@ -504,11 +505,11 @@ public:
     }
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     virtual void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const = 0;
@@ -516,9 +517,9 @@ public:
     /**
      * @brief The pointer to where the feature's test data is stored
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      * @returns the pointer to the feature's data
      */
@@ -531,19 +532,20 @@ public:
     }
 
     /**
-     * @brief The expression of the feature
+     * @brief A human-readable equation representing the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return feature expression
      */
     virtual std::string expr(const double* params, const int depth=1) const = 0;
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
+     * @return the latexified expression
      */
     virtual std::string get_latex_expr(const double* params, const int depth=1) const = 0;
 
@@ -551,7 +553,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     virtual std::string matlab_fxn_expr(const double* params, const int depth=1) const = 0;
@@ -562,7 +564,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     virtual void set_bounds(double* lb, double* ub, const int depth=1) const = 0;
 
@@ -570,7 +572,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     virtual void initialize_params(double* params, const int depth = 1) const = 0;
 
@@ -640,7 +642,7 @@ public:
     /**
      * @brief Set the non-linear parameters
      *
-     * @param params The new parameters of the node
+     * @param params (list) The new parameters of the node
      */
     inline void set_parameters(py::list params){set_parameters(python_conv_utils::from_list<double>(params));}
 
@@ -648,7 +650,7 @@ public:
     /**
      * @brief Set the non-linear parameters
      *
-     * @param params The new parameters of the node
+     * @param params (np.ndarray) The new parameters of the node
      */
     inline void set_parameters(np::ndarray params){set_parameters( python_conv_utils::from_ndarray<double>(params));}
     #endif
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.hpp
index cc7696ae658e4221e3c684e35e5dab2eedf13e2d..5fb6af8f0f96e36127e62101c13c575ae600ea47 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.hpp
@@ -42,8 +42,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from an array of features to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      */
     AbsNode(const node_ptr feat, const unsigned long int feat_ind);
 
@@ -52,10 +52,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     AbsNode(const node_ptr feat, const unsigned long int feat_ind, const double l_bound, const double u_bound);
 
@@ -73,7 +73,7 @@ public:
 
     // DocString: abs_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human-readable equation representing the feature
      */
     inline std::string expr() const
     {
@@ -85,7 +85,7 @@ public:
 
     // DocString: abs_node_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const
     {
@@ -97,27 +97,26 @@ public:
 
     // DocString: abs_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: abs_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_test_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: abs_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief return the rung of the feature (Height of the binary expression tree + 1)
      *
-     * @param cur_rung The rung current rung of the feature tree (used to recursively calculate rung)
      */
     inline int rung(const int cur_rung=0) const {return _feats[0]->rung(cur_rung + 1);}
 
@@ -193,30 +192,30 @@ public:
     virtual void set_parameters(const double* params){return;}
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief The expression of the feature
+     * @brief A human-readable equation representing the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return feature expression
      */
     inline std::string expr(const double* params, const int depth=1) const
@@ -230,10 +229,10 @@ public:
     }
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the latexified expression
      */
     inline std::string get_latex_expr(const double* params, const int depth=1) const
@@ -250,7 +249,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     inline std::string matlab_fxn_expr(const double* params, const int depth=1) const
@@ -269,7 +268,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void set_bounds(double* lb, double* ub, const int depth=1) const;
 
@@ -277,7 +276,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void initialize_params(double* params, const int depth = 1) const;
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/parameterized_absolute_value.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/parameterized_absolute_value.hpp
index 9419f726a574b8b3fe001657cd997c7d9d93381f..c16a85039533c13a09362b257cde7e87f473d09c 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/parameterized_absolute_value.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/parameterized_absolute_value.hpp
@@ -55,10 +55,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     AbsParamNode(
@@ -73,8 +73,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     AbsParamNode(const node_ptr feat, const unsigned long int feat_ind, std::shared_ptr<NLOptimizer> optimizer);
@@ -84,10 +84,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     AbsParamNode(const node_ptr feat, const unsigned long int feat_ind, double const l_bound=1e-50, const double u_bound=1e50);
 
@@ -99,19 +99,19 @@ public:
 
     // DocString: abs_param_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_value(int offset=-1, const bool for_comp=false) const {set_value(_params.data(), offset, for_comp);}
 
     // DocString: abs_param_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_test_value(int offset=-1, const bool for_comp=false) const {set_test_value(_params.data(), offset, for_comp);}
 
@@ -120,7 +120,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -131,7 +131,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -144,13 +144,13 @@ public:
 
     // DocString: abs_param_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const {return expr(_params.data());}
 
     // DocString: abs_param_node_get_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const {return get_latex_expr(_params.data());}
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.hpp
index d7368152a734ba96d2109ca016ca6e4db5a94432..b125a367eda4eda35bace8dcd53155f497e99a26 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.hpp
@@ -43,11 +43,9 @@ public:
      * @brief Constructor
      * @details Constructs the Node from an array of features to operate on
      *
-     * @param feat_1 shared_ptr of the first feature to operate on (A)
-     * @param feat_2 shared_ptr of the second feature to operate on (B)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat_1 (Node) shared_ptr of the first feature to operate on (A)
+     * @param feat_2 (Node) shared_ptr of the second feature to operate on (B)
+     * @param feat_ind (int) Index of the new feature
      */
     AbsDiffNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind);
 
@@ -56,11 +56,11 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the features to operate on
      *
-     * @param feat_1 shared_ptr of the first feature to operate on (A)
-     * @param feat_2 shared_ptr of the second feature to operate on (B)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat_1 (Node) shared_ptr of the first feature to operate on (A)
+     * @param feat_2 (Node) shared_ptr of the second feature to operate on (B)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     AbsDiffNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind, const double l_bound, const double u_bound);
 
@@ -79,7 +79,7 @@ public:
 
     // DocString: abs_diff_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const
     {
@@ -92,7 +92,7 @@ public:
 
     // DocString: abs_diff_node_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const
     {
@@ -105,27 +105,26 @@ public:
 
     // DocString: abs_diff_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: abs_diff_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_test_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: abs_diff_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief return the rung of the feature (Height of the binary expression tree + 1)
      *
-     * @param cur_rung The rung current rung of the feature tree (used to recursively calculate rung)
      */
     inline int rung(const int cur_rung=0) const {return std::max(_feats[0]->rung(cur_rung + 1), _feats[1]->rung(cur_rung + 1));}
 
@@ -208,30 +207,30 @@ public:
     virtual void set_parameters(const double* params){return;}
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief The expression of the feature
+     * @brief A human readable equation representing the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return feature expression
      */
     inline std::string expr(const double* params, const int depth=1) const
@@ -246,10 +245,10 @@ public:
     }
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the latexified expression
      */
     inline std::string get_latex_expr(const double* params, const int depth=1) const
@@ -267,7 +266,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     inline std::string matlab_fxn_expr(const double* params, const int depth=1) const
@@ -287,7 +286,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void set_bounds(double* lb, double* ub, const int depth=1) const;
 
@@ -295,7 +294,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void initialize_params(double* params, const int depth = 1) const;
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/parameterized_absolute_difference.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/parameterized_absolute_difference.hpp
index 5a6223869a0dd909ef7024da04628c74923cc131..adfc9b2526f1d649facd22d5a66dbafe3601473f 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/parameterized_absolute_difference.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/parameterized_absolute_difference.hpp
@@ -55,9 +55,9 @@ public:
      *
      * @param feat_1 shared_ptr of the feature to operate on (A)
      * @param feat_2 shared_ptr of the feature to operate on (B)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     AbsDiffParamNode(
@@ -75,7 +75,7 @@ public:
      *
      * @param feat_1 shared_ptr of the feature to operate on (A)
      * @param feat_2 shared_ptr of the feature to operate on (B)
-     * @param feat_ind Index of the new feature
+     * @param feat_ind (int) Index of the new feature
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     AbsDiffParamNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind, std::shared_ptr<NLOptimizer> optimizer);
@@ -87,9 +87,9 @@ public:
      *
      * @param feat_1 shared_ptr of the feature to operate on (A)
      * @param feat_2 shared_ptr of the feature to operate on (B)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     AbsDiffParamNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind, double const l_bound=1e-50, const double u_bound=1e50);
 
@@ -101,19 +101,19 @@ public:
 
     // DocString: abs_diff_param_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_value(int offset=-1, const bool for_comp=false) const {set_value(_params.data(), offset, for_comp);}
 
     // DocString: abs_diff_param_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_test_value(int offset=-1, const bool for_comp=false) const {set_test_value(_params.data(), offset, for_comp);}
 
@@ -122,7 +122,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -133,7 +133,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -146,13 +146,13 @@ public:
 
     // DocString: abs_diff_param_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const {return expr(_params.data());}
 
     // DocString: abs_diff_param_node_get_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const {return get_latex_expr(_params.data());}
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.hpp
index 8770fc902e7351c501de89969888588fbf25c11c..7e1096f48c4498c3b82b50262f18b529ea6754f7 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.hpp
@@ -42,9 +42,9 @@ public:
      * @brief Constructor
      * @details Constructs the Node from an array of features to operate on
      *
-     * @param feat_1 shared_ptr of the first feature to operate on (A)
-     * @param feat_2 shared_ptr of the second feature to operate on (B)
-     * @param feat_ind Index of the new feature
+     * @param feat_1 (Node) shared_ptr of the first feature to operate on (A)
+     * @param feat_2 (Node) shared_ptr of the second feature to operate on (B)
+     * @param feat_ind (int) Index of the new feature
      */
     AddNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind);
 
@@ -53,11 +53,11 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the features to operate on
      *
-     * @param feat_1 shared_ptr of the first feature to operate on (A)
-     * @param feat_1 shared_ptr of the second feature to operate on (B)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat_1 (Node) shared_ptr of the first feature to operate on (A)
+     * @param feat_2 (Node) shared_ptr of the second feature to operate on (B)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     AddNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind, const double l_bound, const double u_bound);
 
@@ -75,7 +75,7 @@ public:
 
     // DocString: add_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const
     {
@@ -86,25 +86,12 @@ public:
         );
     }
 
-    // DocString: add_node_get_latex_expr
-    /**
-     * @brief Get the expression for the overall feature (From root node down)
-     */
-    inline std::string get_latex_expr() const
-    {
-        return fmt::format(
-            "\\left({} + {}\\right)]",
-            _feats[0]->get_latex_expr(),
-            _feats[1]->get_latex_expr()
-        );
-    }
-
     // DocString: add_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_value(int offset=-1, const bool for_comp=false) const;
 
@@ -112,18 +99,30 @@ public:
     /**
      * @brief Set the test value for the feature inside of the value storage arrays
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
      */
     virtual void set_test_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: add_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief return the rung of the feature (Height of the binary expression tree + 1)
      *
-     * @param cur_rung The rung current rung of the feature tree (used to recursively calculate rung)
      */
     inline int rung(const int cur_rung=0) const {return std::max(_feats[0]->rung(cur_rung + 1), _feats[1]->rung(cur_rung + 1));}
 
+    /**
+     * @brief Get the valid LaTeX expression that represents the feature
+     */
+    inline std::string get_latex_expr() const
+    {
+        return fmt::format(
+            "\\left({} + {}\\right)",
+            _feats[0]->get_latex_expr(),
+            _feats[1]->get_latex_expr()
+        );
+    }
+
+
     /**
      * @brief Returns the type of node this is
      */
@@ -202,30 +201,30 @@ public:
     virtual void set_parameters(const double* params){return;}
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief The expression of the feature
+     * @brief A human readable equation representing the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return feature expression
      */
     inline std::string expr(const double* params, const int depth=1) const
@@ -239,10 +238,10 @@ public:
     }
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the latexified expression
      */
     inline std::string get_latex_expr(const double* params, const int depth=1) const
@@ -259,7 +258,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     inline std::string matlab_fxn_expr(const double* params, const int depth=1) const
@@ -278,7 +277,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void set_bounds(double* lb, double* ub, const int depth=1) const;
 
@@ -286,7 +285,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void initialize_params(double* params, const int depth = 1) const;
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/parameterized_add.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/parameterized_add.hpp
index 554f3e7fb5553d1d7687d3c286490536d2bec7de..36b463157ad284bc7624df7233af22483d377921 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/parameterized_add.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/parameterized_add.hpp
@@ -57,9 +57,9 @@ public:
      *
      * @param feat_1 shared_ptr of the feature to operate on (A)
      * @param feat_2 shared_ptr of the feature to operate on (B)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     AddParamNode(
@@ -77,7 +77,7 @@ public:
      *
      * @param feat_1 shared_ptr of the feature to operate on (A)
      * @param feat_2 shared_ptr of the feature to operate on (B)
-     * @param feat_ind Index of the new feature
+     * @param feat_ind (int) Index of the new feature
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     AddParamNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind, std::shared_ptr<NLOptimizer> optimizer);
@@ -87,10 +87,11 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat_1 (Node) shared_ptr of the first feature to operate on (A)
+     * @param feat_2 (Node) shared_ptr of the second feature to operate on (B)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     AddParamNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind, double const l_bound=1e-50, const double u_bound=1e50);
 
@@ -102,19 +102,19 @@ public:
 
     // DocString: add_param_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_value(int offset=-1, const bool for_comp=false) const {set_value(_params.data(), offset, for_comp);}
 
     // DocString: add_param_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_test_value(int offset=-1, const bool for_comp=false) const {set_test_value(_params.data(), offset, for_comp);}
 
@@ -123,7 +123,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
-     * @param offset(int) the integer value to offset the location in the temporary storage array
+     * @param offset (int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -134,7 +134,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
-     * @param offset(int) the integer value to offset the location in the temporary storage array
+     * @param offset (int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -147,13 +147,13 @@ public:
 
     // DocString: add_param_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const {return expr(_params.data());}
 
     // DocString: add_param_node_get_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const {return get_latex_expr(_params.data());}
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.hpp
index 5547beee10fe34c4c8c2b2c3967f43816c1202b5..132a8259ce6e5d12ded2c566dc45136250394fb3 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.hpp
@@ -42,8 +42,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from an array of features to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      */
     CbNode(const node_ptr feat, const unsigned long int feat_ind);
 
@@ -52,10 +52,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     CbNode(const node_ptr feat, const unsigned long int feat_ind, const double l_bound, const double u_bound);
 
@@ -73,7 +73,7 @@ public:
 
     // DocString: cb_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const
     {
@@ -85,7 +85,7 @@ public:
 
     // DocString: cb_node_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const
     {
@@ -97,27 +97,26 @@ public:
 
     // DocString: cb_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: cb_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_test_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: cb_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief return the rung of the feature (Height of the binary expression tree + 1)
      *
-     * @param cur_rung The rung current rung of the feature tree (used to recursively calculate rung)
      */
     inline int rung(const int cur_rung=0) const {return _feats[0]->rung(cur_rung + 1);}
 
@@ -193,30 +192,30 @@ public:
     virtual void set_parameters(const double* params){return;}
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
-     * @param depth the max depth of paramterization (in the binary expression tree)
+     * @param depth the max depth of parameterization (in the binary expression tree)
      */
     void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
-     * @param depth the max depth of paramterization (in the binary expression tree)
+     * @param depth the max depth of parameterization (in the binary expression tree)
      */
     void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief The expression of the feature
+     * @brief A human readable equation representing the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return feature expression
      */
     inline std::string expr(const double* params, const int depth=1) const
@@ -229,10 +228,10 @@ public:
     }
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the latexified expression
      */
     inline std::string get_latex_expr(const double* params, const int depth=1) const
@@ -248,7 +247,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     inline std::string matlab_fxn_expr(const double* params, const int depth=1) const
@@ -266,7 +265,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void set_bounds(double* lb, double* ub, const int depth=1) const;
 
@@ -274,7 +273,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void initialize_params(double* params, const int depth = 1) const;
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/parameterized_cube.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/parameterized_cube.hpp
index 764fd0100a91ef6b140f1a9f36222100fad788b2..1ce23ecefca402bd687bd58c44de7f40cd22b1fd 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/parameterized_cube.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/parameterized_cube.hpp
@@ -55,10 +55,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     CbParamNode(
@@ -73,8 +73,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     CbParamNode(const node_ptr feat, const unsigned long int feat_ind, std::shared_ptr<NLOptimizer> optimizer);
@@ -84,10 +84,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     CbParamNode(const node_ptr feat, const unsigned long int feat_ind, double const l_bound=1e-50, const double u_bound=1e50);
 
@@ -99,19 +99,19 @@ public:
 
     // DocString: cb_param_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_value(int offset=-1, const bool for_comp=false) const {set_value(_params.data(), offset, for_comp);}
 
     // DocString: cb_param_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_test_value(int offset=-1, const bool for_comp=false) const {set_test_value(_params.data(), offset, for_comp);}
 
@@ -120,7 +120,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
-     * @param offset(int) the integer value to offset the location in the temporary storage array
+     * @param offset (int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -131,7 +131,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
-     * @param offset(int) the integer value to offset the location in the temporary storage array
+     * @param offset (int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -144,13 +144,13 @@ public:
 
     // DocString: cb_param_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const {return expr(_params.data());}
 
     // DocString: cb_param_node_get_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const {return get_latex_expr(_params.data());}
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.hpp
index 9d1ef920d9ac1e1a78d176697d8455761739c0f5..bf6e419fe9493f1f1d17db4051e9243b95d3547c 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.hpp
@@ -42,8 +42,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from an array of features to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      */
     CbrtNode(const node_ptr feat, const unsigned long int feat_ind);
 
@@ -52,10 +52,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     CbrtNode(const node_ptr feat, const unsigned long int feat_ind, const double l_bound, const double u_bound);
 
@@ -73,7 +73,7 @@ public:
 
     // DocString: cbrt_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const
     {
@@ -85,7 +85,7 @@ public:
 
     // DocString: cbrt_node_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const
     {
@@ -97,27 +97,26 @@ public:
 
     // DocString: cbrt_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: cbrt_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_test_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: cbrt_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief return the rung of the feature (Height of the binary expression tree + 1)
      *
-     * @param cur_rung The rung current rung of the feature tree (used to recursively calculate rung)
      */
     inline int rung(const int cur_rung=0) const {return _feats[0]->rung(cur_rung + 1);}
 
@@ -193,27 +192,27 @@ public:
     virtual void set_parameters(const double* params){return;}
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
-     * @param depth the max depth of paramterization (in the binary expression tree)
+     * @param depth the max depth of parameterization (in the binary expression tree)
      */
     void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
-     * @param depth the max depth of paramterization (in the binary expression tree)
+     * @param depth the max depth of parameterization (in the binary expression tree)
      */
     void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief The expression of the feature
+     * @brief A human readable equation representing the feature
      *
      * @param params parameter values for non-linear operations
      * @return feature expression
@@ -228,10 +227,10 @@ public:
     }
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the latexified expression
      */
     inline std::string get_latex_expr(const double* params, const int depth=1) const
@@ -247,7 +246,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     inline std::string matlab_fxn_expr(const double* params, const int depth=1) const
@@ -265,7 +264,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void set_bounds(double* lb, double* ub, const int depth=1) const;
 
@@ -273,7 +272,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void initialize_params(double* params, const int depth = 1) const;
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/parameterized_cube_root.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/parameterized_cube_root.hpp
index d96e0a6893eb375c04bde72f0f0a97130e666fc3..b9c4c4111fc34105744ca4c27713bebc991e4a6f 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/parameterized_cube_root.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/parameterized_cube_root.hpp
@@ -54,10 +54,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     CbrtParamNode(
@@ -72,8 +72,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     CbrtParamNode(const node_ptr feat, const unsigned long int feat_ind, std::shared_ptr<NLOptimizer> optimizer);
@@ -83,10 +83,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     CbrtParamNode(const node_ptr feat, const unsigned long int feat_ind, double const l_bound=1e-50, const double u_bound=1e50);
 
@@ -98,19 +98,19 @@ public:
 
     // DocString: cbrt_param_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_value(int offset=-1, const bool for_comp=false) const {set_value(_params.data(), offset, for_comp);}
 
     // DocString: cbrt_param_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_test_value(int offset=-1, const bool for_comp=false) const {set_test_value(_params.data(), offset, for_comp);}
 
@@ -119,7 +119,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
-     * @param offset(int) the integer value to offset the location in the temporary storage array
+     * @param offset (int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -130,7 +130,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
-     * @param offset(int) the integer value to offset the location in the temporary storage array
+     * @param offset (int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -143,13 +143,13 @@ public:
 
     // DocString: cbrt_param_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const {return expr(_params.data());}
 
     // DocString: cbrt_param_node_get_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const {return get_latex_expr(_params.data());}
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.hpp
index fe730c6a6d25aa547577a5a84c174b3db1d62a2c..b5ff54cd81adb2bc5227e39379c2640d5b447f7f 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.hpp
@@ -42,8 +42,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from an array of features to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      */
     CosNode(const node_ptr feat, const unsigned long int feat_ind);
 
@@ -52,10 +52,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     CosNode(const node_ptr feat, const unsigned long int feat_ind, const double l_bound, const double u_bound);
 
@@ -73,7 +73,7 @@ public:
 
     // DocString: cos_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const
     {
@@ -85,7 +85,7 @@ public:
 
     // DocString: cos_node_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const
     {
@@ -97,27 +97,26 @@ public:
 
     // DocString: cos_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: cos_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_test_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: cos_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief return the rung of the feature (Height of the binary expression tree + 1)
      *
-     * @param cur_rung The rung current rung of the feature tree (used to recursively calculate rung)
      */
     inline int rung(const int cur_rung=0) const {return _feats[0]->rung(cur_rung + 1);}
 
@@ -193,27 +192,27 @@ public:
     virtual void set_parameters(const double* params){return;}
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
-     * @param depth the max depth of paramterization (in the binary expression tree)
+     * @param depth the max depth of parameterization (in the binary expression tree)
      */
     void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
-     * @param depth the max depth of paramterization (in the binary expression tree)
+     * @param depth the max depth of parameterization (in the binary expression tree)
      */
     void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief The expression of the feature
+     * @brief A human readable equation representing the feature
      *
      * @param params parameter values for non-linear operations
      * @return feature expression
@@ -229,10 +228,10 @@ public:
     }
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the latexified expression
      */
     inline std::string get_latex_expr(const double* params, const int depth=1) const
@@ -249,7 +248,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     inline std::string matlab_fxn_expr(const double* params, const int depth=1) const
@@ -268,7 +267,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void set_bounds(double* lb, double* ub, const int depth=1) const;
 
@@ -276,7 +275,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void initialize_params(double* params, const int depth = 1) const;
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/parameterized_cos.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/parameterized_cos.hpp
index 13e4f13a1cddd9e0ed14cfef087c6b8c6d0b23ed..46f2115caaf8effa18568a1d6de1587e067df47a 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/parameterized_cos.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/parameterized_cos.hpp
@@ -55,10 +55,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     CosParamNode(
@@ -73,8 +73,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     CosParamNode(const node_ptr feat, const unsigned long int feat_ind, std::shared_ptr<NLOptimizer> optimizer);
@@ -84,10 +84,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     CosParamNode(const node_ptr feat, const unsigned long int feat_ind, double const l_bound=1e-50, const double u_bound=1e50);
 
@@ -99,19 +99,19 @@ public:
 
     // DocString: cos_param_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_value(int offset=-1, const bool for_comp=false) const {set_value(_params.data(), offset, for_comp);}
 
     // DocString: cos_param_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_test_value(int offset=-1, const bool for_comp=false) const {set_test_value(_params.data(), offset, for_comp);}
 
@@ -120,7 +120,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -131,7 +131,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -144,13 +144,13 @@ public:
 
     // DocString: cos_param_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const {return expr(_params.data());}
 
     // DocString: cos_param_node_get_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const {return get_latex_expr(_params.data());}
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.hpp
index a8fffa381ae0ca602847e9a690ea47eb48156dd4..93d78546dfafd1b19aa9b46b720866b281012cd3 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.hpp
@@ -42,9 +42,9 @@ public:
      * @brief Constructor
      * @details Constructs the Node from an array of features to operate on
      *
-     * @param feat_1 shared_ptr of the first feature to operate on (A)
-     * @param feat_2 shared_ptr of the second feature to operate on (B)
-     * @param feat_ind Index of the new feature
+     * @param feat_1 (Node) shared_ptr of the first feature to operate on (A)
+     * @param feat_2 (Node) shared_ptr of the second feature to operate on (B)
+     * @param feat_ind (int) Index of the new feature
      */
     DivNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind);
 
@@ -53,11 +53,11 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the features to operate on
      *
-     * @param feat_1 shared_ptr of the first feature to operate on (A)
+     * @param feat_1 (Node) shared_ptr of the first feature to operate on (A)
-     * @param feat_1 shared_ptr of the second feature to operate on (B)
+     * @param feat_2 (Node) shared_ptr of the second feature to operate on (B)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     DivNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind, const double l_bound, const double u_bound);
 
@@ -75,7 +75,7 @@ public:
 
     // DocString: div_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const
     {
@@ -88,7 +88,7 @@ public:
 
     // DocString: div_node_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const
     {
@@ -101,27 +101,26 @@ public:
 
     // DocString: div_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: div_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_test_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: div_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief return the rung of the feature (Height of the binary expression tree + 1)
      *
-     * @param cur_rung The rung current rung of the feature tree (used to recursively calculate rung)
      */
     inline int rung(const int cur_rung=0) const {return std::max(_feats[0]->rung(cur_rung + 1), _feats[1]->rung(cur_rung + 1));}
 
@@ -204,30 +203,30 @@ public:
     virtual void set_parameters(const double* params){return;}
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
-     * @param depth the max depth of paramterization (in the binary expression tree)
+     * @param depth the max depth of parameterization (in the binary expression tree)
      */
     void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
-     * @param depth the max depth of paramterization (in the binary expression tree)
+     * @param depth the max depth of parameterization (in the binary expression tree)
      */
     void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief The expression of the feature
+     * @brief A human readable equation representing the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return feature expression
      */
     inline std::string expr(const double* params, const int depth=1) const
@@ -241,10 +240,10 @@ public:
     }
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the latexified expression
      */
     inline std::string get_latex_expr(const double* params, const int depth=1) const
@@ -261,7 +260,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     inline std::string matlab_fxn_expr(const double* params, const int depth=1) const
@@ -280,7 +279,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void set_bounds(double* lb, double* ub, const int depth=1) const;
 
@@ -288,7 +287,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void initialize_params(double* params, const int depth = 1) const;
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/parameterized_divide.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/parameterized_divide.hpp
index 58410be65d3bc10b24ee7b3f7a50394af6060565..56387c9cb75a04641afa1acce6c00eec3ab78ac3 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/parameterized_divide.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/parameterized_divide.hpp
@@ -57,9 +57,9 @@ public:
      *
-     * @param feat_1 shared_ptr of the feature to operate on (A)
-     * @param feat_2 shared_ptr of the feature to operate on (B)
+     * @param feat_1 (Node) shared_ptr of the first feature to operate on (A)
+     * @param feat_2 (Node) shared_ptr of the second feature to operate on (B)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     DivParamNode(
@@ -77,7 +77,7 @@ public:
      *
-     * @param feat_1 shared_ptr of the feature to operate on (A)
-     * @param feat_2 shared_ptr of the feature to operate on (B)
+     * @param feat_1 (Node) shared_ptr of the first feature to operate on (A)
+     * @param feat_2 (Node) shared_ptr of the second feature to operate on (B)
-     * @param feat_ind Index of the new feature
+     * @param feat_ind (int) Index of the new feature
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     DivParamNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind, std::shared_ptr<NLOptimizer> optimizer);
@@ -87,10 +87,11 @@
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat_1 (Node) shared_ptr of the first feature to operate on (A)
+     * @param feat_2 (Node) shared_ptr of the second feature to operate on (B)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     DivParamNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind, double const l_bound=1e-50, const double u_bound=1e50);
 
@@ -102,19 +102,19 @@ public:
 
     // DocString: div_param_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_value(int offset=-1, const bool for_comp=false) const {set_value(_params.data(), offset, for_comp);}
 
     // DocString: div_param_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_test_value(int offset=-1, const bool for_comp=false) const {set_test_value(_params.data(), offset, for_comp);}
 
@@ -123,7 +123,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -134,7 +134,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -147,13 +147,13 @@ public:
 
     // DocString: div_param_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const {return expr(_params.data());}
 
     // DocString: div_param_node_get_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const {return get_latex_expr(_params.data());}
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.hpp
index b3d0b3158fb4efe587cc817a320049e78b2595c5..6a220215fc9382bdc12b42f1dbb391924eb2a09e 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.hpp
@@ -42,8 +42,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from an array of features to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      */
     ExpNode(const node_ptr feat, const unsigned long int feat_ind);
 
@@ -52,10 +52,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     ExpNode(const node_ptr feat, const unsigned long int feat_ind, const double l_bound, const double u_bound);
 
@@ -73,7 +73,7 @@ public:
 
     // DocString: exp_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const
     {
@@ -85,7 +85,7 @@ public:
 
     // DocString: exp_node_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const
     {
@@ -97,27 +97,26 @@ public:
 
     // DocString: exp_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: exp_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_test_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: exp_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief return the rung of the feature (Height of the binary expression tree + 1)
      *
-     * @param cur_rung The rung current rung of the feature tree (used to recursively calculate rung)
      */
     inline int rung(const int cur_rung=0) const {return _feats[0]->rung(cur_rung + 1);}
 
@@ -193,27 +192,27 @@ public:
     virtual void set_parameters(const double* params){return;}
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief The expression of the feature
+     * @brief A human-readable equation representing the feature
      *
      * @param params parameter values for non-linear operations
      * @return feature expression
@@ -228,10 +227,10 @@ public:
     }
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the latexified expression
      */
     inline std::string get_latex_expr(const double* params, const int depth=1) const
@@ -247,7 +246,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     inline std::string matlab_fxn_expr(const double* params, const int depth=1) const
@@ -265,7 +264,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void set_bounds(double* lb, double* ub, const int depth=1) const;
 
@@ -273,7 +272,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void initialize_params(double* params, const int depth = 1) const;
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/parameterized_exponential.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/parameterized_exponential.hpp
index 43c2845afe333dba766e019a9b269360a71b05c4..f53fc3fbfed36287becfafb4fdd0a737d9fb30d6 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/parameterized_exponential.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/parameterized_exponential.hpp
@@ -55,10 +55,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     ExpParamNode(
@@ -73,8 +73,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     ExpParamNode(const node_ptr feat, const unsigned long int feat_ind, std::shared_ptr<NLOptimizer> optimizer);
@@ -83,10 +83,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     ExpParamNode(const node_ptr feat, const unsigned long int feat_ind, double const l_bound=1e-50, const double u_bound=1e50);
 
@@ -98,19 +98,19 @@ public:
 
     // DocString: exp_param_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_value(int offset=-1, const bool for_comp=false) const {set_value(_params.data(), offset, for_comp);}
 
     // DocString: exp_param_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_test_value(int offset=-1, const bool for_comp=false) const {set_test_value(_params.data(), offset, for_comp);}
 
@@ -119,7 +119,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -130,7 +130,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -143,13 +143,13 @@ public:
 
     // DocString: exp_param_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human-readable equation representing the feature
      */
     inline std::string expr() const {return expr(_params.data());}
 
     // DocString: exp_param_node_get_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const {return get_latex_expr(_params.data());}
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.hpp
index 3bc75dd2122c0decd86ba876aeb424b43b76c772..d3445f6cf1aae9a479294944287cb3ac5b474ae5 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.hpp
@@ -38,8 +38,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from an array of features to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      */
     InvNode(const node_ptr feat, const unsigned long int feat_ind);
 
@@ -48,10 +48,10 @@ public:
      * @brief Constructor without checking feature values
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     InvNode(const node_ptr feat, const unsigned long int feat_ind, const double l_bound, const double u_bound);
 
@@ -69,7 +69,7 @@ public:
 
     // DocString: inv_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human-readable equation representing the feature
      */
     inline std::string expr() const
     {
@@ -81,7 +81,7 @@ public:
 
     // DocString: inv_node_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const
     {
@@ -93,27 +93,26 @@ public:
 
     // DocString: inv_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: inv_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_test_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: inv_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief Return the rung of the feature (height of the binary expression tree + 1)
      *
-     * @param cur_rung The rung current rung of the feature tree (used to recursively calculate rung)
      */
     inline int rung(const int cur_rung=0) const {return _feats[0]->rung(cur_rung + 1);}
 
@@ -189,27 +188,27 @@ public:
     virtual void set_parameters(const double* params){return;}
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief The expression of the feature
+     * @brief A human-readable equation representing the feature
      *
      * @param params parameter values for non-linear operations
      * @return feature expression
@@ -224,10 +223,10 @@ public:
     }
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the latexified expression
      */
     inline std::string get_latex_expr(const double* params, const int depth=1) const
@@ -243,7 +242,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     inline std::string matlab_fxn_expr(const double* params, const int depth=1) const
@@ -261,7 +260,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void set_bounds(double* lb, double* ub, const int depth=1) const;
 
@@ -269,7 +268,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void initialize_params(double* params, const int depth = 1) const;
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/parameterized_inverse.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/parameterized_inverse.hpp
index 5356e5c9cc8198e0afc4f4626b54122fdfc90c3a..f98d7fedbbabf48f35daab03919392a6ec27b2eb 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/parameterized_inverse.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/parameterized_inverse.hpp
@@ -55,10 +55,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     InvParamNode(
@@ -73,8 +73,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     InvParamNode(const node_ptr feat, const unsigned long int feat_ind, std::shared_ptr<NLOptimizer> optimizer);
@@ -84,10 +84,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     InvParamNode(const node_ptr feat, const unsigned long int feat_ind, double const l_bound=1e-50, const double u_bound=1e50);
 
@@ -99,19 +99,19 @@ public:
 
     // DocString: inv_param_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_value(int offset=-1, const bool for_comp=false) const {set_value(_params.data(), offset, for_comp);}
 
     // DocString: inv_param_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_test_value(int offset=-1, const bool for_comp=false) const {set_test_value(_params.data(), offset, for_comp);}
 
@@ -120,7 +120,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -131,7 +131,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -144,13 +144,13 @@ public:
 
     // DocString: inv_param_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human-readable equation representing the feature
      */
     inline std::string expr() const {return expr(_params.data());}
 
     // DocString: inv_param_node_get_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const {return get_latex_expr(_params.data());}
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.hpp
index 6b7c1f9eff18ea3acf4d885678a327864d7b8b14..6456f4f95676a291e7d64ad246ac8135535baed4 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.hpp
@@ -42,8 +42,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from an array of features to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      */
     LogNode(const node_ptr feat, const unsigned long int feat_ind);
 
@@ -52,10 +52,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     LogNode(const node_ptr feat, const unsigned long int feat_ind, const double l_bound, const double u_bound);
 
@@ -73,7 +73,7 @@ public:
 
     // DocString: log_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human-readable equation representing the feature
      */
     inline std::string expr() const
     {
@@ -85,7 +85,7 @@ public:
 
     // DocString: log_node_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const
     {
@@ -97,10 +97,10 @@ public:
 
     // DocString: log_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_value(int offset=-1, const bool for_comp=false) const;
 
@@ -112,18 +112,17 @@ public:
 
     // DocString: log_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_test_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: log_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief Return the rung of the feature (height of the binary expression tree + 1)
      *
-     * @param cur_rung The rung current rung of the feature tree (used to recursively calculate rung)
      */
     inline int rung(const int cur_rung=0) const {return _feats[0]->rung(cur_rung + 1);}
 
@@ -193,27 +192,27 @@ public:
     virtual void set_parameters(const std::vector<double> params, const bool check_sz=true){return;}
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief The expression of the feature
+     * @brief A human-readable equation representing the feature
      *
      * @param params parameter values for non-linear operations
      * @return feature expression
@@ -229,10 +228,10 @@ public:
     }
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the latexified expression
      */
     inline std::string get_latex_expr(const double* params, const int depth=1) const
@@ -249,7 +248,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     inline std::string matlab_fxn_expr(const double* params, const int depth=1) const
@@ -268,7 +267,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     virtual void set_bounds(double* lb, double* ub, const int depth=1) const;
 
@@ -276,7 +275,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     virtual void initialize_params(double* params, const int depth = 1) const;
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/parameterized_log.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/parameterized_log.hpp
index 9cfc6dc792cde71d9578cdb298d5cd2cca480033..cc3b1bad4cf3b7e7c80f8caec7838454ba0c1530 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/parameterized_log.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/parameterized_log.hpp
@@ -57,10 +57,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     LogParamNode(
@@ -75,8 +75,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     LogParamNode(const node_ptr feat, const unsigned long int feat_ind, std::shared_ptr<NLOptimizer> optimizer);
@@ -86,10 +86,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     LogParamNode(const node_ptr feat, const unsigned long int feat_ind, double const l_bound=1e-50, const double u_bound=1e50);
 
@@ -101,19 +101,19 @@ public:
 
     // DocString: log_param_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_value(int offset=-1, const bool for_comp=false) const {set_value(_params.data(), offset, for_comp);}
 
     // DocString: log_param_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_test_value(int offset=-1, const bool for_comp=false) const {set_test_value(_params.data(), offset, for_comp);}
 
@@ -122,7 +122,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -133,7 +133,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -146,13 +146,13 @@ public:
 
     // DocString: log_param_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const {return expr(_params.data());}
 
     // DocString: log_param_node_get_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const {return get_latex_expr(_params.data());}
 
@@ -170,7 +170,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void set_bounds(double* lb, double* ub, const int depth=1) const;
 
@@ -178,7 +178,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void initialize_params(double* params, const int depth = 1) const;
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.hpp
index 18d162df841d8ddef941b30069d19479e5e055fb..e799f8ebab79eb87f43c8f60d9232de20f5a7101 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.hpp
@@ -43,9 +43,9 @@ public:
      * @brief Constructor
      * @details Constructs the Node from an array of features to operate on
      *
-     * @param feat_1 shared_ptr of the first feature to operate on (A)
-     * @param feat_2 shared_ptr of the second feature to operate on (B)
-     * @param feat_ind Index of the new feature
+     * @param feat_1 (Node) shared_ptr of the first feature to operate on (A)
+     * @param feat_2 (Node) shared_ptr of the second feature to operate on (B)
+     * @param feat_ind (int) Index of the new feature
      */
     MultNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind);
 
@@ -54,11 +54,11 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the features to operate on
      *
-     * @param feat_1 shared_ptr of the first feature to operate on (A)
-     * @param feat_2 shared_ptr of the second feature to operate on (B)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat_1 (Node) shared_ptr of the first feature to operate on (A)
+     * @param feat_2 (Node) shared_ptr of the second feature to operate on (B)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     MultNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind, const double l_bound, const double u_bound);
 
@@ -76,7 +76,7 @@ public:
 
     // DocString: mult_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const
     {
@@ -89,7 +89,7 @@ public:
 
     // DocString: mult_node_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const
     {
@@ -102,27 +102,26 @@ public:
 
     // DocString: mult_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: mult_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_test_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: mult_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief Return the rung of the feature (height of the binary expression tree + 1)
      *
-     * @param cur_rung The rung current rung of the feature tree (used to recursively calculate rung)
      */
     inline int rung(const int cur_rung=0) const {return std::max(_feats[0]->rung(cur_rung + 1), _feats[1]->rung(cur_rung + 1));}
 
@@ -205,30 +204,30 @@ public:
     virtual void set_parameters(const double* params){return;}
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief The expression of the feature
+     * @brief A human readable equation representing the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return feature expression
      */
     inline std::string expr(const double* params, const int depth=1) const
@@ -242,10 +241,10 @@ public:
     }
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the latexified expression
      */
     inline std::string get_latex_expr(const double* params, const int depth=1) const
@@ -262,7 +261,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     inline std::string matlab_fxn_expr(const double* params, const int depth=1) const
@@ -281,7 +280,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void set_bounds(double* lb, double* ub, const int depth=1) const;
 
@@ -289,7 +288,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void initialize_params(double* params, const int depth = 1) const;
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/parameterized_multiply.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/parameterized_multiply.hpp
index 10ab81fd693b4955ed8f978a9458c8caa4581276..14254b6865aabc41c221f56be1a0af5a3a4d138e 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/parameterized_multiply.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/parameterized_multiply.hpp
@@ -55,10 +55,11 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat_1 (Node) shared_ptr of the first feature to operate on (A)
+     * @param feat_2 (Node) shared_ptr of the second feature to operate on (B)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     MultParamNode(
@@ -74,8 +74,9 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat_1 (Node) shared_ptr of the first feature to operate on (A)
+     * @param feat_2 (Node) shared_ptr of the second feature to operate on (B)
+     * @param feat_ind (int) Index of the new feature
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     MultParamNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind, std::shared_ptr<NLOptimizer> optimizer);
@@ -85,10 +85,11 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat_1 (Node) shared_ptr of the first feature to operate on (A)
+     * @param feat_2 (Node) shared_ptr of the second feature to operate on (B)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     MultParamNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind, double const l_bound=1e-50, const double u_bound=1e50);
 
@@ -100,19 +100,19 @@ public:
 
     // DocString: mult_param_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_value(int offset=-1, const bool for_comp=false) const {set_value(_params.data(), offset, for_comp);}
 
     // DocString: mult_param_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_test_value(int offset=-1, const bool for_comp=false) const {set_test_value(_params.data(), offset, for_comp);}
 
@@ -121,7 +121,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -132,7 +132,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -145,13 +145,13 @@ public:
 
     // DocString: mult_param_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const {return expr(_params.data());}
 
     // DocString: mult_param_node_get_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const {return get_latex_expr(_params.data());}
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.hpp
index e947a042512fc9851691dc19b413d79847717412..a959996d72368c88979664e7ca368c2b6a8275cf 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.hpp
@@ -43,8 +43,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from an array of features to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      */
     NegExpNode(const node_ptr feat, const unsigned long int feat_ind);
 
@@ -53,10 +53,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     NegExpNode(const node_ptr feat, const unsigned long int feat_ind, const double l_bound, const double u_bound);
 
@@ -74,7 +74,7 @@ public:
 
     // DocString: neg_exp_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const
     {
@@ -86,7 +86,7 @@ public:
 
     // DocString: neg_exp_node_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const
     {
@@ -98,27 +98,26 @@ public:
 
     // DocString: neg_exp_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: neg_exp_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_test_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: neg_exp_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief Return the rung of the feature (height of the binary expression tree + 1)
      *
-     * @param cur_rung The rung current rung of the feature tree (used to recursively calculate rung)
      */
     inline int rung(const int cur_rung=0) const {return _feats[0]->rung(cur_rung + 1);}
 
@@ -194,27 +193,27 @@ public:
     virtual void set_parameters(const double* params){return;}
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief The expression of the feature
+     * @brief A human readable equation representing the feature
      *
      * @param params parameter values for non-linear operations
      * @return feature expression
@@ -229,10 +228,10 @@ public:
     }
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the latexified expression
      */
     inline std::string get_latex_expr(const double* params, const int depth=1) const
@@ -248,7 +247,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     inline std::string matlab_fxn_expr(const double* params, const int depth=1) const
@@ -266,7 +265,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void set_bounds(double* lb, double* ub, const int depth=1) const;
 
@@ -274,7 +273,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void initialize_params(double* params, const int depth = 1) const;
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/parameterized_negative_exponential.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/parameterized_negative_exponential.hpp
index 5af26bd54374b954e605496c3acd086a4de3a519..24211df7d9bb1ac16f7d18f9e93172d1204c1534 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/parameterized_negative_exponential.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/parameterized_negative_exponential.hpp
@@ -55,10 +55,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     NegExpParamNode(
@@ -73,8 +73,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     NegExpParamNode(const node_ptr feat, const unsigned long int feat_ind, std::shared_ptr<NLOptimizer> optimizer);
@@ -84,10 +84,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     NegExpParamNode(const node_ptr feat, const unsigned long int feat_ind, double const l_bound=1e-50, const double u_bound=1e50);
 
@@ -99,19 +99,19 @@ public:
 
     // DocString: neg_exp_param_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_value(int offset=-1, const bool for_comp=false) const {set_value(_params.data(), offset, for_comp);}
 
     // DocString: neg_exp_param_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_test_value(int offset=-1, const bool for_comp=false) const {set_test_value(_params.data(), offset, for_comp);}
 
@@ -128,7 +128,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -139,7 +139,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -152,13 +152,13 @@ public:
 
     // DocString: neg_exp_param_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human-readable equation representing the feature
      */
     inline std::string expr() const {return expr(_params.data());}
 
     // DocString: neg_exp_param_node_get_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const {return get_latex_expr(_params.data());}
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/parameterized_sin.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/parameterized_sin.hpp
index 2b2e06c661900428a2240e4a96f5d6ef6d8045db..8cd929bef06dfa493e2ad82a8189e82ef7e3c2c2 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/parameterized_sin.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/parameterized_sin.hpp
@@ -55,10 +55,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     SinParamNode(
@@ -73,8 +73,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     SinParamNode(const node_ptr feat, const unsigned long int feat_ind, std::shared_ptr<NLOptimizer> optimizer);
@@ -84,10 +84,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     SinParamNode(const node_ptr feat, const unsigned long int feat_ind, double const l_bound=1e-50, const double u_bound=1e50);
 
@@ -99,19 +99,19 @@ public:
 
     // DocString: sin_param_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_value(int offset=-1, const bool for_comp=false) const {set_value(_params.data(), offset, for_comp);}
 
     // DocString: sin_param_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_test_value(int offset=-1, const bool for_comp=false) const {set_test_value(_params.data(), offset, for_comp);}
 
@@ -120,7 +120,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -131,7 +131,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -144,13 +144,13 @@ public:
 
     // DocString: sin_param_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human-readable equation representing the feature
      */
     inline std::string expr() const {return expr(_params.data());}
 
     // DocString: sin_param_node_get_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const {return get_latex_expr(_params.data());}
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.hpp
index dd2a85fda96ea52e6b6cf355e3d65e77e982cfa4..4a0a4508a32b150aef1e4ee31aa08ca238d63df4 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.hpp
@@ -43,8 +43,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from an array of features to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      */
     SinNode(const node_ptr feat, const unsigned long int feat_ind);
 
@@ -53,10 +53,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     SinNode(const node_ptr feat, const unsigned long int feat_ind, const double l_bound, const double u_bound);
 
@@ -74,7 +74,7 @@ public:
 
     // DocString: sin_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human-readable equation representing the feature
      */
     inline std::string expr() const
     {
@@ -86,7 +86,7 @@ public:
 
     // DocString: sin_node_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const
     {
@@ -98,27 +98,26 @@ public:
 
     // DocString: sin_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: sin_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_test_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: sin_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief Return the rung of the feature (height of the binary expression tree + 1)
      *
-     * @param cur_rung The rung current rung of the feature tree (used to recursively calculate rung)
      */
     inline int rung(const int cur_rung=0) const {return _feats[0]->rung(cur_rung + 1);}
 
@@ -194,27 +193,27 @@ public:
     virtual void set_parameters(const double* params){return;}
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief The expression of the feature
+     * @brief A human-readable equation representing the feature
      *
      * @param params parameter values for non-linear operations
      * @return feature expression
@@ -230,10 +229,10 @@ public:
     }
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the latexified expression
      */
     inline std::string get_latex_expr(const double* params, const int depth=1) const
@@ -250,7 +249,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     inline std::string matlab_fxn_expr(const double* params, const int depth=1) const
@@ -269,7 +268,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void set_bounds(double* lb, double* ub, const int depth=1) const;
 
@@ -277,7 +276,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void initialize_params(double* params, const int depth = 1) const;
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/parameterized_sixth_power.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/parameterized_sixth_power.hpp
index e54a36fa3cead2fa6d8fbb7d62498f1a787aa5d2..b99c5e6b667136978c529ae8d1ee60e6b1561a47 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/parameterized_sixth_power.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/parameterized_sixth_power.hpp
@@ -55,10 +55,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     SixPowParamNode(
@@ -73,8 +73,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     SixPowParamNode(const node_ptr feat, const unsigned long int feat_ind, std::shared_ptr<NLOptimizer> optimizer);
@@ -84,10 +84,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     SixPowParamNode(const node_ptr feat, const unsigned long int feat_ind, double const l_bound=1e-50, const double u_bound=1e50);
 
@@ -99,19 +99,19 @@ public:
 
     // DocString: six_pow_param_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_value(int offset=-1, const bool for_comp=false) const {set_value(_params.data(), offset, for_comp);}
 
     // DocString: six_pow_param_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_test_value(int offset=-1, const bool for_comp=false) const {set_test_value(_params.data(), offset, for_comp);}
 
@@ -120,7 +120,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -131,7 +131,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -144,13 +144,13 @@ public:
 
     // DocString: six_pow_param_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human-readable equation representing the feature
      */
     inline std::string expr() const {return expr(_params.data());}
 
     // DocString: six_pow_param_node_get_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const {return get_latex_expr(_params.data());}
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/sixth_power.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/sixth_power.hpp
index 668e2e5218d285cbe9e63814aa8bc8ff4b81c7b3..f1b4fd7559ccc9d7da140bfe2e8473bc92ad2715 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/sixth_power.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/six_pow/sixth_power.hpp
@@ -43,8 +43,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from an array of features to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      */
     SixPowNode(const node_ptr feat, const unsigned long int feat_ind);
 
@@ -53,10 +53,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     SixPowNode(const node_ptr feat, const unsigned long int feat_ind, const double l_bound, const double u_bound);
 
@@ -74,7 +74,7 @@ public:
 
     // DocString: six_pow_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human-readable equation representing the feature
      */
     inline std::string expr() const
     {
@@ -86,7 +86,7 @@ public:
 
     // DocString: six_pow_node_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const
     {
@@ -98,27 +98,26 @@ public:
 
     // DocString: six_pow_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: six_pow_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_test_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: six_pow_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief Return the rung of the feature (height of the binary expression tree + 1)
      *
-     * @param cur_rung The rung current rung of the feature tree (used to recursively calculate rung)
      */
     inline int rung(const int cur_rung=0) const {return _feats[0]->rung(cur_rung + 1);}
 
@@ -194,30 +193,30 @@ public:
     virtual void set_parameters(const double* params){return;}
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief The expression of the feature
+     * @brief A human-readable equation representing the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return feature expression
      */
     inline std::string expr(const double* params, const int depth=1) const
@@ -230,10 +229,10 @@ public:
     }
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the latexified expression
      */
     inline std::string get_latex_expr(const double* params, const int depth=1) const
@@ -249,7 +248,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     inline std::string matlab_fxn_expr(const double* params, const int depth=1) const
@@ -267,7 +266,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void set_bounds(double* lb, double* ub, const int depth=1) const;
 
@@ -275,7 +274,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void initialize_params(double* params, const int depth = 1) const;
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/parameterized_square.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/parameterized_square.hpp
index 30198b979a2980dd47dfbde821c5c3f9c65ca059..1a66c53cda148616907ec7abd4b2732154fe2595 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/parameterized_square.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/parameterized_square.hpp
@@ -55,10 +55,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     SqParamNode(
@@ -73,8 +73,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     SqParamNode(const node_ptr feat, const unsigned long int feat_ind, std::shared_ptr<NLOptimizer> optimizer);
@@ -84,10 +84,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     SqParamNode(const node_ptr feat, const unsigned long int feat_ind, double const l_bound=1e-50, const double u_bound=1e50);
 
@@ -99,19 +99,19 @@ public:
 
     // DocString: sq_param_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_value(int offset=-1, const bool for_comp=false) const {set_value(_params.data(), offset, for_comp);}
 
     // DocString: sq_param_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_test_value(int offset=-1, const bool for_comp=false) const {set_test_value(_params.data(), offset, for_comp);}
 
@@ -120,7 +120,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -131,7 +131,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -144,13 +144,13 @@ public:
 
     // DocString: sq_param_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const {return expr(_params.data());}
 
     // DocString: sq_param_node_get_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const {return get_latex_expr(_params.data());}
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.hpp
index 65fed71412768780e025fed499ae46c1cf3a393d..e02e0920c0c441fe079812e7614e37f44c86095e 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.hpp
@@ -42,8 +42,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from an array of features to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      */
     SqNode(const node_ptr feat, const unsigned long int feat_ind);
 
@@ -52,10 +52,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     SqNode(const node_ptr feat, const unsigned long int feat_ind, const double l_bound, const double u_bound);
 
@@ -73,7 +73,7 @@ public:
 
     // DocString: sq_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const
     {
@@ -85,7 +85,7 @@ public:
 
     // DocString: sq_node_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const
     {
@@ -97,27 +97,26 @@ public:
 
     // DocString: sq_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: sq_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_test_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: sq_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief return the rung of the feature (height of the binary expression tree + 1)
      *
-     * @param cur_rung The rung current rung of the feature tree (used to recursively calculate rung)
      */
     inline int rung(const int cur_rung=0) const {return _feats[0]->rung(cur_rung + 1);}
 
@@ -193,30 +192,30 @@ public:
     virtual void set_parameters(const double* params){return;}
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief The expression of the feature
+     * @brief A human readable equation representing the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return feature expression
      */
     inline std::string expr(const double* params, const int depth=1) const
@@ -229,10 +228,10 @@ public:
     }
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the latexified expression
      */
     inline std::string get_latex_expr(const double* params, const int depth=1) const
@@ -248,7 +247,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     inline std::string matlab_fxn_expr(const double* params, const int depth=1) const
@@ -266,7 +265,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void set_bounds(double* lb, double* ub, const int depth=1) const;
 
@@ -274,7 +273,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void initialize_params(double* params, const int depth = 1) const;
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/parameterized_square_root.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/parameterized_square_root.hpp
index 05b00d56bec4fe62ea475c37fa1b825083c0f98f..f3e4e22ec8682172568fb85bfe064e61377e5979 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/parameterized_square_root.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/parameterized_square_root.hpp
@@ -57,10 +57,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     SqrtParamNode(
@@ -75,8 +75,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     SqrtParamNode(const node_ptr feat, const unsigned long int feat_ind, std::shared_ptr<NLOptimizer> optimizer);
@@ -86,10 +86,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     SqrtParamNode(const node_ptr feat, const unsigned long int feat_ind, double const l_bound=1e-50, const double u_bound=1e50);
 
@@ -101,19 +101,19 @@ public:
 
     // DocString: sqrt_param_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_value(int offset=-1, const bool for_comp=false) const {set_value(_params.data(), offset, for_comp);}
 
     // DocString: sqrt_param_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_test_value(int offset=-1, const bool for_comp=false) const {set_test_value(_params.data(), offset, for_comp);}
 
@@ -122,7 +122,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -133,7 +133,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -146,13 +146,13 @@ public:
 
     // DocString: sqrt_param_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const {return expr(_params.data());}
 
     // DocString: sqrt_param_node_get_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const {return get_latex_expr(_params.data());}
 
@@ -207,7 +207,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void set_bounds(double* lb, double* ub, const int depth=1) const;
 
@@ -215,7 +215,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void initialize_params(double* params, const int depth = 1) const;
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.hpp
index 5a5479442f6585c4e28ce2355cf2dec7e3bb34c0..fcbe0e767d0fd931ba9ff708bd785541bb3b7380 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.hpp
@@ -43,8 +43,8 @@ public:
      * @brief Constructor
      * @details Constructs the Node from an array of features to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
      */
     SqrtNode(const node_ptr feat, const unsigned long int feat_ind);
 
@@ -53,10 +53,10 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the feature to operate on
      *
-     * @param feat shared_ptr of the feature to operate on (A)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat (Node) shared_ptr of the feature to operate on (A)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     SqrtNode(const node_ptr feat, const unsigned long int feat_ind, const double l_bound, const double u_bound);
 
@@ -74,7 +74,7 @@ public:
 
     // DocString: sqrt_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human readable equation representing the feature
      */
     inline std::string expr() const
     {
@@ -86,7 +86,7 @@ public:
 
     // DocString: sqrt_node_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const
     {
@@ -98,27 +98,26 @@ public:
 
     // DocString: sqrt_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: sqrt_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_test_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: sqrt_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief return the rung of the feature (height of the binary expression tree + 1)
      *
-     * @param cur_rung The rung current rung of the feature tree (used to recursively calculate rung)
      */
     inline int rung(const int cur_rung=0) const {return _feats[0]->rung(cur_rung + 1);}
 
@@ -194,27 +193,27 @@ public:
     virtual void set_parameters(const double* params){return;}
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief The expression of the feature
+     * @brief A human readable equation representing the feature
      *
      * @param params parameter values for non-linear operations
      * @return feature expression
@@ -230,10 +229,10 @@ public:
     }
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the latexified expression
      */
     inline std::string get_latex_expr(const double* params, const int depth=1) const
@@ -250,7 +249,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     inline std::string matlab_fxn_expr(const double* params, const int depth=1) const
@@ -269,7 +268,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     virtual void set_bounds(double* lb, double* ub, const int depth=1) const;
 
@@ -277,7 +276,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     virtual void initialize_params(double* params, const int depth = 1) const;
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/parameterized_subtract.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/parameterized_subtract.hpp
index 01e2fda464d2dfa8dce1930ecaa06c690e4c4e0c..075b18ab67ed7111ab25ac6409e6a30a717f872b 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/parameterized_subtract.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/parameterized_subtract.hpp
@@ -57,9 +57,9 @@ public:
      *
      * @param feat_1 shared_ptr of the feature to operate on (A)
      * @param feat_2 shared_ptr of the feature to operate on (B)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     SubParamNode(
@@ -77,7 +77,7 @@ public:
      *
      * @param feat_1 shared_ptr of the feature to operate on (A)
      * @param feat_2 shared_ptr of the feature to operate on (B)
-     * @param feat_ind Index of the new feature
+     * @param feat_ind (int) Index of the new feature
      * @param optimizer The optimizer to find the optimal parameters for the features
      */
     SubParamNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind, std::shared_ptr<NLOptimizer> optimizer);
@@ -89,9 +89,9 @@ public:
      *
      * @param feat_1 shared_ptr of the feature to operate on (A)
      * @param feat_2 shared_ptr of the feature to operate on (B)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     SubParamNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind, double const l_bound=1e-50, const double u_bound=1e50);
 
@@ -103,19 +103,19 @@ public:
 
     // DocString: sub_param_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_value(int offset=-1, const bool for_comp=false) const {set_value(_params.data(), offset, for_comp);}
 
     // DocString: sub_param_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     inline void set_test_value(int offset=-1, const bool for_comp=false) const {set_test_value(_params.data(), offset, for_comp);}
 
@@ -124,7 +124,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -135,7 +135,7 @@ public:
      * @details If the feature is not already stored in memory, then calculate the feature and return the pointer to the data
      *
      * @param offset(int) the integer value to offset the location in the temporary storage array
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      *
      * @return pointer to the feature's test values
      */
@@ -148,13 +148,13 @@ public:
 
     // DocString: sub_param_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human-readable equation representing the feature
      */
     inline std::string expr() const {return expr(_params.data());}
 
     // DocString: sub_param_node_get_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const {return get_latex_expr(_params.data());}
 
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.hpp
index 4bee0cbcf27f196a1275fdaca94a0587bbc8f160..982c81d9dc32c3e37b4d719a6c76564d7ce320b8 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.hpp
@@ -44,9 +44,9 @@ public:
      * @brief Constructor
      * @details Constructs the Node from an array of features to operate on
      *
-     * @param feat_1 shared_ptr of the first feature to operate on (A)
-     * @param feat_2 shared_ptr of the second feature to operate on (B)
-     * @param feat_ind Index of the new feature
+     * @param feat_1 (Node) shared_ptr of the first feature to operate on (A)
+     * @param feat_2 (Node) shared_ptr of the second feature to operate on (B)
+     * @param feat_ind (int) Index of the new feature
      */
     SubNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind);
 
@@ -55,11 +55,11 @@ public:
      * @brief Constructor
      * @details Constructs the Node from node pointer of the features to operate on
      *
-     * @param feat_1 shared_ptr of the first feature to operate on (A)
-     * @param feat_2 shared_ptr of the second feature to operate on (B)
-     * @param feat_ind Index of the new feature
-     * @param l_bound Minimum absolute value allowed for the feature.
-     * @param u_bound Maximum absolute value allowed for the feature.
+     * @param feat_1 (Node) shared_ptr of the first feature to operate on (A)
+     * @param feat_2 (Node) shared_ptr of the second feature to operate on (B)
+     * @param feat_ind (int) Index of the new feature
+     * @param l_bound (double) Minimum absolute value allowed for the feature.
+     * @param u_bound (double) Maximum absolute value allowed for the feature.
      */
     SubNode(const node_ptr feat_1, const node_ptr feat_2, const unsigned long int feat_ind, const double l_bound, const double u_bound);
 
@@ -77,7 +77,7 @@ public:
 
     // DocString: sub_node_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief A human-readable equation representing the feature
      */
     inline std::string expr() const
     {
@@ -90,7 +90,7 @@ public:
 
     // DocString: sub_node_latex_expr
     /**
-     * @brief Get the expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      */
     inline std::string get_latex_expr() const
     {
@@ -103,27 +103,26 @@ public:
 
     // DocString: sub_node_set_value
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: sub_node_set_test_value
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      */
     virtual void set_test_value(int offset=-1, const bool for_comp=false) const;
 
     // DocString: sub_node_rung
     /**
-     * @brief return the rung of the feature
+     * @brief Return the rung of the feature (the height of the binary expression tree + 1)
      *
-     * @param cur_rung The rung current rung of the feature tree (used to recursively calculate rung)
      */
     inline int rung(const int cur_rung=0) const {return std::max(_feats[0]->rung(cur_rung + 1), _feats[1]->rung(cur_rung + 1));}
 
@@ -206,30 +205,30 @@ public:
     virtual void set_parameters(const double* params){return;}
 
     /**
-     * @brief Set the values of the training data for the feature inside of the value storage arrays
+     * @brief Set the value of all training samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief Set the values of the test data for the feature inside of the value storage arrays
+     * @brief Set the value of all test samples for the feature inside the central data storage array
      *
-     * @param params pointer to the parameter values
-     * @param offset(int) Key to determine which part of the temporary storage array to look into
-     * @param for_comp(bool) If true then the evaluation is for comparing features
+     * @param params pointer to the non-linear parameter values
+     * @param offset (int) Where the current node is in the binary expression tree relative to other nodes at the same depth
+     * @param for_comp (bool) If true then the evaluation is used for comparing features
      * @param depth the max depth of paramterization (in the binary expression tree)
      */
     void set_test_value(const double* params, int offset=-1, const bool for_comp=false, const int depth=1) const;
 
     /**
-     * @brief The expression of the feature
+     * @brief A human-readable equation representing the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return feature expression
      */
     inline std::string expr(const double* params, const int depth=1) const
@@ -243,10 +242,10 @@ public:
     }
 
     /**
-     * @brief Get the latexified expression for the overall feature (From root node down)
+     * @brief Get the valid LaTeX expression that represents the feature
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return the latexified expression
      */
     inline std::string get_latex_expr(const double* params, const int depth=1) const
@@ -263,7 +262,7 @@ public:
      * @brief Get the string that corresponds to the code needed to evaluate the node in matlab
      *
      * @param params parameter values for non-linear operations
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      * @return The matlab code for the feature
      */
     inline std::string matlab_fxn_expr(const double* params, const int depth=1) const
@@ -282,7 +281,7 @@ public:
      * @param lb pointer to the lower bounds data
      * @param ub pointer to the upper bounds data
      * @param from_parent How many parameters are between the start of this node's parameters and its parent
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void set_bounds(double* lb, double* ub, const int depth=1) const;
 
@@ -290,7 +289,7 @@ public:
      * @brief Set the bounds for the nl parameterization
      *
      * @param params pointer to the parameters vector
-     * @param depth the current depth of the node on the Binary expression tree
+     * @param depth the current depth of the node on the binary expression tree
      */
     void initialize_params(double* params, const int depth = 1) const;
 
diff --git a/src/python/bindings_docstring_keyed.cpp b/src/python/bindings_docstring_keyed.cpp
index e0dba2a75344513f929a491f27b476693f220035..67754695a521fe4dec42bae8392d629313592a77 100644
--- a/src/python/bindings_docstring_keyed.cpp
+++ b/src/python/bindings_docstring_keyed.cpp
@@ -341,9 +341,9 @@ void sisso::feature_creation::registerUnit()
         .def(init<std::map<std::string, double>>((arg("self"), arg("dct")), "@DocString_unit_init_dict@"))
         .def(init<std::string>((arg("self"), arg("unit_str")), "@DocString_unit_init_str@"))
         .def(init<Unit&>((arg("self"), arg("o")), "@DocString_unit_init_unit@"))
-        .def("__str__", &Unit::toString, "@DocString_unit_str@")
-        .def("__repr__", &Unit::toString, "@DocString_unit_str@")
-        .def("inverse", &Unit::inverse, "@DocString_unit_inverse@")
+        .def("__str__", &Unit::toString, (arg("self")), "@DocString_unit_str@")
+        .def("__repr__", &Unit::toString, (arg("self")), "@DocString_unit_str@")
+        .def("inverse", &Unit::inverse, (arg("self")), "@DocString_unit_inverse@")
         .def(self * self)
         .def(self / self)
         .def(self *= self)
@@ -385,8 +385,8 @@ void sisso::feature_creation::registerUnit()
             .add_property("n_test_samp", &Node::n_test_samp, "@DocString_node_n_test_samp@")
             .add_property("feat_ind", &Node::feat_ind, "@DocString_node_feat_ind@")
             .add_property("arr_ind", &Node::arr_ind, "@DocString_node_arr_ind@")
-            .add_property("selected", &Node::selected, &Node::set_selected, "@DocString_node_set_selected@")
-            .add_property("d_mat_ind", &Node::d_mat_ind, &Node::set_d_mat_ind, "@DocString_node_set_d_mat_ind@")
+            .add_property("selected", &Node::selected, &Node::set_selected, "@DocString_node_selected@")
+            .add_property("d_mat_ind", &Node::d_mat_ind, &Node::set_d_mat_ind, "@DocString_node_d_mat_ind@")
             .add_property("value", &Node::value_py, "@DocString_node_value_py@")
             .add_property("test_value", &Node::test_value_py, "@DocString_node_test_value_py@")
             .add_property("primary_feat_decomp", &Node::primary_feature_decomp_py, "@DocString_node_primary_feature_decomp@")
@@ -399,10 +399,10 @@ void sisso::feature_creation::registerUnit()
             .def("is_nan", pure_virtual(&Node::is_nan), (arg("self")), "@DocString_node_is_nan@")
             .def("is_const", pure_virtual(&Node::is_const), (arg("self")), "@DocString_node_is_const@")
             .def("rung", pure_virtual(&Node::rung), (arg("self"), arg("cur_rung")), "@DocString_node_rung@")
-            .def("n_feats", pure_virtual(&Node::n_feats), "@DocString_node_n_feats@")
+            .def("n_feats", pure_virtual(&Node::n_feats), (arg("self")), "@DocString_node_n_feats@")
             .def("feat", pure_virtual(&Node::feat), (arg("self"), arg("feat_ind")), "@DocString_node_feat@")
             .def("n_leaves", pure_virtual(&Node::n_leaves), (arg("self"), arg("cur_n_leaves")), "@DocString_node_n_leaves@")
-            .def("x_in_expr_list", pure_virtual(&Node::get_x_in_expr_list), "@DocString_node_x_in_expr@")
+            .def("x_in_expr_list", pure_virtual(&Node::get_x_in_expr_list), (arg("self")), "@DocString_node_x_in_expr@")
         ;
     }
 #else
@@ -424,14 +424,14 @@ void sisso::feature_creation::registerUnit()
             .add_property("latex_expr", &Node::latex_expr, "@DocString_node_latex_expr@")
             .def("reindex", reindex_1, (arg("self"), arg("feat_ind")), "@DocString_node_reindex_1@")
             .def("reindex", reindex_2, (arg("self"), arg("feat_ind"), arg("arr_ind")), "@DocString_node_reindex_2@")
-            .def("unit", pure_virtual(&Node::unit), "@DocString_node_unit@")
-            .def("is_nan", pure_virtual(&Node::is_nan), "@DocString_node_is_nan@")
-            .def("is_const", pure_virtual(&Node::is_const), "@DocString_node_is_const@")
-            .def("rung", pure_virtual(&Node::rung), "@DocString_node_rung@")
-            .def("n_feats", pure_virtual(&Node::n_feats), "@DocString_node_n_feats@")
+            .def("unit", pure_virtual(&Node::unit), (arg("self")), "@DocString_node_unit@")
+            .def("is_nan", pure_virtual(&Node::is_nan), (arg("self")), "@DocString_node_is_nan@")
+            .def("is_const", pure_virtual(&Node::is_const), (arg("self")), "@DocString_node_is_const@")
+            .def("rung", pure_virtual(&Node::rung), (arg("self"), arg("cur_rung")), "@DocString_node_rung@")
+            .def("n_feats", pure_virtual(&Node::n_feats), (arg("self")), "@DocString_node_n_feats@")
             .def("feat", pure_virtual(&Node::feat), (arg("self"), arg("feat_ind")), "@DocString_node_feat@")
             .def("n_leaves", pure_virtual(&Node::n_leaves), (arg("self"), arg("cur_n_leaves")), "@DocString_node_n_leaves@")
-            .def("x_in_expr_list", pure_virtual(&Node::get_x_in_expr_list), "@DocString_node_x_in_expr@")
+            .def("x_in_expr_list", pure_virtual(&Node::get_x_in_expr_list), (arg("self")), "@DocString_node_x_in_expr@")
         ;
     }
 #endif
@@ -443,16 +443,27 @@ void sisso::feature_creation::node::registerFeatureNode()
     std::string (FeatureNode::*expr_no_param)() const = &FeatureNode::expr;
 
     using namespace boost::python;
-    class_<FeatureNode, bases<Node>>("FeatureNode", init<int, std::string, np::ndarray, np::ndarray, Unit>( ))
-        .def(init<int, std::string, py::list, py::list, Unit>())
-        .def("is_nan", &FeatureNode::is_nan, "@DocString_feat_node_is_nan@")
-        .def("is_const", &FeatureNode::is_const, "@DocString_feat_node_is_const@")
+    class_<FeatureNode, bases<Node>>(
+        "FeatureNode",
+        init<int, std::string, np::ndarray, np::ndarray, Unit>(
+            (arg("self"), arg("feat_ind"), arg("expr"), arg("value"), arg("test_value"), arg("unit")),
+            "@DocString_feat_node_init_arr@"
+        )
+    )
+        .def(
+            init<int, std::string, py::list, py::list, Unit>(
+                (arg("self"), arg("feat_ind"), arg("expr"), arg("value"), arg("test_value"), arg("unit")),
+                "@DocString_feat_node_init_list@"
+            )
+        )
+        .def("is_nan", &FeatureNode::is_nan, (arg("self")), "@DocString_feat_node_is_nan@")
+        .def("is_const", &FeatureNode::is_const, (arg("self")), "@DocString_feat_node_is_const@")
         .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_feat_node_set_value@")
         .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_feat_node_set_test_value@")
         .add_property("expr", expr_no_param, "@DocString_feat_node_expr_const@")
         .add_property("unit", &FeatureNode::unit, "@DocString_feat_node_unit@")
         .add_property("rung", &FeatureNode::rung, "@DocString_feat_node_rung@")
-        .add_property("n_leaves", &FeatureNode::n_leaves, "@DocString_node_n_leaves@")
+        .add_property("n_leaves", &FeatureNode::n_leaves, "@DocString_feat_node_n_leaves@")
         .add_property("x_in_expr_list", &FeatureNode::get_x_in_expr_list, "@DocString_node_x_in_expr@")
     ;
 }
@@ -467,10 +478,22 @@ void sisso::feature_creation::node::registerModelNode()
     double (ModelNode::*eval_dict)(py::dict) = &ModelNode::eval_py;
 
     using namespace boost::python;
-    class_<ModelNode, bases<FeatureNode>>("ModelNode", init<int, int, std::string, std::string, std::string, std::string, std::vector<double>, std::vector<double>, std::vector<std::string>, Unit>((arg("self"), arg("feat_ind"), arg("rung"), arg("expr"), arg("latex_expr"), arg("expr_postfix"), arg("matlab_fxn_expr"), arg("value"), arg("test_value"), arg("x_in_expr_list"), arg("unit")), "@DocString_model_node_init@"))
+    class_<ModelNode, bases<FeatureNode>>(
+        "ModelNode",
+        init<int, int, std::string, std::string, std::string, std::string, py::list, py::list, py::list, Unit>(
+            (arg("self"), arg("feat_ind"), arg("rung"), arg("expr"), arg("latex_expr"), arg("expr_postfix"), arg("matlab_fxn_expr"), arg("value"), arg("test_value"), arg("x_in_expr_list"), arg("unit")),
+            "@DocString_model_node_init_list@"
+        )
+    )
+        .def(
+            init<int, int, std::string, std::string, std::string, std::string, np::ndarray, np::ndarray, py::list, Unit>(
+                (arg("self"), arg("feat_ind"), arg("rung"), arg("expr"), arg("latex_expr"), arg("expr_postfix"), arg("matlab_fxn_expr"), arg("value"), arg("test_value"), arg("x_in_expr_list"), arg("unit")),
+                "@DocString_model_node_init_arr@"
+            )
+        )
         .def(init<node_ptr>((arg("self"), arg("in_node")), "@DocString_model_node_init_node_ptr@"))
-        .def("is_nan", &ModelNode::is_nan, "@DocString_model_node_is_nan@")
-        .def("is_const", &ModelNode::is_const, "@DocString_model_node_is_const@")
+        .def("is_nan", &ModelNode::is_nan, (arg("self")), "@DocString_model_node_is_nan@")
+        .def("is_const", &ModelNode::is_const, (arg("self")), "@DocString_model_node_is_const@")
         .def("set_value", &ModelNode::set_value, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_model_node_set_value@")
         .def("set_test_value", &ModelNode::set_test_value, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_model_node_set_test_value@")
         .def("eval_many", eval_many_dict, (arg("self"), arg("x_in")), "@DocString_model_node_eval_many_dict@")
@@ -490,12 +513,19 @@ void sisso::feature_creation::node::registerAddNode()
     void (AddNode::*set_test_value_no_param)(int, bool) const = &AddNode::set_test_value;
     std::string (AddNode::*expr_no_param)() const = &AddNode::expr;
 
-    class_<AddNode, bases<OperatorNode<2>>>("AddNode", init<node_ptr, node_ptr, int, double, double>((arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_add_node_init@"))
+    class_<AddNode, bases<OperatorNode<2>>>(
+        "AddNode",
+        init<node_ptr, node_ptr, int, double, double>(
+            (arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+            "@DocString_add_node_init@"
+        )
+    )
         .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_add_node_set_value@")
-        .def("set_test_value", set_test_value_no_param, "@DocString_add_node_set_test_value@")
+        .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_add_node_set_test_value@")
         .add_property("expr", expr_no_param, "@DocString_add_node_expr@")
         .add_property("unit", &AddNode::unit, "@DocString_add_node_unit@")
         .add_property("rung", &AddNode::rung, "@DocString_add_node_rung@")
+
     ;
 }
 
@@ -505,9 +535,15 @@ void sisso::feature_creation::node::registerSubNode()
     void (SubNode::*set_test_value_no_param)(int, bool) const = &SubNode::set_test_value;
     std::string (SubNode::*expr_no_param)() const = &SubNode::expr;
 
-    class_<SubNode, bases<OperatorNode<2>>>("SubNode", init<node_ptr, node_ptr, int, double, double>((arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_sub_node_init@"))
+    class_<SubNode, bases<OperatorNode<2>>>(
+        "SubNode",
+        init<node_ptr, node_ptr, int, double, double>(
+            (arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+            "@DocString_sub_node_init@"
+        )
+    )
         .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sub_node_set_value@")
-        .def("set_test_value", set_test_value_no_param, "@DocString_sub_node_set_test_value@")
+        .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sub_node_set_test_value@")
         .add_property("expr", expr_no_param, "@DocString_sub_node_expr@")
         .add_property("unit", &SubNode::unit, "@DocString_sub_node_unit@")
         .add_property("rung", &SubNode::rung, "@DocString_sub_node_rung@")
@@ -520,9 +556,15 @@ void sisso::feature_creation::node::registerDivNode()
     void (DivNode::*set_test_value_no_param)(int, bool) const = &DivNode::set_test_value;
     std::string (DivNode::*expr_no_param)() const = &DivNode::expr;
 
-    class_<DivNode, bases<OperatorNode<2>>>("DivNode", init<node_ptr, node_ptr, int, double, double>((arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_div_node_init@"))
+    class_<DivNode, bases<OperatorNode<2>>>(
+        "DivNode",
+        init<node_ptr, node_ptr, int, double, double>(
+            (arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+            "@DocString_div_node_init@"
+        )
+    )
         .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_div_node_set_value@")
-        .def("set_test_value", set_test_value_no_param, "@DocString_div_node_set_test_value@")
+        .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_div_node_set_test_value@")
         .add_property("expr", expr_no_param, "@DocString_div_node_expr@")
         .add_property("unit", &DivNode::unit, "@DocString_div_node_unit@")
         .add_property("rung", &DivNode::rung, "@DocString_div_node_rung@")
@@ -535,9 +577,15 @@ void sisso::feature_creation::node::registerMultNode()
     void (MultNode::*set_test_value_no_param)(int, bool) const = &MultNode::set_test_value;
     std::string (MultNode::*expr_no_param)() const = &MultNode::expr;
 
-    class_<MultNode, bases<OperatorNode<2>>>("MultNode", init<node_ptr, node_ptr, int, double, double>((arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_mult_node_init@"))
+    class_<MultNode, bases<OperatorNode<2>>>(
+        "MultNode",
+        init<node_ptr, node_ptr, int, double, double>(
+            (arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+            "@DocString_mult_node_init@"
+        )
+    )
         .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_mult_node_set_value@")
-        .def("set_test_value", set_test_value_no_param, "@DocString_mult_node_set_test_value@")
+        .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_mult_node_set_test_value@")
         .add_property("expr", expr_no_param, "@DocString_mult_node_expr@")
         .add_property("unit", &MultNode::unit, "@DocString_mult_node_unit@")
         .add_property("rung", &MultNode::rung, "@DocString_mult_node_rung@")
@@ -550,9 +598,15 @@ void sisso::feature_creation::node::registerAbsDiffNode()
     void (AbsDiffNode::*set_test_value_no_param)(int, bool) const = &AbsDiffNode::set_test_value;
     std::string (AbsDiffNode::*expr_no_param)() const = &AbsDiffNode::expr;
 
-    class_<AbsDiffNode, bases<OperatorNode<2>>>("AbsDiffNode", init<node_ptr, node_ptr, int, double, double>((arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_abs_diff_node_init@"))
+    class_<AbsDiffNode, bases<OperatorNode<2>>>(
+        "AbsDiffNode",
+        init<node_ptr, node_ptr, int, double, double>(
+            (arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+            "@DocString_abs_diff_node_init@"
+        )
+    )
         .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_abs_diff_node_set_value@")
-        .def("set_test_value", set_test_value_no_param, "@DocString_abs_diff_node_set_test_value@")
+        .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_abs_diff_node_set_test_value@")
         .add_property("expr", expr_no_param, "@DocString_abs_diff_node_expr@")
         .add_property("unit", &AbsDiffNode::unit, "@DocString_abs_diff_node_unit@")
         .add_property("rung", &AbsDiffNode::rung, "@DocString_abs_diff_node_rung@")
@@ -565,9 +619,15 @@ void sisso::feature_creation::node::registerAbsNode()
     void (AbsNode::*set_test_value_no_param)(int, bool) const = &AbsNode::set_test_value;
     std::string (AbsNode::*expr_no_param)() const = &AbsNode::expr;
 
-    class_<AbsNode, bases<OperatorNode<1>>>("AbsNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_abs_node_init@"))
+    class_<AbsNode, bases<OperatorNode<1>>>(
+        "AbsNode",
+        init<node_ptr, int, double, double>(
+            (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+            "@DocString_abs_node_init@"
+        )
+    )
         .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_abs_node_set_value@")
-        .def("set_test_value", set_test_value_no_param, "@DocString_abs_node_set_test_value@")
+        .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_abs_node_set_test_value@")
         .add_property("expr", expr_no_param, "@DocString_abs_node_expr@")
         .add_property("unit", &AbsNode::unit, "@DocString_abs_node_unit@")
         .add_property("rung", &AbsNode::rung, "@DocString_abs_node_rung@")
@@ -580,9 +640,15 @@ void sisso::feature_creation::node::registerInvNode()
     void (InvNode::*set_test_value_no_param)(int, bool) const = &InvNode::set_test_value;
     std::string (InvNode::*expr_no_param)() const = &InvNode::expr;
 
-    class_<InvNode, bases<OperatorNode<1>>>("InvNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_inv_node_init@"))
+    class_<InvNode, bases<OperatorNode<1>>>(
+        "InvNode",
+        init<node_ptr, int, double, double>(
+            (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+            "@DocString_inv_node_init@"
+        )
+    )
         .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_inv_node_set_value@")
-        .def("set_test_value", set_test_value_no_param, "@DocString_inv_node_set_test_value@")
+        .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_inv_node_set_test_value@")
         .add_property("expr", expr_no_param, "@DocString_inv_node_expr@")
         .add_property("unit", &InvNode::unit, "@DocString_inv_node_unit@")
         .add_property("rung", &InvNode::rung, "@DocString_inv_node_rung@")
@@ -595,9 +661,15 @@ void sisso::feature_creation::node::registerLogNode()
     void (LogNode::*set_test_value_no_param)(int, bool) const = &LogNode::set_test_value;
     std::string (LogNode::*expr_no_param)() const = &LogNode::expr;
 
-    class_<LogNode, bases<OperatorNode<1>>>("LogNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_log_node_init@"))
+    class_<LogNode, bases<OperatorNode<1>>>(
+        "LogNode",
+        init<node_ptr, int, double, double>(
+            (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+            "@DocString_log_node_init@"
+        )
+    )
         .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_log_node_set_value@")
-        .def("set_test_value", set_test_value_no_param, "@DocString_log_node_set_test_value@")
+        .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_log_node_set_test_value@")
         .add_property("expr", expr_no_param, "@DocString_log_node_expr@")
         .add_property("unit", &LogNode::unit, "@DocString_log_node_unit@")
         .add_property("rung", &LogNode::rung, "@DocString_log_node_rung@")
@@ -610,9 +682,15 @@ void sisso::feature_creation::node::registerExpNode()
     void (ExpNode::*set_test_value_no_param)(int, bool) const = &ExpNode::set_test_value;
     std::string (ExpNode::*expr_no_param)() const = &ExpNode::expr;
 
-    class_<ExpNode, bases<OperatorNode<1>>>("ExpNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_exp_node_init@"))
+    class_<ExpNode, bases<OperatorNode<1>>>(
+        "ExpNode",
+        init<node_ptr, int, double, double>(
+            (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+            "@DocString_exp_node_init@"
+        )
+    )
         .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_exp_node_set_value@")
-        .def("set_test_value", set_test_value_no_param, "@DocString_exp_node_set_test_value@")
+        .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_exp_node_set_test_value@")
         .add_property("expr", expr_no_param, "@DocString_exp_node_expr@")
         .add_property("unit", &ExpNode::unit, "@DocString_exp_node_unit@")
         .add_property("rung", &ExpNode::rung, "@DocString_exp_node_rung@")
@@ -625,9 +703,15 @@ void sisso::feature_creation::node::registerNegExpNode()
     void (NegExpNode::*set_test_value_no_param)(int, bool) const = &NegExpNode::set_test_value;
     std::string (NegExpNode::*expr_no_param)() const = &NegExpNode::expr;
 
-    class_<NegExpNode, bases<OperatorNode<1>>>("NegExpNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_neg_exp_node_init@"))
+    class_<NegExpNode, bases<OperatorNode<1>>>(
+        "NegExpNode",
+        init<node_ptr, int, double, double>(
+            (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+            "@DocString_neg_exp_node_init@"
+        )
+    )
         .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_neg_exp_node_set_value@")
-        .def("set_test_value", set_test_value_no_param, "@DocString_neg_exp_node_set_test_value@")
+        .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_neg_exp_node_set_test_value@")
         .add_property("expr", expr_no_param, "@DocString_neg_exp_node_expr@")
         .add_property("unit", &NegExpNode::unit, "@DocString_neg_exp_node_unit@")
         .add_property("rung", &NegExpNode::rung, "@DocString_neg_exp_node_rung@")
@@ -640,9 +724,15 @@ void sisso::feature_creation::node::registerSinNode()
     void (SinNode::*set_test_value_no_param)(int, bool) const = &SinNode::set_test_value;
     std::string (SinNode::*expr_no_param)() const = &SinNode::expr;
 
-    class_<SinNode, bases<OperatorNode<1>>>("SinNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_sin_node_init@"))
+    class_<SinNode, bases<OperatorNode<1>>>(
+        "SinNode",
+        init<node_ptr, int, double, double>(
+            (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+            "@DocString_sin_node_init@"
+        )
+    )
         .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sin_node_set_value@")
-        .def("set_test_value", set_test_value_no_param, "@DocString_sin_node_set_test_value@")
+        .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sin_node_set_test_value@")
         .add_property("expr", expr_no_param, "@DocString_sin_node_expr@")
         .add_property("unit", &SinNode::unit, "@DocString_sin_node_unit@")
         .add_property("rung", &SinNode::rung, "@DocString_sin_node_rung@")
@@ -655,9 +745,15 @@ void sisso::feature_creation::node::registerCosNode()
     void (CosNode::*set_test_value_no_param)(int, bool) const = &CosNode::set_test_value;
     std::string (CosNode::*expr_no_param)() const = &CosNode::expr;
 
-    class_<CosNode, bases<OperatorNode<1>>>("CosNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_cos_node_init@"))
+    class_<CosNode, bases<OperatorNode<1>>>(
+        "CosNode",
+        init<node_ptr, int, double, double>(
+            (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+            "@DocString_cos_node_init@"
+        )
+    )
         .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_cos_node_set_value@")
-        .def("set_test_value", set_test_value_no_param, "@DocString_cos_node_set_test_value@")
+        .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_cos_node_set_test_value@")
         .add_property("expr", expr_no_param, "@DocString_cos_node_expr@")
         .add_property("unit", &CosNode::unit, "@DocString_cos_node_unit@")
         .add_property("rung", &CosNode::rung, "@DocString_cos_node_rung@")
@@ -670,9 +766,15 @@ void sisso::feature_creation::node::registerCbNode()
     void (CbNode::*set_test_value_no_param)(int, bool) const = &CbNode::set_test_value;
     std::string (CbNode::*expr_no_param)() const = &CbNode::expr;
 
-    class_<CbNode, bases<OperatorNode<1>>>("CbNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_cb_node_init@"))
+    class_<CbNode, bases<OperatorNode<1>>>(
+        "CbNode",
+        init<node_ptr, int, double, double>(
+            (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+            "@DocString_cb_node_init@"
+        )
+    )
         .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_cb_node_set_value@")
-        .def("set_test_value", set_test_value_no_param, "@DocString_cb_node_set_test_value@")
+        .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_cb_node_set_test_value@")
         .add_property("expr", expr_no_param, "@DocString_cb_node_expr@")
         .add_property("unit", &CbNode::unit, "@DocString_cb_node_unit@")
         .add_property("rung", &CbNode::rung, "@DocString_cb_node_rung@")
@@ -685,9 +787,15 @@ void sisso::feature_creation::node::registerCbrtNode()
     void (CbrtNode::*set_test_value_no_param)(int, bool) const = &CbrtNode::set_test_value;
     std::string (CbrtNode::*expr_no_param)() const = &CbrtNode::expr;
 
-    class_<CbrtNode, bases<OperatorNode<1>>>("CbrtNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_cbrt_node_init@"))
+    class_<CbrtNode, bases<OperatorNode<1>>>(
+        "CbrtNode",
+        init<node_ptr, int, double, double>(
+            (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+            "@DocString_cbrt_node_init@"
+        )
+    )
         .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_cbrt_node_set_value@")
-        .def("set_test_value", set_test_value_no_param, "@DocString_cbrt_node_set_test_value@")
+        .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_cbrt_node_set_test_value@")
         .add_property("expr", expr_no_param, "@DocString_cbrt_node_expr@")
         .add_property("unit", &CbrtNode::unit, "@DocString_cbrt_node_unit@")
         .add_property("rung", &CbrtNode::rung, "@DocString_cbrt_node_rung@")
@@ -700,9 +808,15 @@ void sisso::feature_creation::node::registerSqNode()
     void (SqNode::*set_test_value_no_param)(int, bool) const = &SqNode::set_test_value;
     std::string (SqNode::*expr_no_param)() const = &SqNode::expr;
 
-    class_<SqNode, bases<OperatorNode<1>>>("SqNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_sq_node_init@"))
+    class_<SqNode, bases<OperatorNode<1>>>(
+        "SqNode",
+        init<node_ptr, int, double, double>(
+            (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+            "@DocString_sq_node_init@"
+        )
+    )
         .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sq_node_set_value@")
-        .def("set_test_value", set_test_value_no_param, "@DocString_sq_node_set_test_value@")
+        .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sq_node_set_test_value@")
         .add_property("expr", expr_no_param, "@DocString_sq_node_expr@")
         .add_property("unit", &SqNode::unit, "@DocString_sq_node_unit@")
         .add_property("rung", &SqNode::rung, "@DocString_sq_node_rung@")
@@ -715,9 +829,15 @@ void sisso::feature_creation::node::registerSqrtNode()
     void (SqrtNode::*set_test_value_no_param)(int, bool) const = &SqrtNode::set_test_value;
     std::string (SqrtNode::*expr_no_param)() const = &SqrtNode::expr;
 
-    class_<SqrtNode, bases<OperatorNode<1>>>("SqrtNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_sqrt_node_init@"))
+    class_<SqrtNode, bases<OperatorNode<1>>>(
+        "SqrtNode",
+        init<node_ptr, int, double, double>(
+            (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+            "@DocString_sqrt_node_init@"
+        )
+    )
         .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sqrt_node_set_value@")
-        .def("set_test_value", set_test_value_no_param, "@DocString_sqrt_node_set_test_value@")
+        .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sqrt_node_set_test_value@")
         .add_property("expr", expr_no_param, "@DocString_sqrt_node_expr@")
         .add_property("unit", &SqrtNode::unit, "@DocString_sqrt_node_unit@")
         .add_property("rung", &SqrtNode::rung, "@DocString_sqrt_node_rung@")
@@ -730,9 +850,15 @@ void sisso::feature_creation::node::registerSixPowNode()
     void (SixPowNode::*set_test_value_no_param)(int, bool) const = &SixPowNode::set_test_value;
     std::string (SixPowNode::*expr_no_param)() const = &SixPowNode::expr;
 
-    class_<SixPowNode, bases<OperatorNode<1>>>("SixPowNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_six_pow_node_init@"))
+    class_<SixPowNode, bases<OperatorNode<1>>>(
+        "SixPowNode",
+        init<node_ptr, int, double, double>(
+            (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+            "@DocString_six_pow_node_init@"
+        )
+    )
         .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_six_pow_node_set_value@")
-        .def("set_test_value", set_test_value_no_param, "@DocString_six_pow_node_set_test_value@")
+        .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_six_pow_node_set_test_value@")
         .add_property("expr", expr_no_param, "@DocString_six_pow_node_expr@")
         .add_property("unit", &SixPowNode::unit, "@DocString_six_pow_node_unit@")
         .add_property("rung", &SixPowNode::rung, "@DocString_six_pow_node_rung@")
@@ -745,12 +871,18 @@ void sisso::feature_creation::node::registerSixPowNode()
         void (AddParamNode::*set_test_value_no_param)(int, bool) const = &AddParamNode::set_test_value;
         std::string (AddParamNode::*expr_no_param)() const = &AddParamNode::expr;
 
-        class_<AddParamNode, bases<AddNode>>("AddParamNode", init<node_ptr, node_ptr, int, double, double>((arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_add_param_node_init@"))
+        class_<AddParamNode, bases<AddNode>>(
+            "AddParamNode",
+            init<node_ptr, node_ptr, int, double, double>(
+                (arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+                "@DocString_add_param_node_init@"
+            )
+        )
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_add_param_node_set_value@")
-            .def("set_test_value", set_test_value_no_param, "@DocString_add_param_node_set_test_value@")
+            .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_add_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_add_param_node_expr@")
-            .add_property("unit", &AddParamNode::unit, "@DocString_add_param_node_unit@")
-            .add_property("rung", &AddParamNode::rung, "@DocString_add_param_node_rung@")
+            // .add_property("unit", &AddParamNode::unit, "@DocString_add_param_node_unit@")
+            // .add_property("rung", &AddParamNode::rung, "@DocString_add_param_node_rung@")
         ;
     }
 
@@ -760,12 +892,18 @@ void sisso::feature_creation::node::registerSixPowNode()
         void (SubParamNode::*set_test_value_no_param)(int, bool) const = &SubParamNode::set_test_value;
         std::string (SubParamNode::*expr_no_param)() const = &SubParamNode::expr;
 
-        class_<SubParamNode, bases<SubNode>>("SubParamNode", init<node_ptr, node_ptr, int, double, double>((arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_sub_param_node_init@"))
+        class_<SubParamNode, bases<SubNode>>(
+            "SubParamNode",
+            init<node_ptr, node_ptr, int, double, double>(
+                (arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+                "@DocString_sub_param_node_init@"
+            )
+        )
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sub_param_node_set_value@")
-            .def("set_test_value", set_test_value_no_param, "@DocString_sub_param_node_set_test_value@")
+            .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sub_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_sub_param_node_expr@")
-            .add_property("unit", &SubParamNode::unit, "@DocString_sub_param_node_unit@")
-            .add_property("rung", &SubParamNode::rung, "@DocString_sub_param_node_rung@")
+            // .add_property("unit", &SubParamNode::unit, "@DocString_sub_param_node_unit@")
+            // .add_property("rung", &SubParamNode::rung, "@DocString_sub_param_node_rung@")
         ;
     }
 
@@ -775,12 +913,18 @@ void sisso::feature_creation::node::registerSixPowNode()
         void (DivParamNode::*set_test_value_no_param)(int, bool) const = &DivParamNode::set_test_value;
         std::string (DivParamNode::*expr_no_param)() const = &DivParamNode::expr;
 
-        class_<DivParamNode, bases<DivNode>>("DivParamNode", init<node_ptr, node_ptr, int, double, double>((arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_div_param_node_init@"))
+        class_<DivParamNode, bases<DivNode>>(
+            "DivParamNode",
+            init<node_ptr, node_ptr, int, double, double>(
+                (arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+                "@DocString_div_param_node_init@"
+            )
+        )
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_div_param_node_set_value@")
-            .def("set_test_value", set_test_value_no_param, "@DocString_div_param_node_set_test_value@")
+            .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_div_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_div_param_node_expr@")
-            .add_property("unit", &DivParamNode::unit, "@DocString_div_param_node_unit@")
-            .add_property("rung", &DivParamNode::rung, "@DocString_div_param_node_rung@")
+            // .add_property("unit", &DivParamNode::unit, "@DocString_div_param_node_unit@")
+            // .add_property("rung", &DivParamNode::rung, "@DocString_div_param_node_rung@")
         ;
     }
 
@@ -790,12 +934,18 @@ void sisso::feature_creation::node::registerSixPowNode()
         void (MultParamNode::*set_test_value_no_param)(int, bool) const = &MultParamNode::set_test_value;
         std::string (MultParamNode::*expr_no_param)() const = &MultParamNode::expr;
 
-        class_<MultParamNode, bases<MultNode>>("MultParamNode", init<node_ptr, node_ptr, int, double, double>((arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_mult_param_node_init@"))
+        class_<MultParamNode, bases<MultNode>>(
+            "MultParamNode",
+            init<node_ptr, node_ptr, int, double, double>(
+                (arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+                "@DocString_mult_param_node_init@"
+            )
+        )
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_mult_param_node_set_value@")
-            .def("set_test_value", set_test_value_no_param, "@DocString_mult_param_node_set_test_value@")
+            .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_mult_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_mult_param_node_expr@")
-            .add_property("unit", &MultParamNode::unit, "@DocString_mult_param_node_unit@")
-            .add_property("rung", &MultParamNode::rung, "@DocString_mult_param_node_rung@")
+            // .add_property("unit", &MultParamNode::unit, "@DocString_mult_param_node_unit@")
+            // .add_property("rung", &MultParamNode::rung, "@DocString_mult_param_node_rung@")
         ;
     }
 
@@ -805,12 +955,18 @@ void sisso::feature_creation::node::registerSixPowNode()
         void (AbsDiffParamNode::*set_test_value_no_param)(int, bool) const = &AbsDiffParamNode::set_test_value;
         std::string (AbsDiffParamNode::*expr_no_param)() const = &AbsDiffParamNode::expr;
 
-        class_<AbsDiffParamNode, bases<AbsDiffNode>>("AbsDiffParamNode", init<node_ptr, node_ptr, int, double, double>((arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_abs_diff_param_node_init@"))
+        class_<AbsDiffParamNode, bases<AbsDiffNode>>(
+            "AbsDiffParamNode",
+            init<node_ptr, node_ptr, int, double, double>(
+                (arg("self"), arg("feat_1"), arg("feat_2"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+                "@DocString_abs_diff_param_node_init@"
+            )
+        )
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_abs_diff_param_node_set_value@")
-            .def("set_test_value", set_test_value_no_param, "@DocString_abs_diff_param_node_set_test_value@")
+            .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_abs_diff_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_abs_diff_param_node_expr@")
-            .add_property("unit", &AbsDiffParamNode::unit, "@DocString_abs_diff_param_node_unit@")
-            .add_property("rung", &AbsDiffParamNode::rung, "@DocString_abs_diff_param_node_rung@")
+            // .add_property("unit", &AbsDiffParamNode::unit, "@DocString_abs_diff_param_node_unit@")
+            // .add_property("rung", &AbsDiffParamNode::rung, "@DocString_abs_diff_param_node_rung@")
         ;
     }
 
@@ -820,12 +976,18 @@ void sisso::feature_creation::node::registerSixPowNode()
         void (AbsParamNode::*set_test_value_no_param)(int, bool) const = &AbsParamNode::set_test_value;
         std::string (AbsParamNode::*expr_no_param)() const = &AbsParamNode::expr;
 
-        class_<AbsParamNode, bases<AbsNode>>("AbsParamNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_abs_param_node_init@"))
+        class_<AbsParamNode, bases<AbsNode>>(
+            "AbsParamNode",
+            init<node_ptr, int, double, double>(
+                (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+                "@DocString_abs_param_node_init@"
+            )
+        )
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_abs_param_node_set_value@")
-            .def("set_test_value", set_test_value_no_param, "@DocString_abs_param_node_set_test_value@")
+            .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_abs_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_abs_param_node_expr@")
-            .add_property("unit", &AbsParamNode::unit, "@DocString_abs_param_node_unit@")
-            .add_property("rung", &AbsParamNode::rung, "@DocString_abs_param_node_rung@")
+            // .add_property("unit", &AbsParamNode::unit, "@DocString_abs_param_node_unit@")
+            // .add_property("rung", &AbsParamNode::rung, "@DocString_abs_param_node_rung@")
         ;
     }
 
@@ -835,12 +997,18 @@ void sisso::feature_creation::node::registerSixPowNode()
         void (InvParamNode::*set_test_value_no_param)(int, bool) const = &InvParamNode::set_test_value;
         std::string (InvParamNode::*expr_no_param)() const = &InvParamNode::expr;
 
-        class_<InvParamNode, bases<InvNode>>("InvParamNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_inv_param_node_init@"))
+        class_<InvParamNode, bases<InvNode>>(
+            "InvParamNode",
+            init<node_ptr, int, double, double>(
+                (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+                "@DocString_inv_param_node_init@"
+            )
+        )
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_inv_param_node_set_value@")
-            .def("set_test_value", set_test_value_no_param, "@DocString_inv_param_node_set_test_value@")
+            .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_inv_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_inv_param_node_expr@")
-            .add_property("unit", &InvParamNode::unit, "@DocString_inv_param_node_unit@")
-            .add_property("rung", &InvParamNode::rung, "@DocString_inv_param_node_rung@")
+            // .add_property("unit", &InvParamNode::unit, "@DocString_inv_param_node_unit@")
+            // .add_property("rung", &InvParamNode::rung, "@DocString_inv_param_node_rung@")
         ;
     }
 
@@ -850,12 +1018,18 @@ void sisso::feature_creation::node::registerSixPowNode()
         void (LogParamNode::*set_test_value_no_param)(int, bool) const = &LogParamNode::set_test_value;
         std::string (LogParamNode::*expr_no_param)() const = &LogParamNode::expr;
 
-        class_<LogParamNode, bases<LogNode>>("LogParamNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_log_param_node_init@"))
+        class_<LogParamNode, bases<LogNode>>(
+            "LogParamNode",
+            init<node_ptr, int, double, double>(
+                (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+                "@DocString_log_param_node_init@"
+            )
+        )
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_log_param_node_set_value@")
-            .def("set_test_value", set_test_value_no_param, "@DocString_log_param_node_set_test_value@")
+            .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_log_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_log_param_node_expr@")
-            .add_property("unit", &LogParamNode::unit, "@DocString_log_param_node_unit@")
-            .add_property("rung", &LogParamNode::rung, "@DocString_log_param_node_rung@")
+            // .add_property("unit", &LogParamNode::unit, "@DocString_log_param_node_unit@")
+            // .add_property("rung", &LogParamNode::rung, "@DocString_log_param_node_rung@")
         ;
     }
 
@@ -865,12 +1039,18 @@ void sisso::feature_creation::node::registerSixPowNode()
         void (ExpParamNode::*set_test_value_no_param)(int, bool) const = &ExpParamNode::set_test_value;
         std::string (ExpParamNode::*expr_no_param)() const = &ExpParamNode::expr;
 
-        class_<ExpParamNode, bases<ExpNode>>("ExpParamNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_exp_param_node_init@"))
+        class_<ExpParamNode, bases<ExpNode>>(
+            "ExpParamNode",
+            init<node_ptr, int, double, double>(
+                (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+                "@DocString_exp_param_node_init@"
+            )
+        )
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_exp_param_node_set_value@")
-            .def("set_test_value", set_test_value_no_param, "@DocString_exp_param_node_set_test_value@")
+            .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_exp_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_exp_param_node_expr@")
-            .add_property("unit", &ExpParamNode::unit, "@DocString_exp_param_node_unit@")
-            .add_property("rung", &ExpParamNode::rung, "@DocString_exp_param_node_rung@")
+            // .add_property("unit", &ExpParamNode::unit, "@DocString_exp_param_node_unit@")
+            // .add_property("rung", &ExpParamNode::rung, "@DocString_exp_param_node_rung@")
         ;
     }
 
@@ -880,12 +1060,18 @@ void sisso::feature_creation::node::registerSixPowNode()
         void (NegExpParamNode::*set_test_value_no_param)(int, bool) const = &NegExpParamNode::set_test_value;
         std::string (NegExpParamNode::*expr_no_param)() const = &NegExpParamNode::expr;
 
-        class_<NegExpParamNode, bases<NegExpNode>>("NegExpParamNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_neg_exp_param_node_init@"))
+        class_<NegExpParamNode, bases<NegExpNode>>(
+            "NegExpParamNode",
+            init<node_ptr, int, double, double>(
+                (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+                "@DocString_neg_exp_param_node_init@"
+            )
+        )
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_neg_exp_param_node_set_value@")
-            .def("set_test_value", set_test_value_no_param, "@DocString_neg_exp_param_node_set_test_value@")
+            .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_neg_exp_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_neg_exp_param_node_expr@")
-            .add_property("unit", &NegExpParamNode::unit, "@DocString_neg_exp_param_node_unit@")
-            .add_property("rung", &NegExpParamNode::rung, "@DocString_neg_exp_param_node_rung@")
+            // .add_property("unit", &NegExpParamNode::unit, "@DocString_neg_exp_param_node_unit@")
+            // .add_property("rung", &NegExpParamNode::rung, "@DocString_neg_exp_param_node_rung@")
         ;
     }
 
@@ -895,12 +1081,18 @@ void sisso::feature_creation::node::registerSixPowNode()
         void (SinParamNode::*set_test_value_no_param)(int, bool) const = &SinParamNode::set_test_value;
         std::string (SinParamNode::*expr_no_param)() const = &SinParamNode::expr;
 
-        class_<SinParamNode, bases<SinNode>>("SinParamNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_sin_param_node_init@"))
+        class_<SinParamNode, bases<SinNode>>(
+            "SinParamNode",
+            init<node_ptr, int, double, double>(
+                (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+                "@DocString_sin_param_node_init@"
+            )
+        )
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sin_param_node_set_value@")
-            .def("set_test_value", set_test_value_no_param, "@DocString_sin_param_node_set_test_value@")
+            .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sin_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_sin_param_node_expr@")
-            .add_property("unit", &SinParamNode::unit, "@DocString_sin_param_node_unit@")
-            .add_property("rung", &SinParamNode::rung, "@DocString_sin_param_node_rung@")
+            // .add_property("unit", &SinParamNode::unit, "@DocString_sin_param_node_unit@")
+            // .add_property("rung", &SinParamNode::rung, "@DocString_sin_param_node_rung@")
         ;
     }
 
@@ -910,12 +1102,18 @@ void sisso::feature_creation::node::registerSixPowNode()
         void (CosParamNode::*set_test_value_no_param)(int, bool) const = &CosParamNode::set_test_value;
         std::string (CosParamNode::*expr_no_param)() const = &CosParamNode::expr;
 
-        class_<CosParamNode, bases<CosNode>>("CosParamNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_cos_param_node_init@"))
+        class_<CosParamNode, bases<CosNode>>(
+            "CosParamNode",
+            init<node_ptr, int, double, double>(
+                (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+                "@DocString_cos_param_node_init@"
+            )
+        )
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_cos_param_node_set_value@")
-            .def("set_test_value", set_test_value_no_param, "@DocString_cos_param_node_set_test_value@")
+            .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_cos_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_cos_param_node_expr@")
-            .add_property("unit", &CosParamNode::unit, "@DocString_cos_param_node_unit@")
-            .add_property("rung", &CosParamNode::rung, "@DocString_cos_param_node_rung@")
+            // .add_property("unit", &CosParamNode::unit, "@DocString_cos_param_node_unit@")
+            // .add_property("rung", &CosParamNode::rung, "@DocString_cos_param_node_rung@")
         ;
     }
 
@@ -925,12 +1123,18 @@ void sisso::feature_creation::node::registerSixPowNode()
         void (CbParamNode::*set_test_value_no_param)(int, bool) const = &CbParamNode::set_test_value;
         std::string (CbParamNode::*expr_no_param)() const = &CbParamNode::expr;
 
-        class_<CbParamNode, bases<CbNode>>("CbParamNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_cb_param_node_init@"))
+        class_<CbParamNode, bases<CbNode>>(
+            "CbParamNode",
+            init<node_ptr, int, double, double>(
+                (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+                "@DocString_cb_param_node_init@"
+            )
+        )
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_cb_param_node_set_value@")
-            .def("set_test_value", set_test_value_no_param, "@DocString_cb_param_node_set_test_value@")
+            .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_cb_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_cb_param_node_expr@")
-            .add_property("unit", &CbParamNode::unit, "@DocString_cb_param_node_unit@")
-            .add_property("rung", &CbParamNode::rung, "@DocString_cb_param_node_rung@")
+            // .add_property("unit", &CbParamNode::unit, "@DocString_cb_param_node_unit@")
+            // .add_property("rung", &CbParamNode::rung, "@DocString_cb_param_node_rung@")
         ;
     }
 
@@ -940,12 +1144,18 @@ void sisso::feature_creation::node::registerSixPowNode()
         void (CbrtParamNode::*set_test_value_no_param)(int, bool) const = &CbrtParamNode::set_test_value;
         std::string (CbrtParamNode::*expr_no_param)() const = &CbrtParamNode::expr;
 
-        class_<CbrtParamNode, bases<CbrtNode>>("CbrtParamNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_cbrt_param_node_init@"))
+        class_<CbrtParamNode, bases<CbrtNode>>(
+            "CbrtParamNode",
+            init<node_ptr, int, double, double>(
+                (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+                "@DocString_cbrt_param_node_init@"
+            )
+        )
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_cbrt_param_node_set_value@")
-            .def("set_test_value", set_test_value_no_param, "@DocString_cbrt_param_node_set_test_value@")
+            .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_cbrt_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_cbrt_param_node_expr@")
-            .add_property("unit", &CbrtParamNode::unit, "@DocString_cbrt_param_node_unit@")
-            .add_property("rung", &CbrtParamNode::rung, "@DocString_cbrt_param_node_rung@")
+            // .add_property("unit", &CbrtParamNode::unit, "@DocString_cbrt_param_node_unit@")
+            // .add_property("rung", &CbrtParamNode::rung, "@DocString_cbrt_param_node_rung@")
         ;
     }
 
@@ -955,12 +1165,18 @@ void sisso::feature_creation::node::registerSixPowNode()
         void (SqParamNode::*set_test_value_no_param)(int, bool) const = &SqParamNode::set_test_value;
         std::string (SqParamNode::*expr_no_param)() const = &SqParamNode::expr;
 
-        class_<SqParamNode, bases<SqNode>>("SqParamNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_sq_param_node_init@"))
+        class_<SqParamNode, bases<SqNode>>(
+            "SqParamNode",
+            init<node_ptr, int, double, double>(
+                (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+                "@DocString_sq_param_node_init@"
+            )
+        )
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sq_param_node_set_value@")
-            .def("set_test_value", set_test_value_no_param, "@DocString_sq_param_node_set_test_value@")
+            .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sq_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_sq_param_node_expr@")
-            .add_property("unit", &SqParamNode::unit, "@DocString_sq_param_node_unit@")
-            .add_property("rung", &SqParamNode::rung, "@DocString_sq_param_node_rung@")
+            // .add_property("unit", &SqParamNode::unit, "@DocString_sq_param_node_unit@")
+            // .add_property("rung", &SqParamNode::rung, "@DocString_sq_param_node_rung@")
         ;
     }
 
@@ -970,12 +1186,18 @@ void sisso::feature_creation::node::registerSixPowNode()
         void (SqrtParamNode::*set_test_value_no_param)(int, bool) const = &SqrtParamNode::set_test_value;
         std::string (SqrtParamNode::*expr_no_param)() const = &SqrtParamNode::expr;
 
-        class_<SqrtParamNode, bases<SqrtNode>>("SqrtParamNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_sqrt_param_node_init@"))
+        class_<SqrtParamNode, bases<SqrtNode>>(
+            "SqrtParamNode",
+            init<node_ptr, int, double, double>(
+                (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+                "@DocString_sqrt_param_node_init@"
+            )
+        )
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sqrt_param_node_set_value@")
-            .def("set_test_value", set_test_value_no_param, "@DocString_sqrt_param_node_set_test_value@")
+            .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_sqrt_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_sqrt_param_node_expr@")
-            .add_property("unit", &SqrtParamNode::unit, "@DocString_sqrt_param_node_unit@")
-            .add_property("rung", &SqrtParamNode::rung, "@DocString_sqrt_param_node_rung@")
+            // .add_property("unit", &SqrtParamNode::unit, "@DocString_sqrt_param_node_unit@")
+            // .add_property("rung", &SqrtParamNode::rung, "@DocString_sqrt_param_node_rung@")
         ;
     }
 
@@ -985,12 +1207,18 @@ void sisso::feature_creation::node::registerSixPowNode()
         void (SixPowParamNode::*set_test_value_no_param)(int, bool) const = &SixPowParamNode::set_test_value;
         std::string (SixPowParamNode::*expr_no_param)() const = &SixPowParamNode::expr;
 
-        class_<SixPowParamNode, bases<SixPowNode>>("SixPowParamNode", init<node_ptr, int, double, double>((arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")), "@DocString_six_pow_param_node_init@"))
+        class_<SixPowParamNode, bases<SixPowNode>>(
+            "SixPowParamNode",
+            init<node_ptr, int, double, double>(
+                (arg("self"), arg("feat"), arg("feat_ind"), arg("min_abs_feat_val"), arg("max_abs_feat_val")),
+                "@DocString_six_pow_param_node_init@"
+            )
+        )
             .def("set_value", set_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_six_pow_param_node_set_value@")
-            .def("set_test_value", set_test_value_no_param, "@DocString_six_pow_param_node_set_test_value@")
+            .def("set_test_value", set_test_value_no_param, (arg("self"), arg("offset"), arg("for_comp")), "@DocString_six_pow_param_node_set_test_value@")
             .add_property("expr", expr_no_param, "@DocString_six_pow_param_node_expr@")
-            .add_property("unit", &SixPowParamNode::unit, "@DocString_six_pow_param_node_unit@")
-            .add_property("rung", &SixPowParamNode::rung, "@DocString_six_pow_param_node_rung@")
+            // .add_property("unit", &SixPowParamNode::unit, "@DocString_six_pow_param_node_unit@")
+            // .add_property("rung", &SixPowParamNode::rung, "@DocString_six_pow_param_node_rung@")
         ;
     }
 #endif
@@ -1037,8 +1265,8 @@ void sisso::descriptor_identifier::registerModelRegressor()
 {
     class_<ModelRegressor, bases<Model>>("ModelRegressor", init<std::string>((arg("self"), arg("train_file")), "@DocString_model_reg_init_train@"))
         .def(init<std::string, std::string>((arg("self"), arg("train_file"), arg("test_file")), "@DocString_model_reg_init_test_train@"))
-        .def("__str__", &ModelRegressor::toString, "@DocString_model_reg_str@")
-        .def("__repr__", &ModelRegressor::toString, "@DocString_model_reg_str@")
+        .def("__str__", &ModelRegressor::toString, (arg("self")), "@DocString_model_reg_str@")
+        .def("__repr__", &ModelRegressor::toString, (arg("self")), "@DocString_model_reg_str@")
         .add_property("latex_str", &ModelRegressor::toLatexString, "@DocString_model_reg_latex_str@")
         .add_property("r2", &ModelRegressor::r2, "@DocString_model_reg_r2@")
         .add_property("test_r2", &ModelRegressor::test_r2, "@DocString_model_reg_test_r2@")
@@ -1065,8 +1293,8 @@ void sisso::descriptor_identifier::registerModelLogRegressor()
 {
     class_<ModelLogRegressor, bases<ModelRegressor>>("ModelLogRegressor", init<std::string>((arg("self"), arg("train_file")), "@DocString_model_log_reg_init_train@"))
         .def(init<std::string, std::string>((arg("self"), arg("train_file"), arg("test_file")), "@DocString_model_log_reg_init_test_train@"))
-        .def("__str__", &ModelLogRegressor::toString, "@DocString_model_log_reg_str@")
-        .def("__repr__", &ModelLogRegressor::toString, "@DocString_model_log_reg_str@")
+        .def("__str__", &ModelLogRegressor::toString, (arg("self")), "@DocString_model_log_reg_str@")
+        .def("__repr__", &ModelLogRegressor::toString, (arg("self")), "@DocString_model_log_reg_str@")
         .add_property("latex_str", &ModelLogRegressor::toLatexString, "@DocString_model_log_reg_latex_str@")
     ;
 }
@@ -1077,8 +1305,8 @@ void sisso::descriptor_identifier::registerModelClassifier()
         .def(init<std::string, std::string>((arg("self"), arg("train_file"), arg("test_file")), "@DocString_model_class_init_test_train@"))
         .def(init<ModelClassifier, py::list, np::ndarray, np::ndarray>((arg("self"), arg("o"), arg("new_coefs"), arg("prop_train_est"), arg("prop_test_est")), "@DocString_model_class_init_new_coefs_list@"))
         .def(init<ModelClassifier, np::ndarray, np::ndarray, np::ndarray>((arg("self"), arg("o"), arg("new_coefs"), arg("prop_train_est"), arg("prop_test_est")), "@DocString_model_class_init_new_coefs_arr@"))
-        .def("__str__", &ModelClassifier::toString, "@DocString_model_class_str@")
-        .def("__repr__", &ModelClassifier::toString, "@DocString_model_class_str@")
+        .def("__str__", &ModelClassifier::toString, (arg("self")), "@DocString_model_class_str@")
+        .def("__repr__", &ModelClassifier::toString, (arg("self")), "@DocString_model_class_str@")
         .add_property("latex_str", &ModelClassifier::toLatexString, "@DocString_model_class_latex_str@")
         .add_property("percent_error", &ModelClassifier::percent_train_error, "@DocString_model_class_precent_train_error@")
         .add_property("percent_test_error", &ModelClassifier::percent_test_error, "@DocString_model_class_precent_test_error@")
@@ -1106,26 +1334,59 @@ void sisso::descriptor_identifier::registerSISSO_DI()
 
 void sisso::descriptor_identifier::registerSISSORegressor()
 {
-    class_<SISSORegressor, bases<SISSO_DI>>("SISSORegressor", init<std::shared_ptr<FeatureSpace>, std::string, Unit, np::ndarray, np::ndarray, py::list, py::list, py::list, int, int, int, optional<bool>>((arg("self"), arg("feat_space"),arg("prop_label"),arg("prop_unit"),arg("prop"),arg("prop_test"),arg("task_sizes_train"),arg("task_sizes_test"),arg("leave_out_inds"),arg("n_dim"),arg("n_residual"),arg("n_models_store"),arg("fix_intercept")), "@DocString_model_reg_init_arr@"))
-        .def(init<std::shared_ptr<FeatureSpace>, std::string, Unit, py::list, py::list, py::list, py::list, py::list, int, int, int, optional<bool>>((arg("self"), arg("feat_space"),arg("prop_label"),arg("prop_unit"),arg("prop"),arg("prop_test"),arg("task_sizes_train"),arg("task_sizes_test"),arg("leave_out_inds"),arg("n_dim"),arg("n_residual"),arg("n_models_store"),arg("fix_intercept")), "@DocString_model_reg_init_list@"))
-        .def("fit", &SISSORegressor::fit, "@DocString_sisso_reg_fit@")
+    class_<SISSORegressor, bases<SISSO_DI>>(
+        "SISSORegressor",
+        init<std::shared_ptr<FeatureSpace>, std::string, Unit, np::ndarray, np::ndarray, py::list, py::list, py::list, int, int, int, optional<bool>>(
+            (arg("self"), arg("feat_space"), arg("prop_label"), arg("prop_unit"), arg("prop"), arg("prop_test"), arg("task_sizes_train"), arg("task_sizes_test"), arg("leave_out_inds"), arg("n_dim"), arg("n_residual"), arg("n_models_store"), arg("fix_intercept")),
+            "@DocString_model_reg_init_arr@"
+        )
+    )
+        .def(
+            init<std::shared_ptr<FeatureSpace>, std::string, Unit, py::list, py::list, py::list, py::list, py::list, int, int, int, optional<bool>>(
+                (arg("self"), arg("feat_space"), arg("prop_label"), arg("prop_unit"), arg("prop"), arg("prop_test"), arg("task_sizes_train"), arg("task_sizes_test"), arg("leave_out_inds"), arg("n_dim"), arg("n_residual"), arg("n_models_store"), arg("fix_intercept")),
+                "@DocString_model_reg_init_list@"
+            )
+        )
+        .def("fit", &SISSORegressor::fit, (arg("self")), "@DocString_sisso_reg_fit@")
         .add_property("models", &SISSORegressor::models_py, "@DocString_sisso_reg_models_py@")
     ;
 }
 
 void sisso::descriptor_identifier::registerSISSOLogRegressor()
 {
-    class_<SISSOLogRegressor, bases<SISSORegressor>>("SISSOLogRegressor", init<std::shared_ptr<FeatureSpace>, std::string, Unit, np::ndarray, np::ndarray, py::list, py::list, py::list, int, int, int, optional<bool>>((arg("self"), arg("feat_space"),arg("prop_label"),arg("prop_unit"),arg("prop"),arg("prop_test"),arg("task_sizes_train"),arg("task_sizes_test"),arg("leave_out_inds"),arg("n_dim"),arg("n_residual"),arg("n_models_store"),arg("fix_intercept")), "@DocString_model_log_reg_init_arr@"))
-        .def(init<std::shared_ptr<FeatureSpace>, std::string, Unit, py::list, py::list, py::list, py::list, py::list, int, int, int, optional<bool>>((arg("self"), arg("feat_space"),arg("prop_label"),arg("prop_unit"),arg("prop"),arg("prop_test"),arg("task_sizes_train"),arg("task_sizes_test"),arg("leave_out_inds"),arg("n_dim"),arg("n_residual"),arg("n_models_store"),arg("fix_intercept")), "@DocString_model_log_reg_init_list@"))
+    class_<SISSOLogRegressor, bases<SISSORegressor>>(
+        "SISSOLogRegressor",
+        init<std::shared_ptr<FeatureSpace>, std::string, Unit, np::ndarray, np::ndarray, py::list, py::list, py::list, int, int, int, optional<bool>>(
+            (arg("self"), arg("feat_space"), arg("prop_label"), arg("prop_unit"), arg("prop"), arg("prop_test"), arg("task_sizes_train"), arg("task_sizes_test"), arg("leave_out_inds"), arg("n_dim"), arg("n_residual"), arg("n_models_store"), arg("fix_intercept")),
+            "@DocString_model_log_reg_init_arr@"
+        )
+    )
+        .def(
+            init<std::shared_ptr<FeatureSpace>, std::string, Unit, py::list, py::list, py::list, py::list, py::list, int, int, int, optional<bool>>(
+                (arg("self"), arg("feat_space"), arg("prop_label"), arg("prop_unit"), arg("prop"), arg("prop_test"), arg("task_sizes_train"), arg("task_sizes_test"), arg("leave_out_inds"), arg("n_dim"), arg("n_residual"), arg("n_models_store"), arg("fix_intercept")),
+                "@DocString_model_log_reg_init_list@"
+            )
+        )
         .add_property("models", &SISSOLogRegressor::models_log_reg_py, "@DocString_sisso_log_reg_models_py@")
     ;
 }
 
 void sisso::descriptor_identifier::registerSISSOClassifier()
 {
-    class_<SISSOClassifier, bases<SISSO_DI>>("SISSOClassifier", init<std::shared_ptr<FeatureSpace>, std::string, Unit, np::ndarray, np::ndarray, py::list, py::list, py::list, int, int, int>((arg("self"), arg("feat_space"), arg("prop_label"), arg("prop_unit"), arg("prop"), arg("prop_test"), arg("task_sizes_train"), arg("task_sizes_test"), arg("leave_out_inds"), arg("n_dim"), arg("n_residual"), arg("n_models_store")), "@DocString_sisso_class_init_arr@"))
-        .def(init<std::shared_ptr<FeatureSpace>, std::string, Unit, py::list, py::list, py::list, py::list, py::list, int, int, int>((arg("self"), arg("feat_space"), arg("prop_label"), arg("prop_unit"), arg("prop"), arg("prop_test"), arg("task_sizes_train"), arg("task_sizes_test"), arg("leave_out_inds"), arg("n_dim"), arg("n_residual"), arg("n_models_store")), "@DocString_sisso_class_init_list@"))
-        .def("fit", &SISSOClassifier::fit, "@DocString_sisso_class_fit@")
+    class_<SISSOClassifier, bases<SISSO_DI>>(
+        "SISSOClassifier",
+        init<std::shared_ptr<FeatureSpace>, std::string, Unit, np::ndarray, np::ndarray, py::list, py::list, py::list, int, int, int>(
+            (arg("self"), arg("feat_space"), arg("prop_label"), arg("prop_unit"), arg("prop"), arg("prop_test"), arg("task_sizes_train"), arg("task_sizes_test"), arg("leave_out_inds"), arg("n_dim"), arg("n_residual"), arg("n_models_store")),
+            "@DocString_sisso_class_init_arr@"
+        )
+    )
+        .def(
+            init<std::shared_ptr<FeatureSpace>, std::string, Unit, py::list, py::list, py::list, py::list, py::list, int, int, int>(
+                (arg("self"), arg("feat_space"), arg("prop_label"), arg("prop_unit"), arg("prop"), arg("prop_test"), arg("task_sizes_train"), arg("task_sizes_test"), arg("leave_out_inds"), arg("n_dim"), arg("n_residual"), arg("n_models_store")),
+                "@DocString_sisso_class_init_list@"
+            )
+        )
+        .def("fit", &SISSOClassifier::fit, (arg("self")), "@DocString_sisso_class_fit@")
         .add_property("models", &SISSOClassifier::models_py, "@DocString_sisso_class_models_py@")
     ;
 }
diff --git a/src/python/bindings_docstring_keyed.hpp b/src/python/bindings_docstring_keyed.hpp
index 71752347f3ca7dfab8912f437ad9c10a268b4bca..bea2f818aeba9023c3a81689f72509db1a05323d 100644
--- a/src/python/bindings_docstring_keyed.hpp
+++ b/src/python/bindings_docstring_keyed.hpp
@@ -317,12 +317,12 @@ namespace sisso
                 template<int N>
                 static void registerOperatorNode()
                 {
-                    py::class_<OperatorNodeWrap<N>, py::bases<Node>, boost::noncopyable>("OperatorNode")
-                        .def("is_nan", &OperatorNode<N>::is_nan, "@DocString_op_node_is_nan@")
-                        .def("is_const", &OperatorNode<N>::is_const, "@DocString_op_node_is_const@")
+                    py::class_<OperatorNodeWrap<N>, py::bases<Node>, boost::noncopyable>("OperatorNode", py::no_init)
+                        .def("is_nan", &OperatorNode<N>::is_nan, (py::arg("self")), "@DocString_op_node_is_nan@")
+                        .def("is_const", &OperatorNode<N>::is_const, (py::arg("self")), "@DocString_op_node_is_const@")
+                        .add_property("n_leaves", &OperatorNode<N>::n_leaves, "@DocString_op_node_n_leaves@")
                         .add_property("n_feats", &OperatorNode<N>::n_feats, "@DocString_op_node_n_feats@")
                         .add_property("feat", &OperatorNode<N>::feat, (py::arg("ind")), "@DocString_op_node_feat@")
-                        .add_property("n_leaves", &OperatorNode<N>::n_leaves, (py::arg("cur_n_leaves")), "@DocString_op_node_n_leaves@")
                     ;
                 }
             #else
@@ -331,15 +331,15 @@ namespace sisso
                 {
                     void (OperatorNode<N>::*set_params_list)(py::list) = &OperatorNode<N>::set_parameters;
                     void (OperatorNode<N>::*set_params_arr)(np::ndarray) = &OperatorNode<N>::set_parameters;
-                    py::class_<OperatorNodeWrap<N>, py::bases<Node>, boost::noncopyable>("OperatorNode")
-                        .def("is_nan", &OperatorNode<N>::is_nan, "@DocString_op_node_is_nan@")
-                        .def("is_const", &OperatorNode<N>::is_const, "@DocString_op_node_is_const@")
-                        .def("get_parameters", py::pure_virtual(&OperatorNode<N>::get_parameters), "@DocString_op_node_get_params@")
-                        .def("set_parameters", set_params_arr, "@DocString_op_node_set_param_arr@")
-                        .def("set_parameters", set_params_list, "@DocString_op_node_set_param_list@")
+                    py::class_<OperatorNodeWrap<N>, py::bases<Node>, boost::noncopyable>("OperatorNode", py::no_init)
+                        .def("is_nan", &OperatorNode<N>::is_nan, (py::arg("self")), "@DocString_op_node_is_nan@")
+                        .def("is_const", &OperatorNode<N>::is_const, (py::arg("self")), "@DocString_op_node_is_const@")
+                        .def("get_parameters", py::pure_virtual(&OperatorNode<N>::get_parameters), (py::arg("self"), py::arg("optimizer")), "@DocString_op_node_get_params@")
+                        .def("set_parameters", set_params_arr, (py::arg("self"), py::arg("params")), "@DocString_op_node_set_param_arr@")
+                        .def("set_parameters", set_params_list, (py::arg("self"), py::arg("params")), "@DocString_op_node_set_param_list@")
+                        .def("feat", &OperatorNode<N>::feat, (py::arg("self"), py::arg("ind")), "@DocString_op_node_feat@")
+                        .add_property("n_leaves", &OperatorNode<N>::n_leaves, "@DocString_op_node_n_leaves@")
                         .add_property("n_feats", &OperatorNode<N>::n_feats, "@DocString_op_node_n_feats@")
-                        .add_property("feat", &OperatorNode<N>::feat, "@DocString_op_node_feat@")
-                        .add_property("n_leaves", &OperatorNode<N>::n_leaves)
                     ;
                 }
 
diff --git a/src/python/feature_creation/ModelNode.cpp b/src/python/feature_creation/ModelNode.cpp
index e48203c344137d7bb1cd9702dedd24691fae5e0e..561ce00be967391b86b912dc67d7aea01d126879 100644
--- a/src/python/feature_creation/ModelNode.cpp
+++ b/src/python/feature_creation/ModelNode.cpp
@@ -1,5 +1,57 @@
 #include "feature_creation/node/ModelNode.hpp"
 
+ModelNode::ModelNode(
+    const unsigned long int feat_ind,
+    const unsigned long int rung,
+    const std::string expr,
+    const std::string latex_expr,
+    const std::string expr_postfix,
+    const std::string matlab_fxn_expr,
+    const np::ndarray value,
+    const np::ndarray test_value,
+    const py::list x_in_expr_list,
+    const Unit unit
+) :
+ModelNode(
+    feat_ind,
+    rung,
+    expr,
+    latex_expr,
+    expr_postfix,
+    matlab_fxn_expr,
+    python_conv_utils::from_ndarray<double>(value),
+    python_conv_utils::from_ndarray<double>(test_value),
+    python_conv_utils::from_list<std::string>(x_in_expr_list),
+    unit
+)
+{}
+
+ModelNode::ModelNode(
+    const unsigned long int feat_ind,
+    const unsigned long int rung,
+    const std::string expr,
+    const std::string latex_expr,
+    const std::string expr_postfix,
+    const std::string matlab_fxn_expr,
+    const py::list value,
+    const py::list test_value,
+    const py::list x_in_expr_list,
+    const Unit unit
+) :
+ModelNode(
+    feat_ind,
+    rung,
+    expr,
+    latex_expr,
+    expr_postfix,
+    matlab_fxn_expr,
+    python_conv_utils::from_list<double>(value),
+    python_conv_utils::from_list<double>(test_value),
+    python_conv_utils::from_list<std::string>(x_in_expr_list),
+    unit
+)
+{}
+
 np::ndarray ModelNode::eval_many_py(np::ndarray x_in)
 {
     if(x_in.get_nd() != 2)