From dd390d8b7a874c953789b67ef1b997678b0ba778 Mon Sep 17 00:00:00 2001
From: Thomas <purcell@fhi-berlin.mpg.de>
Date: Wed, 10 Feb 2021 15:27:03 +0100
Subject: [PATCH] Update files to reflect upstream sissopp changes

See https://gitlab.com/sissopp_developers/sissopp,
commits up to 763e40e4deaf7a45dba3f0d8052cca053c1fa957.
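
For downstream callers, the main signature changes are the new leading
prop_label argument on the Model/SISSO_DI constructors and the extra
latex_expr argument on ModelNode. A minimal sketch of the adapted calls
(illustrative only; the data values and helper name are placeholders and
not part of this patch):

    #include <descriptor_identifier/Model/ModelRegressor.hpp>

    ModelRegressor example_model()
    {
        std::vector<double> feat_val = {1.0, 2.0, 3.0};
        std::vector<double> feat_test_val = {};

        // ModelNode now takes a LaTeX expression between expr and postfix_expr
        model_node_ptr feat = std::make_shared<ModelNode>(
            0, 1, "(A + B)", "(A + B)", "A B add",
            feat_val, feat_test_val, Unit("eV"));

        std::vector<double> prop_train = {0.1, 0.2, 0.3};
        std::vector<double> prop_test = {};

        // Model constructors now take the property label ahead of its unit
        return ModelRegressor(
            "E_form", Unit("eV"), prop_train, prop_test, {feat},
            {3}, {0}, false);
    }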
---
 .gitlab-ci.yml                                |  32 +--
 CMakeLists.txt                                |   7 +-
 src/CMakeLists.txt                            |  29 ++-
 src/descriptor_identifier/Model/Model.cpp     |   3 +-
 src/descriptor_identifier/Model/Model.hpp     |  13 +-
 .../Model/ModelClassifier.cpp                 |  30 ++-
 .../Model/ModelClassifier.hpp                 |  12 +-
 .../Model/ModelLogRegressor.cpp               |  16 +-
 .../Model/ModelLogRegressor.hpp               |  12 +-
 .../Model/ModelRegressor.cpp                  |  33 ++-
 .../Model/ModelRegressor.hpp                  |  24 +-
 .../SISSO_DI/SISSOClassifier.cpp              |  11 +-
 .../SISSO_DI/SISSOClassifier.hpp              |   4 +-
 .../SISSO_DI/SISSOLogRegressor.cpp            |   7 +-
 .../SISSO_DI/SISSOLogRegressor.hpp            |   6 +-
 .../SISSO_DI/SISSORegressor.cpp               |  11 +-
 .../SISSO_DI/SISSORegressor.hpp               |   6 +-
 .../SISSO_DI/SISSO_DI.cpp                     |   2 +
 .../SISSO_DI/SISSO_DI.hpp                     |   8 +-
 .../feature_space/FeatureSpace.cpp            |   7 +-
 .../feature_space/FeatureSpace.hpp            |   4 +-
 src/feature_creation/node/FeatureNode.hpp     |   7 +
 src/feature_creation/node/ModelNode.cpp       |   3 +-
 src/feature_creation/node/ModelNode.hpp       |  12 +-
 src/feature_creation/node/Node.hpp            |  14 +-
 .../node/operator_nodes/OperatorNode.hpp      |   6 +
 .../{ => abs}/absolute_value.cpp              |   4 +-
 .../{ => abs}/absolute_value.hpp              |  11 +-
 .../{ => abs_diff}/absolute_difference.cpp    |   4 +-
 .../{ => abs_diff}/absolute_difference.hpp    |  11 +-
 .../allowed_operator_nodes/{ => add}/add.cpp  |   4 +-
 .../allowed_operator_nodes/{ => add}/add.hpp  |  11 +-
 .../allowed_operator_nodes/{ => cb}/cube.cpp  |   4 +-
 .../allowed_operator_nodes/{ => cb}/cube.hpp  |  11 +-
 .../{ => cbrt}/cube_root.cpp                  |   4 +-
 .../{ => cbrt}/cube_root.hpp                  |  11 +-
 .../allowed_operator_nodes/{ => cos}/cos.cpp  |   4 +-
 .../allowed_operator_nodes/{ => cos}/cos.hpp  |  11 +-
 .../{ => div}/divide.cpp                      |   4 +-
 .../{ => div}/divide.hpp                      |  11 +-
 .../{ => exp}/exponential.cpp                 |   4 +-
 .../{ => exp}/exponential.hpp                 |  11 +-
 .../{ => inv}/inverse.cpp                     |   4 +-
 .../{ => inv}/inverse.hpp                     |  11 +-
 .../allowed_operator_nodes/{ => log}/log.cpp  |   4 +-
 .../allowed_operator_nodes/{ => log}/log.hpp  |  11 +-
 .../{ => mult}/multiply.cpp                   |   4 +-
 .../{ => mult}/multiply.hpp                   |  11 +-
 .../{ => neg_exp}/negative_exponential.cpp    |   4 +-
 .../{ => neg_exp}/negative_exponential.hpp    |  11 +-
 .../allowed_operator_nodes/{ => sin}/sin.cpp  |   4 +-
 .../allowed_operator_nodes/{ => sin}/sin.hpp  |  11 +-
 .../{ => sp}/sixth_power.cpp                  |   4 +-
 .../{ => sp}/sixth_power.hpp                  |  11 +-
 .../{ => sq}/square.cpp                       |   4 +-
 .../{ => sq}/square.hpp                       |  11 +-
 .../{ => sqrt}/square_root.cpp                |   4 +-
 .../{ => sqrt}/square_root.hpp                |  11 +-
 .../{ => sub}/subtract.cpp                    |   4 +-
 .../{ => sub}/subtract.hpp                    |  11 +-
 .../node/operator_nodes/allowed_ops.hpp       |  37 +--
 src/feature_creation/node/utils.cpp           |   2 +-
 src/feature_creation/units/Unit.cpp           |  27 +++
 src/feature_creation/units/Unit.hpp           |   8 +-
 src/inputs/InputParser.cpp                    |   6 +-
 src/inputs/InputParser.hpp                    |   1 +
 src/main.cpp                                  |  16 +-
 src/python/__init__.py                        |   5 +
 src/python/bindings_docstring_keyed.cpp       |  35 ++-
 src/python/bindings_docstring_keyed.hpp       |   5 +-
 .../descriptor_identifier/SISSOClassifier.cpp |   6 +-
 .../SISSOLogRegressor.cpp                     |   8 +-
 .../descriptor_identifier/SISSORegressor.cpp  |   6 +-
 src/python/descriptor_identifier/SISSO_DI.cpp |   4 +
 src/python/feature_creation/FeatureSpace.cpp  |  31 ++-
 src/python/feature_creation/node_utils.cpp    |   4 +-
 src/python/postprocess/__init__.py            | 229 ++++++++++++++----
 .../postprocess/check_cv_convergence.py       |  27 +--
 src/python/postprocess/plotting/config.py     |   2 +-
 src/python/postprocess/plotting/config.toml   |  12 +-
 src/python/postprocess/utils.py               |  56 ++++-
 src/utils/string_utils.cpp                    |  20 ++
 src/utils/string_utils.hpp                    |   4 +-
 tests/exec_test/check_model.py                |   2 +-
 tests/exec_test/classification/check_model.py |  10 +
 tests/exec_test/classification/data.csv       | 101 ++++++++
 tests/exec_test/classification/sisso.json     |  14 ++
 tests/{ => exec_test/default}/data.csv        |   2 +-
 tests/exec_test/default/sisso.json            |  14 ++
 tests/exec_test/gen_proj/sisso.json           |  15 ++
 tests/exec_test/log_reg/data.csv              | 101 ++++++++
 tests/exec_test/log_reg/sisso.json            |  14 ++
 tests/exec_test/max_corr/sisso.json           |  15 ++
 tests/pytest/data.csv                         | 101 ++++++++
 .../test_classification.py                    |  12 +-
 .../model_files/test.dat                      |  22 ++
 .../model_files/train.dat                     | 111 +++++++++
 .../test_log_regressor.py                     |  60 +++++
 .../test_descriptor_identifier/test_model.py  |   6 +-
 .../test_regressor.py                         |   1 +
 .../test_abs_diff_node.py                     |   0
 .../test_feat_generation/test_abs_node.py     |   0
 .../test_feat_generation/test_add_node.py     |   0
 .../test_feat_generation/test_cb_node.py      |   0
 .../test_feat_generation/test_cbrt_node.py    |   0
 .../test_feat_generation/test_cos_node.py     |   0
 .../test_feat_generation/test_div_node.py     |   0
 .../test_feat_generation/test_exp_node.py     |   0
 .../test_feat_generation/test_inv_node.py     |   0
 .../test_feat_generation/test_log_node.py     |   0
 .../test_feat_generation/test_mult_node.py    |   0
 .../test_feat_generation/test_neg_exp_node.py |   0
 .../test_feat_generation/test_sin_node.py     |   0
 .../test_feat_generation/test_six_pow_node.py |   0
 .../test_feat_generation/test_sq_node.py      |   0
 .../test_feat_generation/test_sqrt_node.py    |   0
 .../test_feat_generation/test_sub_node.py     |   0
 .../test_feature_space/test_feature_space.py  |   0
 .../test_feature_space/test_units.py          |   0
 tests/{ => pytest}/test_sisso.py              |   0
 tests/sisso.json                              |  15 --
 .../model_files/test.dat                      |  36 ---
 .../model_files/train.dat                     |  95 --------
 123 files changed, 1448 insertions(+), 424 deletions(-)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => abs}/absolute_value.cpp (98%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => abs}/absolute_value.hpp (93%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => abs_diff}/absolute_difference.cpp (99%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => abs_diff}/absolute_difference.hpp (93%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => add}/add.cpp (99%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => add}/add.hpp (93%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => cb}/cube.cpp (99%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => cb}/cube.hpp (93%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => cbrt}/cube_root.cpp (98%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => cbrt}/cube_root.hpp (93%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => cos}/cos.cpp (99%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => cos}/cos.hpp (93%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => div}/divide.cpp (99%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => div}/divide.hpp (93%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => exp}/exponential.cpp (99%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => exp}/exponential.hpp (93%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => inv}/inverse.cpp (99%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => inv}/inverse.hpp (92%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => log}/log.cpp (99%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => log}/log.hpp (93%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => mult}/multiply.cpp (99%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => mult}/multiply.hpp (92%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => neg_exp}/negative_exponential.cpp (98%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => neg_exp}/negative_exponential.hpp (93%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => sin}/sin.cpp (99%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => sin}/sin.hpp (93%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => sp}/sixth_power.cpp (99%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => sp}/sixth_power.hpp (93%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => sq}/square.cpp (99%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => sq}/square.hpp (93%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => sqrt}/square_root.cpp (98%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => sqrt}/square_root.hpp (93%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => sub}/subtract.cpp (99%)
 rename src/feature_creation/node/operator_nodes/allowed_operator_nodes/{ => sub}/subtract.hpp (93%)
 create mode 100644 tests/exec_test/classification/check_model.py
 create mode 100644 tests/exec_test/classification/data.csv
 create mode 100644 tests/exec_test/classification/sisso.json
 rename tests/{ => exec_test/default}/data.csv (99%)
 create mode 100644 tests/exec_test/default/sisso.json
 create mode 100644 tests/exec_test/gen_proj/sisso.json
 create mode 100644 tests/exec_test/log_reg/data.csv
 create mode 100644 tests/exec_test/log_reg/sisso.json
 create mode 100644 tests/exec_test/max_corr/sisso.json
 create mode 100644 tests/pytest/data.csv
 rename tests/{ => pytest}/test_classification/test_classification.py (93%)
 create mode 100644 tests/pytest/test_descriptor_identifier/model_files/test.dat
 create mode 100644 tests/pytest/test_descriptor_identifier/model_files/train.dat
 create mode 100644 tests/pytest/test_descriptor_identifier/test_log_regressor.py
 rename tests/{ => pytest}/test_descriptor_identifier/test_model.py (88%)
 rename tests/{ => pytest}/test_descriptor_identifier/test_regressor.py (98%)
 rename tests/{ => pytest}/test_feature_creation/test_feat_generation/test_abs_diff_node.py (100%)
 rename tests/{ => pytest}/test_feature_creation/test_feat_generation/test_abs_node.py (100%)
 rename tests/{ => pytest}/test_feature_creation/test_feat_generation/test_add_node.py (100%)
 rename tests/{ => pytest}/test_feature_creation/test_feat_generation/test_cb_node.py (100%)
 rename tests/{ => pytest}/test_feature_creation/test_feat_generation/test_cbrt_node.py (100%)
 rename tests/{ => pytest}/test_feature_creation/test_feat_generation/test_cos_node.py (100%)
 rename tests/{ => pytest}/test_feature_creation/test_feat_generation/test_div_node.py (100%)
 rename tests/{ => pytest}/test_feature_creation/test_feat_generation/test_exp_node.py (100%)
 rename tests/{ => pytest}/test_feature_creation/test_feat_generation/test_inv_node.py (100%)
 rename tests/{ => pytest}/test_feature_creation/test_feat_generation/test_log_node.py (100%)
 rename tests/{ => pytest}/test_feature_creation/test_feat_generation/test_mult_node.py (100%)
 rename tests/{ => pytest}/test_feature_creation/test_feat_generation/test_neg_exp_node.py (100%)
 rename tests/{ => pytest}/test_feature_creation/test_feat_generation/test_sin_node.py (100%)
 rename tests/{ => pytest}/test_feature_creation/test_feat_generation/test_six_pow_node.py (100%)
 rename tests/{ => pytest}/test_feature_creation/test_feat_generation/test_sq_node.py (100%)
 rename tests/{ => pytest}/test_feature_creation/test_feat_generation/test_sqrt_node.py (100%)
 rename tests/{ => pytest}/test_feature_creation/test_feat_generation/test_sub_node.py (100%)
 rename tests/{ => pytest}/test_feature_creation/test_feature_space/test_feature_space.py (100%)
 rename tests/{ => pytest}/test_feature_creation/test_feature_space/test_units.py (100%)
 rename tests/{ => pytest}/test_sisso.py (100%)
 delete mode 100644 tests/sisso.json
 delete mode 100644 tests/test_descriptor_identifier/model_files/test.dat
 delete mode 100644 tests/test_descriptor_identifier/model_files/train.dat
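
Note on the model output format: the feature metadata lines in the train/test
.dat files are now ';'-separated and carry the LaTeX expression as an extra
field; the loaders split them with str_utils::split_string_trim(line, ";") in
the order rung, unit, postfix expression, expression, LaTeX expression. A toy
illustration (the string below is made up, not a verbatim file line, and the
include path and return type are assumed from this repository's layout):

    #include <utils/string_utils.hpp>

    #include <iostream>
    #include <string>
    #include <vector>

    void parse_feature_metadata_example()
    {
        std::string line = "1; eV; A B add; (A + B); (A + B)";
        std::vector<std::string> fields = str_utils::split_string_trim(line, ";");

        int rung = std::stoi(fields[0]);      // 1
        std::string unit_str = fields[1];     // "eV"
        std::string postfix_expr = fields[2]; // "A B add"
        std::string expr = fields[3];         // "(A + B)"
        std::string latex_expr = fields[4];   // "(A + B)"
        std::cout << expr << " [" << unit_str << "], rung " << rung << std::endl;
    }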

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 8489ab6d..a55b6a3d 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -8,15 +8,15 @@ stages:
 build-intel:
   stage: build
   script:
-    - python -m venv sissopp_env
-    - source sissopp_env/bin/activate
+    - python -m venv cpp_sisso_env
+    - source cpp_sisso_env/bin/activate
     - mkdir build_intel/
     - cd build_intel/
     - export I_MPI_ROOT=/home/runner/intel/oneapi/mpi/2021.1-beta10/
     - export INTEL_COMP_ROOT=/home/runner/intel/oneapi/compiler/2021.1-beta10/linux/
     - export MKLROOT=/home/runner/intel/oneapi/mkl/2021.1-beta10/
     - export LD_LIBRARY_PATH=$I_MPI_ROOT/lib/:$I_MPI_ROOT/lib/release:$MKLROOT/lib/intel64:$INTEL_COMP_ROOT/lib/:$INTEL_COMP_ROOT/compiler/lib/intel64/:$LD_LIBRARY_PATH:$HOME/intel/oneapi/intelpython/latest/lib/:$HOME/intel/oneapi/intelpython/latest/lib/python3.7
-    - export PYTHONPATH=$HOME/intel/oneapi/intelpython/latest/lib/python3.7/site-packages/:sissopp_env/lib/python3.7/site-packages/
+    - export PYTHONPATH=$HOME/intel/oneapi/intelpython/latest/lib/python3.7/site-packages/:cpp_sisso_env/lib/python3.7/site-packages/
     - export PATH=$INTEL_COMP_ROOT/bin/:$INTEL_COMP_ROOT/bin/intel64:$I_MPI_ROOT/bin:$PATH
     - cmake -C ../cmake/toolchains/intel_py.cmake ../
     - make
@@ -28,7 +28,7 @@ build-intel:
       - bin/sisso++
       - lib/boost/*
       - lib/coin-or/*
-      - sissopp_env/*
+      - cpp_sisso_env/*
     expire_in: 1 days
 
 test-intel-py:
@@ -36,12 +36,12 @@ test-intel-py:
   dependencies:
     - build-intel
   script:
-    - source sissopp_env/bin/activate
+    - source cpp_sisso_env/bin/activate
     - export I_MPI_ROOT=/home/runner/intel/oneapi/mpi/2021.1-beta10/
     - export INTEL_COMP_ROOT=/home/runner/intel/oneapi/compiler/2021.1-beta10/linux/
     - export MKLROOT=/home/runner/intel/oneapi/mkl/2021.1-beta10/
     - export LD_LIBRARY_PATH=$I_MPI_ROOT/lib/:$I_MPI_ROOT/lib/release:$MKLROOT/lib/intel64:$INTEL_COMP_ROOT/lib/:$INTEL_COMP_ROOT/compiler/lib/intel64/:$LD_LIBRARY_PATH:$HOME/intel/oneapi/intelpython/latest/lib/:$HOME/intel/oneapi/intelpython/latest/lib/python3.7
-    - export PYTHONPATH=$HOME/intel/oneapi/intelpython/latest/lib/python3.7/site-packages/:sissopp_env/lib/python3.7/site-packages/
+    - export PYTHONPATH=$HOME/intel/oneapi/intelpython/latest/lib/python3.7/site-packages/:cpp_sisso_env/lib/python3.7/site-packages/
     - export PATH=$INTEL_COMP_ROOT/bin/:$INTEL_COMP_ROOT/bin/intel64:$I_MPI_ROOT/bin:$PATH
     - pytest tests
 
@@ -50,12 +50,12 @@ test-intel-bin:
   dependencies:
     - build-intel
   script:
-    - source sissopp_env/bin/activate
+    - source cpp_sisso_env/bin/activate
     - export I_MPI_ROOT=/home/runner/intel/oneapi/mpi/2021.1-beta10/
     - export INTEL_COMP_ROOT=/home/runner/intel/oneapi/compiler/2021.1-beta10/linux/
     - export MKLROOT=/home/runner/intel/oneapi/mkl/2021.1-beta10/
     - export LD_LIBRARY_PATH=$I_MPI_ROOT/lib/:$I_MPI_ROOT/lib/release:$MKLROOT/lib/intel64:$INTEL_COMP_ROOT/lib/:$INTEL_COMP_ROOT/compiler/lib/intel64/:$LD_LIBRARY_PATH:$HOME/intel/oneapi/intelpython/latest/lib/:$HOME/intel/oneapi/intelpython/latest/lib/python3.7
-    - export PYTHONPATH=$HOME/intel/oneapi/intelpython/latest/lib/python3.7/site-packages/:sissopp_env/lib/python3.7/site-packages/
+    - export PYTHONPATH=$HOME/intel/oneapi/intelpython/latest/lib/python3.7/site-packages/:cpp_sisso_env/lib/python3.7/site-packages/
     - export PATH=$INTEL_COMP_ROOT/bin/:$INTEL_COMP_ROOT/bin/intel64:$I_MPI_ROOT/bin:$PATH
     - cd tests/exec_test/
     - mpiexec -n 2 ../../bin/sisso++
@@ -65,10 +65,10 @@ build-gnu:
   stage: build
   script:
     - conda info --envs
-    - python -m venv sissopp_env
-    - source sissopp_env/bin/activate
+    - python -m venv cpp_sisso_env
+    - source cpp_sisso_env/bin/activate
     - export LD_LIBRARY_PATH=$HOME/intel/oneapi/intelpython/latest/lib/:$HOME/intel/oneapi/intelpython/latest/lib/python3.7:$LD_LIBRARY_PATH
-    - export PYTHONPATH=$HOME/intel/oneapi/intelpython/latest/lib/python3.7/site-packages/:sissopp_env/lib/python3.7/site-packages/
+    - export PYTHONPATH=$HOME/intel/oneapi/intelpython/latest/lib/python3.7/site-packages/:cpp_sisso_env/lib/python3.7/site-packages/
     - mkdir build_gcc/
     - cd build_gcc/
     - cmake -C ../cmake/toolchains/gnu_py.cmake ../
@@ -81,16 +81,16 @@ build-gnu:
       - bin/sisso++
       - lib/boost/*
       - lib/coin-or/*
-      - sissopp_env/*
+      - cpp_sisso_env/*
 
 test-gnu-py:
   stage: unit_test
   dependencies:
     - build-gnu
   script:
-    - source sissopp_env/bin/activate
+    - source cpp_sisso_env/bin/activate
     - export LD_LIBRARY_PATH=$HOME/intel/oneapi/intelpython/latest/lib/:$HOME/intel/oneapi/intelpython/latest/lib/python3.7:$LD_LIBRARY_PATH
-    - export PYTHONPATH=$HOME/intel/oneapi/intelpython/latest/lib/python3.7/site-packages/:sissopp_env/lib/python3.7/site-packages/
+    - export PYTHONPATH=$HOME/intel/oneapi/intelpython/latest/lib/python3.7/site-packages/:cpp_sisso_env/lib/python3.7/site-packages/
     - pytest tests
 
 test-gnu-bin:
@@ -98,9 +98,9 @@ test-gnu-bin:
   dependencies:
     - build-gnu
   script:
-    - source sissopp_env/bin/activate
+    - source cpp_sisso_env/bin/activate
     - export LD_LIBRARY_PATH=$HOME/intel/oneapi/intelpython/latest/lib/:$HOME/intel/oneapi/intelpython/latest/lib/python3.7:$LD_LIBRARY_PATH
-    - export PYTHONPATH=$HOME/intel/oneapi/intelpython/latest/lib/python3.7/site-packages/:sissopp_env/lib/python3.7/site-packages/
+    - export PYTHONPATH=$HOME/intel/oneapi/intelpython/latest/lib/python3.7/site-packages/:cpp_sisso_env/lib/python3.7/site-packages/
     - cd tests/exec_test/
     - mpiexec -n 2 ../../bin/sisso++
     - python check_model.py
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 41378b6b..3bbc9ebe 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -230,8 +230,8 @@ else(EXTERNAL_BOOST)
         BUILD_IN_SOURCE 1
         CONFIGURE_COMMAND ${Boost_CONFIGURE_COMMAND}
         BUILD_COMMAND
-	./b2 -j ${BOOST_BUILD_N_PROCS}
-	INSTALL_COMMAND ./b2 -j ${BOOST_BUILD_N_PROCS} install
+    	./b2 -j ${BOOST_BUILD_N_PROCS}
+    	INSTALL_COMMAND ./b2 -j ${BOOST_BUILD_N_PROCS} install
         INSTALL_DIR ${Boost_INSTALL_DIR}
     )
 
@@ -327,9 +327,6 @@ ExternalProject_Add(
     external_Clp
     PREFIX "external/coin-Clp"
     URL ${COIN_CLP_URL}
-    # SVN_REPOSITORY "https://projects.coin-or.org/svn/Clp/stable/1.17"
-    # BUILD_IN_SOURCE 1
-    # CONFIGURE_COMMAND "${CMAKE_CURRENT_BINARY_DIR}/external/coin-Clp/src/external_Clp/configure"
     CONFIGURE_COMMAND "${COIN_CLP_CONFIGURE_COMMAND}"
     BUILD_COMMAND make -j ${BOOST_BUILD_N_PROCS}
     INSTALL_COMMAND make install
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index b1014794..e1586e48 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -15,14 +15,27 @@ include_directories(${CMAKE_CURRENT_LIST_DIR}/utils/)
 set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-deprecated-declarations")
 set(CMAKE_INSTALL_RPATH ${Boost_LIBRARY_DIRS};${LAPACK_DIR};${MPI_DIR};${COIN_CLP_LIBRARY_DIRS};${COIN_UTILS_LIBRARY_DIRS};${CMAKE_CURRENT_LIST_DIR}/../lib/;${CMAKE_CURRENT_LIST_DIR}/../lib/coin-or)
 
-# set(INSTALL_RPATH ${Boost_LIB_DIR})
 file(GLOB_RECURSE SISSOPP_SOURCES *.cpp)
 file(GLOB_RECURSE NOT_SISSOPP_SOURCES python/*.cpp)
 list(REMOVE_ITEM SISSOPP_SOURCES ${NOT_SISSOPP_SOURCES})
+list(REMOVE_ITEM SISSOPP_SOURCES ${CMAKE_CURRENT_LIST_DIR}/main.cpp)
 
 configure_file(${CMAKE_CURRENT_SOURCE_DIR}/sisso++_config.hpp.in ${CMAKE_CURRENT_BINARY_DIR}/sisso++_config.hpp)
 
-add_executable(sisso++  ${SISSOPP_SOURCES})
+add_library(libsisso SHARED ${SISSOPP_SOURCES})
+set_target_properties(libsisso
+    PROPERTIES
+    ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib"
+    LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib"
+    RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin"
+    PREFIX ""
+    SUFFIX ".so"
+)
+target_link_libraries(libsisso ${LAPACK_LIBRARIES} ${MPI_LIBRARIES} -Wl,--rpath=${Boost_LIB_DIR} -Wl,--rpath=${LAPACK_DIR} ${Boost_LIBRARIES} ${COIN_CLP_LIBRARIES} ${OPENMP_LIBRARIES})
+install(TARGETS libsisso DESTINATION ${CMAKE_CURRENT_LIST_DIR}/../lib/)
+set(CMAKE_INSTALL_RPATH ${Boost_LIBRARY_DIRS};${LAPACK_DIR};${MPI_DIR};${COIN_CLP_LIBRARY_DIRS};${COIN_UTILS_LIBRARY_DIRS};${CMAKE_CURRENT_LIST_DIR}/../lib/;${CMAKE_CURRENT_LIST_DIR}/../lib/coin-or)
+
+add_executable(sisso++ ${CMAKE_CURRENT_LIST_DIR}/main.cpp)
 
 set_target_properties(sisso++
     PROPERTIES
@@ -31,23 +44,21 @@ set_target_properties(sisso++
     RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin"
 )
 
-target_link_libraries(sisso++  ${LAPACK_LIBRARIES} ${MPI_LIBRARIES} -Wl,--rpath=${Boost_LIB_DIR} -Wl,--rpath=${LAPACK_DIR} ${Boost_LIBRARIES} ${COIN_CLP_LIBRARIES} ${OPENMP_LIBRARIES})
+target_link_libraries(sisso++ libsisso ${LAPACK_LIBRARIES} ${MPI_LIBRARIES} -Wl,--rpath=${Boost_LIB_DIR} -Wl,--rpath=${LAPACK_DIR} ${Boost_LIBRARIES} ${COIN_CLP_LIBRARIES} ${OPENMP_LIBRARIES})
 install(TARGETS sisso++ DESTINATION ${CMAKE_CURRENT_LIST_DIR}/../bin/)
 
 if(USE_PYTHON)
     include(${CMAKE_CURRENT_LIST_DIR}/../cmake/TransferDocStrings.cmake)
-    set(CMAKE_INSTALL_RPATH "${Boost_LIBRARY_DIRS};${PYTHON_PREFIX}/lib/;${MPI_DIR};${COIN_CLP_LIBRARY_DIRS};${COIN_UTILS_LIBRARY_DIRS};${CMAKE_CURRENT_LIST_DIR}/../lib/;;${CMAKE_CURRENT_LIST_DIR}/../lib/coin-or")
+    set(CMAKE_INSTALL_RPATH ${PYTHON_PREFIX}/lib/;${Boost_LIBRARY_DIRS};${LAPACK_DIR};${MPI_DIR};${COIN_CLP_LIBRARY_DIRS};${COIN_UTILS_LIBRARY_DIRS};${CMAKE_CURRENT_LIST_DIR}/../lib/;${PYTHON_INSTDIR}/cpp_sisso/)
 
     transfer_doc_string(${CMAKE_CURRENT_LIST_DIR}/python/bindings_docstring_keyed.cpp ${CMAKE_CURRENT_LIST_DIR}/python/bindings.cpp)
     transfer_doc_string(${CMAKE_CURRENT_LIST_DIR}/python/bindings_docstring_keyed.hpp ${CMAKE_CURRENT_LIST_DIR}/python/bindings.hpp)
 
-    file(GLOB_RECURSE SISSOLIB_SOURCES *.cpp)
-    list(REMOVE_ITEM SISSOLIB_SOURCES ${CMAKE_CURRENT_LIST_DIR}/main.cpp)
+    file(GLOB_RECURSE SISSOLIB_SOURCES ${CMAKE_CURRENT_LIST_DIR}/python/*cpp)
     list(REMOVE_ITEM SISSOLIB_SOURCES ${CMAKE_CURRENT_LIST_DIR}/python/bindings_docstring_keyed.cpp)
 
     # set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DPY_BINDINGS")
     add_library(_sisso SHARED ${SISSOLIB_SOURCES})
-
     configure_file(${CMAKE_CURRENT_SOURCE_DIR}/python/__init__.py ${CMAKE_CURRENT_LIST_DIR}/python/__init__.py COPYONLY)
 
     set_target_properties(_sisso
@@ -59,8 +70,10 @@ if(USE_PYTHON)
         PREFIX ""
         SUFFIX ".so"
     )
-    target_link_libraries(_sisso ${MPI_LIBRARIES} -Wl,--rpath=${PYTHON_PREFIX}/lib/ ${LAPACK_LIBRARIES} ${PYTHON_LIBRARIES}  -Wl,--rpath=${Boost_LIB_DIR} ${Boost_LIBRARIES} ${Boost_PYTHON_LIBRARIES} ${COIN_CLP_LIBRARIES} ${OPENMP_LIBRARIES})
+    target_link_libraries(_sisso libsisso ${MPI_LIBRARIES} -Wl,--rpath=${PYTHON_PREFIX}/lib/ ${LAPACK_LIBRARIES} ${PYTHON_LIBRARIES}  -Wl,--rpath=${Boost_LIB_DIR} ${Boost_LIBRARIES} ${Boost_PYTHON_LIBRARIES} ${COIN_CLP_LIBRARIES} ${OPENMP_LIBRARIES})
+
     install(TARGETS _sisso DESTINATION "${PYTHON_INSTDIR}/cpp_sisso")
+    install(TARGETS libsisso DESTINATION "${PYTHON_INSTDIR}/cpp_sisso/")
     install(
         DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/python/ DESTINATION ${PYTHON_INSTDIR}/cpp_sisso
         FILES_MATCHING PATTERN "*.py"
diff --git a/src/descriptor_identifier/Model/Model.cpp b/src/descriptor_identifier/Model/Model.cpp
index 50f0316f..641fa01e 100644
--- a/src/descriptor_identifier/Model/Model.cpp
+++ b/src/descriptor_identifier/Model/Model.cpp
@@ -1,6 +1,6 @@
 #include <descriptor_identifier/Model/Model.hpp>
 
-Model::Model(Unit prop_unit, std::vector<double> prop_train, std::vector<double> prop_test, std::vector<model_node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, bool fix_intercept) :
+Model::Model(std::string prop_label, Unit prop_unit, std::vector<double> prop_train, std::vector<double> prop_test, std::vector<model_node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, bool fix_intercept) :
     _n_samp_train(feats[0]->n_samp()),
     _n_samp_test(feats[0]->n_test_samp()),
     _n_dim(feats.size()),
@@ -11,6 +11,7 @@ Model::Model(Unit prop_unit, std::vector<double> prop_train, std::vector<double>
     _D_test(_n_samp_test * (feats.size() + (1 - fix_intercept))),
     _task_sizes_train(task_sizes_train),
     _task_sizes_test(task_sizes_test),
+    _prop_label(prop_label),
     _prop_unit(prop_unit),
     _fix_intercept(fix_intercept)
 {}
diff --git a/src/descriptor_identifier/Model/Model.hpp b/src/descriptor_identifier/Model/Model.hpp
index 3f068691..6dfca9d3 100644
--- a/src/descriptor_identifier/Model/Model.hpp
+++ b/src/descriptor_identifier/Model/Model.hpp
@@ -51,6 +51,7 @@ protected:
     std::vector<int> _task_sizes_train; //!< Number of training samples in each task
     std::vector<int> _task_sizes_test; //!< Number of testing samples in each task
 
+    std::string _prop_label; //!< The label for the property
     Unit _prop_unit; //!< The Unit for the property
 
     bool _fix_intercept; //!< If true fix intercept to 0
@@ -64,6 +65,7 @@ public:
     /**
      * @brief Constructor for the model
      *
+     * @param prop_label The label of the property
      * @param prop_unit The unit of the property
      * @param prop_train The property vector for the training samples
      * @param prop_test The property vector for the test samples
@@ -71,7 +73,7 @@ public:
      * @param task_sizes_train Number of samples per task in the training data
      * @param task_sizes_test Number of samples per task in the test data
      */
-    Model(Unit prop_unit, std::vector<double> prop_train, std::vector<double> prop_test, std::vector<model_node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, bool fix_intercept);
+    Model(std::string prop_label, Unit prop_unit, std::vector<double> prop_train, std::vector<double> prop_test, std::vector<model_node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, bool fix_intercept);
 
     /**
      * @brief Copy Constructor
@@ -126,6 +128,13 @@ public:
      */
     inline Unit prop_unit(){return _prop_unit;}
 
+    // DocString: model_prop_label
+    /**
+     * @brief The label for the property
+     * @return The label for the property
+     */
+    inline std::string prop_label(){return _prop_label;}
+
     /**
      * @brief Convert the Model into an output file
      *
@@ -196,4 +205,4 @@ public:
     #endif
 };
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/descriptor_identifier/Model/ModelClassifier.cpp b/src/descriptor_identifier/Model/ModelClassifier.cpp
index 365c6abf..1078d27c 100644
--- a/src/descriptor_identifier/Model/ModelClassifier.cpp
+++ b/src/descriptor_identifier/Model/ModelClassifier.cpp
@@ -1,7 +1,7 @@
 #include <descriptor_identifier/Model/ModelClassifier.hpp>
 
-ModelClassifier::ModelClassifier(Unit prop_unit, std::vector<double> prop_train, std::vector<double> prop_test, std::vector<model_node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, bool fix_intercept) :
-    Model(prop_unit, prop_train, prop_test, feats, task_sizes_train, task_sizes_test, fix_intercept),
+ModelClassifier::ModelClassifier(std::string prop_label, Unit prop_unit, std::vector<double> prop_train, std::vector<double> prop_test, std::vector<model_node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, bool fix_intercept) :
+    Model(prop_label, prop_unit, prop_train, prop_test, feats, task_sizes_train, task_sizes_test, fix_intercept),
     _prop_train_est(_prop_train),
     _prop_test_est(_prop_test),
     _train_error(_n_samp_train),
@@ -67,17 +67,18 @@ ModelClassifier::ModelClassifier(std::string train_file)
     _task_sizes_test = std::vector<int>(_task_sizes_train.size(), 0);
     for(int ff = 0; ff < feature_expr_train.size(); ++ff)
     {
-        split_str = str_utils::split_string_trim(feature_expr_train[ff]);
+        split_str = str_utils::split_string_trim(feature_expr_train[ff], ";");
         int rung = std::stoi(split_str[0]);
         std::string unit_str = split_str[1];
         std::string postfix_expr = split_str[2];
         std::string expr = split_str[3];
+        std::string latex_expr = split_str[4];
 
         std::vector<double> feat_val(_n_samp_train);
         std::vector<double> feat_test_val = {};
         std::copy_n(&_D_train[ff * _n_samp_train], _n_samp_train, feat_val.data());
 
-        model_node_ptr feat = std::make_shared<ModelNode>(ff, rung, expr, postfix_expr, feat_val, feat_test_val, Unit(unit_str));
+        model_node_ptr feat = std::make_shared<ModelNode>(ff, rung, expr, latex_expr, postfix_expr, feat_val, feat_test_val, Unit(unit_str));
         _feats.push_back(feat);
     }
 
@@ -100,12 +101,13 @@ ModelClassifier::ModelClassifier(std::string train_file, std::string test_file)
         if(feature_expr_train[ff] != feature_expr_test[ff])
             throw std::logic_error("Features for train and test file do not agree");
 
-        split_str = str_utils::split_string_trim(feature_expr_train[ff]);
+        split_str = str_utils::split_string_trim(feature_expr_train[ff], ";");
 
         int rung = std::stoi(split_str[0]);
         std::string unit_str = split_str[1];
         std::string postfix_expr = split_str[2];
         std::string expr = split_str[3];
+        std::string latex_expr = split_str[4];
 
         std::vector<double> feat_val(_n_samp_train);
         std::vector<double> feat_test_val(_n_samp_test);
@@ -113,7 +115,7 @@ ModelClassifier::ModelClassifier(std::string train_file, std::string test_file)
         std::copy_n(&_D_train[ff * _n_samp_train], _n_samp_train, feat_val.data());
         std::copy_n(&_D_test[ff * _n_samp_test], _n_samp_test, feat_test_val.data());
 
-        _feats.push_back(std::make_shared<ModelNode>(ff, rung, expr, postfix_expr, feat_val, feat_test_val, Unit(unit_str)));
+        _feats.push_back(std::make_shared<ModelNode>(ff, rung, expr, latex_expr, postfix_expr, feat_val, feat_test_val, Unit(unit_str)));
     }
 
     int file_train_n_convex_overlap = _train_n_convex_overlap;
@@ -147,11 +149,13 @@ std::vector<std::string> ModelClassifier::populate_model(std::string filename, b
     if(unit_line.substr(0, 42).compare("# # Samples in Convex Hull Overlap Region:") != 0)
     {
         split_line = str_utils::split_string_trim(unit_line);
+        _prop_label = split_line[1];
         _prop_unit = Unit(split_line.back());
         std::getline(file_stream, error_line);
     }
     else
     {
+        _prop_label = "Property";
         _prop_unit = Unit();
         error_line = unit_line;
     }
@@ -413,6 +417,16 @@ std::string ModelClassifier::toString() const
     return unit_rep.str();
 }
 
+std::string ModelClassifier::toLatexString() const
+{
+    std::stringstream unit_rep;
+    unit_rep << "[" << _feats[0]->latex_expr();
+    for(int ff = 1; ff < _feats.size(); ++ff)
+        unit_rep << ", " << _feats[ff]->latex_expr();
+    unit_rep << "]";
+    return unit_rep.str();
+}
+
 std::ostream& operator<< (std::ostream& outStream, const ModelClassifier& model)
 {
     outStream << model.toString();
@@ -428,7 +442,7 @@ void ModelClassifier::to_file(std::string filename, bool train, std::vector<int>
     out_file_stream.open(filename);
 
     out_file_stream << "# " << toString() << std::endl;
-    out_file_stream << "# Property of the Unit: " << _prop_unit.toString() << std::endl;
+    out_file_stream << "# Property Label: $" << str_utils::latexify(_prop_label) << "$; Unit of the Property: " << _prop_unit.toString() << std::endl;
     if(train)
         out_file_stream << "# # Samples in Convex Hull Overlap Region: " << _train_n_convex_overlap << "; # SVM Misclassified: " << std::setprecision(15) << n_svm_misclassified_train() << std::endl;
     else
@@ -455,7 +469,7 @@ void ModelClassifier::to_file(std::string filename, bool train, std::vector<int>
 
     out_file_stream << "# Feature Rung, Units, and Expressions" << std::endl;
     for(int ff = 0; ff < _feats.size(); ++ff)
-        out_file_stream << std::setw(6) << std::left << "# " + std::to_string(ff) + ", " << std::to_string(_feats[ff]->rung()) + ", " << std::setw(50) << _feats[ff]->unit().toString() + ", " << _feats[ff]->postfix_expr() + "," << _feats[ff]->expr() << std::endl;
+        out_file_stream << std::setw(6) << std::left << "# " + std::to_string(ff) + "; " << std::to_string(_feats[ff]->rung()) + "; " << std::setw(50) << _feats[ff]->unit().toString() + "; " << _feats[ff]->postfix_expr() + "; " << _feats[ff]->expr() + "; " << _feats[ff]->latex_expr() << std::endl;
 
     out_file_stream << "# Number of Samples Per Task" << std::endl;
     if(train)
diff --git a/src/descriptor_identifier/Model/ModelClassifier.hpp b/src/descriptor_identifier/Model/ModelClassifier.hpp
index 030a0f09..35840cb3 100644
--- a/src/descriptor_identifier/Model/ModelClassifier.hpp
+++ b/src/descriptor_identifier/Model/ModelClassifier.hpp
@@ -68,7 +68,7 @@ public:
      * @param task_sizes_train Number of samples per task in the training data
      * @param task_sizes_test Number of samples per task in the test data
      */
-    ModelClassifier(Unit prop_unit, std::vector<double> prop_train, std::vector<double> prop_test, std::vector<model_node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, bool fix_intercept);
+    ModelClassifier(std::string prop_label, Unit prop_unit, std::vector<double> prop_train, std::vector<double> prop_test, std::vector<model_node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, bool fix_intercept);
 
     // DocString: model_class_init_str
     /**
@@ -136,6 +136,14 @@ public:
      */
     std::string toString() const;
 
+    // DocString: model_class_latex_str
+    /**
+     * @brief Convert the model to a latexified string
+     *
+     * @return The string representation of the model
+     */
+    std::string toLatexString() const;
+
     /**
      *  @brief Copy the error into a new array
      *
@@ -256,4 +264,4 @@ public:
  */
 std::ostream& operator<< (std::ostream& outStream, const ModelClassifier& model);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/descriptor_identifier/Model/ModelLogRegressor.cpp b/src/descriptor_identifier/Model/ModelLogRegressor.cpp
index 969edca5..7a86f6f2 100644
--- a/src/descriptor_identifier/Model/ModelLogRegressor.cpp
+++ b/src/descriptor_identifier/Model/ModelLogRegressor.cpp
@@ -1,7 +1,7 @@
 #include <descriptor_identifier/Model/ModelLogRegressor.hpp>
 
-ModelLogRegressor::ModelLogRegressor(Unit prop_unit, std::vector<double> prop_train, std::vector<double> prop_test, std::vector<model_node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, bool fix_intercept) :
-    ModelRegressor(prop_unit, prop_train, prop_test, feats, task_sizes_train, task_sizes_test, fix_intercept),
+ModelLogRegressor::ModelLogRegressor(std::string prop_label, Unit prop_unit, std::vector<double> prop_train, std::vector<double> prop_test, std::vector<model_node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, bool fix_intercept) :
+    ModelRegressor(prop_label, prop_unit, prop_train, prop_test, feats, task_sizes_train, task_sizes_test, fix_intercept),
     _log_prop_train(_n_samp_train, 0.0),
     _log_prop_test(_n_samp_test, 0.0),
     _log_prop_train_est(_n_samp_train, 0.0),
@@ -109,8 +109,18 @@ std::string ModelLogRegressor::toString() const
     return unit_rep.str();
 }
 
+std::string ModelLogRegressor::toLatexString() const
+{
+    std::stringstream unit_rep;
+    unit_rep << "$c_0";
+    for(int ff = 0; ff < _feats.size(); ++ff)
+        unit_rep << "\\left(" << _feats[ff]->get_latex_expr("") << "\\right)^{a_" << ff << "}";
+    unit_rep << "$";
+    return unit_rep.str();
+}
+
 std::ostream& operator<< (std::ostream& outStream, const ModelLogRegressor& model)
 {
     outStream << model.toString();
     return outStream;
-}
\ No newline at end of file
+}
diff --git a/src/descriptor_identifier/Model/ModelLogRegressor.hpp b/src/descriptor_identifier/Model/ModelLogRegressor.hpp
index 00cd3037..73be5d6b 100644
--- a/src/descriptor_identifier/Model/ModelLogRegressor.hpp
+++ b/src/descriptor_identifier/Model/ModelLogRegressor.hpp
@@ -46,7 +46,7 @@ public:
      * @param task_sizes_train Number of samples per task in the training data
      * @param task_sizes_test Number of samples per task in the test data
      */
-    ModelLogRegressor(Unit prop_unit, std::vector<double> prop_train, std::vector<double> prop_test, std::vector<model_node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, bool fix_intercept);
+    ModelLogRegressor(std::string prop_label, Unit prop_unit, std::vector<double> prop_train, std::vector<double> prop_test, std::vector<model_node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, bool fix_intercept);
 
     // DocString: model_log_reg_init_str
     /**
@@ -103,6 +103,14 @@ public:
      */
     std::string toString() const;
 
+    // DocString: model_log_reg_latex_str
+    /**
+     * @brief Convert the model to a latexified string
+     *
+     * @return The string representation of the model
+     */
+    std::string toLatexString() const;
+
     /**
      *  @brief Copy the error into a new array
      *
@@ -119,4 +127,4 @@ public:
  */
 std::ostream& operator<< (std::ostream& outStream, const ModelLogRegressor& model);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/descriptor_identifier/Model/ModelRegressor.cpp b/src/descriptor_identifier/Model/ModelRegressor.cpp
index 3e5116db..5522cfc6 100644
--- a/src/descriptor_identifier/Model/ModelRegressor.cpp
+++ b/src/descriptor_identifier/Model/ModelRegressor.cpp
@@ -1,7 +1,7 @@
 #include <descriptor_identifier/Model/ModelRegressor.hpp>
 
-ModelRegressor::ModelRegressor(Unit prop_unit, std::vector<double> prop_train, std::vector<double> prop_test, std::vector<model_node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, bool fix_intercept) :
-    Model(prop_unit, prop_train, prop_test, feats, task_sizes_train, task_sizes_test, fix_intercept),
+ModelRegressor::ModelRegressor(std::string prop_label, Unit prop_unit, std::vector<double> prop_train, std::vector<double> prop_test, std::vector<model_node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, bool fix_intercept) :
+    Model(prop_label, prop_unit, prop_train, prop_test, feats, task_sizes_train, task_sizes_test, fix_intercept),
     _prop_train_est(_n_samp_train, 0.0),
     _prop_test_est(_n_samp_test, 0.0),
     _train_error(_n_samp_train),
@@ -66,17 +66,19 @@ ModelRegressor::ModelRegressor(std::string train_file)
 
     for(int ff = 0; ff < feature_expr_train.size(); ++ff)
     {
-        split_str = str_utils::split_string_trim(feature_expr_train[ff]);
+        split_str = str_utils::split_string_trim(feature_expr_train[ff], ";");
+
         int rung = std::stoi(split_str[0]);
         std::string unit_str = split_str[1];
         std::string postfix_expr = split_str[2];
         std::string expr = split_str[3];
+        std::string latex_expr = split_str[4];
 
         std::vector<double> feat_val(_n_samp_train);
         std::vector<double> feat_test_val = {};
         std::copy_n(&_D_train[ff * _n_samp_train], _n_samp_train, feat_val.data());
 
-        model_node_ptr feat = std::make_shared<ModelNode>(ff, rung, expr, postfix_expr, feat_val, feat_test_val, Unit(unit_str));
+        model_node_ptr feat = std::make_shared<ModelNode>(ff, rung, expr, latex_expr, postfix_expr, feat_val, feat_test_val, Unit(unit_str));
         _feats.push_back(feat);
     }
 
@@ -93,12 +95,13 @@ ModelRegressor::ModelRegressor(std::string train_file, std::string test_file)
         if(feature_expr_train[ff] != feature_expr_test[ff])
             throw std::logic_error("Features for train and test file do not agree");
 
-        split_str = str_utils::split_string_trim(feature_expr_train[ff]);
+        split_str = str_utils::split_string_trim(feature_expr_train[ff], ";");
 
         int rung = std::stoi(split_str[0]);
         std::string unit_str = split_str[1];
         std::string postfix_expr = split_str[2];
         std::string expr = split_str[3];
+        std::string latex_expr = split_str[4];
 
         std::vector<double> feat_val(_n_samp_train);
         std::vector<double> feat_test_val(_n_samp_test);
@@ -106,7 +109,7 @@ ModelRegressor::ModelRegressor(std::string train_file, std::string test_file)
         std::copy_n(&_D_train[ff * _n_samp_train], _n_samp_train, feat_val.data());
         std::copy_n(&_D_test[ff * _n_samp_test], _n_samp_test, feat_test_val.data());
 
-        _feats.push_back(std::make_shared<ModelNode>(ff, rung, expr, postfix_expr, feat_val, feat_test_val, Unit(unit_str)));
+        _feats.push_back(std::make_shared<ModelNode>(ff, rung, expr, latex_expr, postfix_expr, feat_val, feat_test_val, Unit(unit_str)));
     }
 }
 
@@ -130,11 +133,13 @@ std::vector<std::string> ModelRegressor::populate_model(std::string filename, bo
     if(unit_line.substr(0, 8).compare("# RMSE: ") != 0)
     {
         split_line = str_utils::split_string_trim(unit_line);
+        _prop_label = split_line[1];
         _prop_unit = Unit(split_line.back());
         std::getline(file_stream, error_line);
     }
     else
     {
+        _prop_label = "Property";
         _prop_unit = Unit();
         error_line = unit_line;
     }
@@ -170,7 +175,7 @@ std::vector<std::string> ModelRegressor::populate_model(std::string filename, bo
     else
         _fix_intercept = false;
 
-    _n_dim = n_dim + 1 - _fix_intercept;
+    _n_dim = n_dim;
 
     std::getline(file_stream, line);
     for(int ff = 0; ff < n_dim; ++ff)
@@ -263,6 +268,16 @@ std::string ModelRegressor::toString() const
     return unit_rep.str();
 }
 
+std::string ModelRegressor::toLatexString() const
+{
+    std::stringstream unit_rep;
+    unit_rep << "$c_0";
+    for(int ff = 0; ff < _feats.size(); ++ff)
+        unit_rep << " + a_" << std::to_string(ff) << _feats[ff]->get_latex_expr("");
+    unit_rep << "$";
+    return unit_rep.str();
+}
+
 std::ostream& operator<< (std::ostream& outStream, const ModelRegressor& model)
 {
     outStream << model.toString();
@@ -278,7 +293,7 @@ void ModelRegressor::to_file(std::string filename, bool train, std::vector<int>
     out_file_stream.open(filename);
 
     out_file_stream << "# " << toString() << std::endl;
-    out_file_stream << "# Property of the Unit: " << _prop_unit.toString() << std::endl;
+    out_file_stream << "# Property Label: $" << str_utils::latexify(_prop_label) << "$; Unit of the Property: " << _prop_unit.toString() << std::endl;
     if(train)
         out_file_stream << "# RMSE: " << std::setprecision(15) << rmse() << "; Max AE: " << max_ae() << std::endl;
     else
@@ -305,7 +320,7 @@ void ModelRegressor::to_file(std::string filename, bool train, std::vector<int>
 
     out_file_stream << "# Feature Rung, Units, and Expressions" << std::endl;
     for(int ff = 0; ff < _feats.size(); ++ff)
-        out_file_stream << std::setw(6) << std::left << "# " + std::to_string(ff) + ", " << std::to_string(_feats[ff]->rung()) + ", " << std::setw(50) << _feats[ff]->unit().toString() + ", " << _feats[ff]->postfix_expr() + "," << _feats[ff]->expr() << std::endl;
+        out_file_stream << std::setw(6) << std::left << "# " + std::to_string(ff) + "; " << std::to_string(_feats[ff]->rung()) + "; " << std::setw(50) << _feats[ff]->unit().toString() + "; " << _feats[ff]->postfix_expr() + "; " << _feats[ff]->expr() + "; " << _feats[ff]->latex_expr() << std::endl;
 
     out_file_stream << "# Number of Samples Per Task" << std::endl;
     if(train)
diff --git a/src/descriptor_identifier/Model/ModelRegressor.hpp b/src/descriptor_identifier/Model/ModelRegressor.hpp
index be1419b2..a12973fa 100644
--- a/src/descriptor_identifier/Model/ModelRegressor.hpp
+++ b/src/descriptor_identifier/Model/ModelRegressor.hpp
@@ -54,7 +54,7 @@ public:
      * @param task_sizes_train Number of samples per task in the training data
      * @param task_sizes_test Number of samples per task in the test data
      */
-    ModelRegressor(Unit prop_unit, std::vector<double> prop_train, std::vector<double> prop_test, std::vector<model_node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, bool fix_intercept);
+    ModelRegressor(std::string prop_label, Unit prop_unit, std::vector<double> prop_train, std::vector<double> prop_test, std::vector<model_node_ptr> feats, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, bool fix_intercept);
 
     // DocString: model_reg_init_str
     /**
@@ -122,6 +122,14 @@ public:
      */
     virtual std::string toString() const;
 
+    // DocString: model_reg_latex_str
+    /**
+     * @brief Convert the model to a latexified string
+     *
+     * @return The string representation of the model
+     */
+    std::string toLatexString() const;
+
     /**
      *  @brief Copy the error into a new array
      *
@@ -141,6 +149,18 @@ public:
      */
     inline double test_rmse(){return util_funcs::norm(_test_error.data(), _n_samp_test) / std::sqrt(static_cast<double>(_n_samp_test));}
 
+    // DocString: model_reg_r2
+    /**
+     * @brief The training R^2 of the model
+     */
+    inline double r2(){return util_funcs::r2(_prop_train.data(), _prop_train_est.data(), _n_samp_train);}
+
+    // DocString: model_reg_test_r2
+    /**
+     * @brief The test R^2 of the model
+     */
+    inline double test_r2(){return util_funcs::r2(_prop_test.data(), _prop_test_est.data(), _n_samp_test);}
+
     // DocString: model_reg_max_ae
     /**
      * @brief The max Absolute error of the training data
@@ -303,4 +323,4 @@ public:
  */
 std::ostream& operator<< (std::ostream& outStream, const ModelRegressor& model);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/descriptor_identifier/SISSO_DI/SISSOClassifier.cpp b/src/descriptor_identifier/SISSO_DI/SISSOClassifier.cpp
index 0c1baad2..dfa994e3 100644
--- a/src/descriptor_identifier/SISSO_DI/SISSOClassifier.cpp
+++ b/src/descriptor_identifier/SISSO_DI/SISSOClassifier.cpp
@@ -2,6 +2,7 @@
 
 SISSOClassifier::SISSOClassifier(
     std::shared_ptr<FeatureSpace> feat_space,
+    std::string prop_label,
     Unit prop_unit,
     std::vector<double> prop,
     std::vector<double> prop_test,
@@ -12,7 +13,7 @@ SISSOClassifier::SISSOClassifier(
     int n_residual,
     int n_models_store
 ):
-    SISSO_DI(feat_space, prop_unit, prop, prop_test, task_sizes_train, task_sizes_test, leave_out_inds, n_dim, n_residual, n_models_store, false),
+    SISSO_DI(feat_space, prop_label, prop_unit, prop, prop_test, task_sizes_train, task_sizes_test, leave_out_inds, n_dim, n_residual, n_models_store, false),
     _c(1000.0),
     _width(1.0e-5),
     _n_class(1)
@@ -258,10 +259,10 @@ void SISSOClassifier::l0_norm(std::vector<double>& prop, int n_dim)
         for(int ii = 0; ii < n_dim; ++ii)
         {
             int index = all_min_inds[inds[rr] * n_dim + ii];
-            min_nodes[ii] = std::make_shared<ModelNode>(_feat_space->phi_selected()[index]->arr_ind(), _feat_space->phi_selected()[index]->rung(), _feat_space->phi_selected()[index]->expr(), _feat_space->phi_selected()[index]->postfix_expr(), _feat_space->phi_selected()[index]->value(), _feat_space->phi_selected()[index]->test_value(), _feat_space->phi_selected()[index]->unit());
+            min_nodes[ii] = std::make_shared<ModelNode>(_feat_space->phi_selected()[index]->arr_ind(), _feat_space->phi_selected()[index]->rung(), _feat_space->phi_selected()[index]->expr(), _feat_space->phi_selected()[index]->latex_expr(), _feat_space->phi_selected()[index]->postfix_expr(), _feat_space->phi_selected()[index]->value(), _feat_space->phi_selected()[index]->test_value(), _feat_space->phi_selected()[index]->unit());
         }
 
-        models.push_back(ModelClassifier(_prop_unit, _prop, _prop_test, min_nodes, _task_sizes_train, _task_sizes_test, _fix_intercept));
+        models.push_back(ModelClassifier(_prop_label, _prop_unit, _prop, _prop_test, min_nodes, _task_sizes_train, _task_sizes_test, _fix_intercept));
     }
 
     _models.push_back(models);
@@ -282,7 +283,7 @@ void SISSOClassifier::fit()
         _mpi_comm->barrier();
         double duration = omp_get_wtime() - start;
         if(_mpi_comm->rank() == 0)
-            std::cout << "Time for SIS: " << duration << std::endl;
+            std::cout << "Time for SIS: " << duration << " s" << std::endl;
 
         start = omp_get_wtime();
         l0_norm(_prop, dd);
@@ -291,7 +292,7 @@ void SISSOClassifier::fit()
         duration = omp_get_wtime() - start;
         if(_mpi_comm->rank() == 0)
         {
-            std::cout << "Time for l0-norm: " << duration << std::endl;
+            std::cout << "Time for l0-norm: " << duration << " s" << std::endl;
             for(int rr = 0; rr < _n_models_store; ++rr)
             {
                 _models.back()[rr].to_file("models/train_dim_" + std::to_string(dd) + "_model_" + std::to_string(rr) + ".dat");
diff --git a/src/descriptor_identifier/SISSO_DI/SISSOClassifier.hpp b/src/descriptor_identifier/SISSO_DI/SISSOClassifier.hpp
index 9049edf7..26594906 100644
--- a/src/descriptor_identifier/SISSO_DI/SISSOClassifier.hpp
+++ b/src/descriptor_identifier/SISSO_DI/SISSOClassifier.hpp
@@ -57,7 +57,7 @@ public:
      * @param n_residual Number of residuals to pass to the next SIS operation
      * @param n_models_store Number of features to store in files
      */
-    SISSOClassifier(std::shared_ptr<FeatureSpace> feat_space, Unit prop_unit, std::vector<double> prop, std::vector<double> prop_test, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, std::vector<int> leave_out_inds, int n_dim, int n_residual, int n_models_store);
+    SISSOClassifier(std::shared_ptr<FeatureSpace> feat_space, std::string prop_label, Unit prop_unit, std::vector<double> prop, std::vector<double> prop_test, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, std::vector<int> leave_out_inds, int n_dim, int n_residual, int n_models_store);
 
     /**
      * @brief Check to ensure all classes in prop_test are in prop
@@ -146,6 +146,7 @@ public:
          */
         SISSOClassifier(
             std::shared_ptr<FeatureSpace> feat_space,
+            std::string prop_label,
             Unit prop_unit,
             np::ndarray prop,
             np::ndarray prop_test,
@@ -175,6 +176,7 @@ public:
          */
         SISSOClassifier(
             std::shared_ptr<FeatureSpace> feat_space,
+            std::string prop_label,
             Unit prop_unit,
             py::list prop,
             py::list prop_test,
diff --git a/src/descriptor_identifier/SISSO_DI/SISSOLogRegressor.cpp b/src/descriptor_identifier/SISSO_DI/SISSOLogRegressor.cpp
index 1391454b..4a1c42c9 100644
--- a/src/descriptor_identifier/SISSO_DI/SISSOLogRegressor.cpp
+++ b/src/descriptor_identifier/SISSO_DI/SISSOLogRegressor.cpp
@@ -2,6 +2,7 @@
 
 SISSOLogRegressor::SISSOLogRegressor(
     std::shared_ptr<FeatureSpace> feat_space,
+    std::string prop_label,
     Unit prop_unit,
     std::vector<double> prop,
     std::vector<double> prop_test,
@@ -13,7 +14,7 @@ SISSOLogRegressor::SISSOLogRegressor(
     int n_models_store,
     bool fix_intercept
 ):
-    SISSORegressor(feat_space, prop_unit, prop, prop_test, task_sizes_train, task_sizes_test, leave_out_inds, n_dim, n_residual, n_models_store, fix_intercept),
+    SISSORegressor(feat_space, prop_label, prop_unit, prop, prop_test, task_sizes_train, task_sizes_test, leave_out_inds, n_dim, n_residual, n_models_store, fix_intercept),
     _prop_no_log(prop)
 {
     if(*std::min_element(prop.begin(), prop.end()) <= 0.0)
@@ -64,9 +65,9 @@ void SISSOLogRegressor::add_model(std::vector<int> indexes)
     for(int ii = 0; ii < indexes.size(); ++ii)
     {
         int index = indexes[ii];
-        min_nodes[ii] = std::make_shared<ModelNode>(_feat_space->phi_selected()[index]->arr_ind(), _feat_space->phi_selected()[index]->rung(), _feat_space->phi_selected()[index]->expr(), _feat_space->phi_selected()[index]->postfix_expr(), _feat_space->phi_selected()[index]->value(), _feat_space->phi_selected()[index]->test_value(), _feat_space->phi_selected()[index]->unit());
+        min_nodes[ii] = std::make_shared<ModelNode>(_feat_space->phi_selected()[index]->arr_ind(), _feat_space->phi_selected()[index]->rung(), _feat_space->phi_selected()[index]->expr(), _feat_space->phi_selected()[index]->latex_expr(), _feat_space->phi_selected()[index]->postfix_expr(), _feat_space->phi_selected()[index]->value(), _feat_space->phi_selected()[index]->test_value(), _feat_space->phi_selected()[index]->unit());
     }
-    ModelLogRegressor model(_prop_unit, _prop_no_log, _prop_test, min_nodes, _task_sizes_train, _task_sizes_test, _fix_intercept);
+    ModelLogRegressor model(_prop_label, _prop_unit, _prop_no_log, _prop_test, min_nodes, _task_sizes_train, _task_sizes_test, _fix_intercept);
     _models.back().push_back(model);
 }
 
diff --git a/src/descriptor_identifier/SISSO_DI/SISSOLogRegressor.hpp b/src/descriptor_identifier/SISSO_DI/SISSOLogRegressor.hpp
index 6404eed0..81e33ec7 100644
--- a/src/descriptor_identifier/SISSO_DI/SISSOLogRegressor.hpp
+++ b/src/descriptor_identifier/SISSO_DI/SISSOLogRegressor.hpp
@@ -46,7 +46,7 @@ public:
      * @param n_models_store Number of features to store in files
      * @param fix_intrecept If true fix intercept to 0
      */
-    SISSOLogRegressor(std::shared_ptr<FeatureSpace> feat_space, Unit prop_unit, std::vector<double> prop, std::vector<double> prop_test, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, std::vector<int> leave_out_inds, int n_dim, int n_residual, int n_models_store, bool fix_intercept=false);
+    SISSOLogRegressor(std::shared_ptr<FeatureSpace> feat_space, std::string prop_label, Unit prop_unit, std::vector<double> prop, std::vector<double> prop_test, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, std::vector<int> leave_out_inds, int n_dim, int n_residual, int n_models_store, bool fix_intercept=false);
 
     /**
      * @brief Set the A matrix for the least squares problem
@@ -113,6 +113,7 @@ public:
          */
         SISSOLogRegressor(
             std::shared_ptr<FeatureSpace> feat_space,
+            std::string prop_label,
             Unit prop_unit,
             np::ndarray prop,
             np::ndarray prop_test,
@@ -143,6 +144,7 @@ public:
          */
         SISSOLogRegressor(
             std::shared_ptr<FeatureSpace> feat_space,
+            std::string prop_label,
             Unit prop_unit,
             py::list prop,
             py::list prop_test,
@@ -160,7 +162,7 @@ public:
          * @brief The selected models (cpp definition in <python/descriptor_identifier/SISSO_DI.cpp)
          * @return models as a python list
          */
-        py::list models_py();
+        py::list models_log_reg_py();
     #endif
 };
 
diff --git a/src/descriptor_identifier/SISSO_DI/SISSORegressor.cpp b/src/descriptor_identifier/SISSO_DI/SISSORegressor.cpp
index ff2fa968..c32d044e 100644
--- a/src/descriptor_identifier/SISSO_DI/SISSORegressor.cpp
+++ b/src/descriptor_identifier/SISSO_DI/SISSORegressor.cpp
@@ -2,6 +2,7 @@
 
 SISSORegressor::SISSORegressor(
     std::shared_ptr<FeatureSpace> feat_space,
+    std::string prop_label,
     Unit prop_unit,
     std::vector<double> prop,
     std::vector<double> prop_test,
@@ -13,7 +14,7 @@ SISSORegressor::SISSORegressor(
     int n_models_store,
     bool fix_intercept
 ):
-    SISSO_DI(feat_space, prop_unit, prop, prop_test, task_sizes_train, task_sizes_test, leave_out_inds, n_dim, n_residual, n_models_store, fix_intercept)
+    SISSO_DI(feat_space, prop_label, prop_unit, prop, prop_test, task_sizes_train, task_sizes_test, leave_out_inds, n_dim, n_residual, n_models_store, fix_intercept)
 {}
 
 void  SISSORegressor::set_a(std::vector<int>& inds, int start, int n_samp, double* a)
@@ -87,9 +88,9 @@ void SISSORegressor::add_model(std::vector<int> indexes)
     for(int ii = 0; ii < indexes.size(); ++ii)
     {
         int index = indexes[ii];
-        min_nodes[ii] = std::make_shared<ModelNode>(_feat_space->phi_selected()[index]->arr_ind(), _feat_space->phi_selected()[index]->rung(), _feat_space->phi_selected()[index]->expr(), _feat_space->phi_selected()[index]->postfix_expr(), _feat_space->phi_selected()[index]->value(), _feat_space->phi_selected()[index]->test_value(), _feat_space->phi_selected()[index]->unit());
+        min_nodes[ii] = std::make_shared<ModelNode>(_feat_space->phi_selected()[index]->arr_ind(), _feat_space->phi_selected()[index]->rung(), _feat_space->phi_selected()[index]->expr(), _feat_space->phi_selected()[index]->latex_expr(), _feat_space->phi_selected()[index]->postfix_expr(), _feat_space->phi_selected()[index]->value(), _feat_space->phi_selected()[index]->test_value(), _feat_space->phi_selected()[index]->unit());
     }
-    _models.back().push_back(ModelRegressor(_prop_unit, _prop, _prop_test, min_nodes, _task_sizes_train, _task_sizes_test, _fix_intercept));
+    _models.back().push_back(ModelRegressor(_prop_label, _prop_unit, _prop, _prop_test, min_nodes, _task_sizes_train, _task_sizes_test, _fix_intercept));
 }
 
 void SISSORegressor::output_models(std::vector<double>& residual)
@@ -216,7 +217,7 @@ void SISSORegressor::fit()
         _mpi_comm->barrier();
         double duration = omp_get_wtime() - start;
         if(_mpi_comm->rank() == 0)
-            std::cout << "Time for SIS: " << duration << std::endl;
+            std::cout << "Time for SIS: " << duration  << " s" << std::endl;
 
         start = omp_get_wtime();
         l0_norm(_prop, dd);
@@ -225,7 +226,7 @@ void SISSORegressor::fit()
         duration = omp_get_wtime() - start;
 
         if(_mpi_comm->rank() == 0)
-            std::cout << "Time for l0-norm: " << duration << std::endl;
+            std::cout << "Time for l0-norm: " << duration << " s"  << std::endl;
         output_models(residual);
     }
 }
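
A usage sketch of the updated constructor signature (placeholders, not values from this repository): feat_space, prop_unit, prop, prop_test, the task-size vectors, and leave_out_inds are assumed to be prepared by the driver; the property label is the argument added in this patch.

    #include <descriptor_identifier/SISSO_DI/SISSORegressor.hpp>

    // Hypothetical driver snippet; only the second argument (prop_label) is new.
    SISSORegressor sisso(
        feat_space,        // std::shared_ptr<FeatureSpace>, already initialized
        "bulk_modulus",    // prop_label (placeholder label)
        prop_unit,         // Unit of the property, prepared elsewhere
        prop,              // training property values
        prop_test,         // test property values
        task_sizes_train,
        task_sizes_test,
        leave_out_inds,
        2,                 // n_dim
        1,                 // n_residual
        1,                 // n_models_store
        false              // fix_intercept
    );
    sisso.fit();           // runs the SIS / l0-norm loop timed above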
diff --git a/src/descriptor_identifier/SISSO_DI/SISSORegressor.hpp b/src/descriptor_identifier/SISSO_DI/SISSORegressor.hpp
index 3df14621..9fee0da8 100644
--- a/src/descriptor_identifier/SISSO_DI/SISSORegressor.hpp
+++ b/src/descriptor_identifier/SISSO_DI/SISSORegressor.hpp
@@ -48,7 +48,7 @@ public:
      * @param n_models_store Number of features to store in files
     * @param fix_intercept If true, fix the intercept to 0
      */
-    SISSORegressor(std::shared_ptr<FeatureSpace> feat_space, Unit prop_unit, std::vector<double> prop, std::vector<double> prop_test, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, std::vector<int> leave_out_inds, int n_dim, int n_residual, int n_models_store, bool fix_intercept=false);
+    SISSORegressor(std::shared_ptr<FeatureSpace> feat_space, std::string prop_label, Unit prop_unit, std::vector<double> prop, std::vector<double> prop_test, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, std::vector<int> leave_out_inds, int n_dim, int n_residual, int n_models_store, bool fix_intercept=false);
 
     /**
      * @brief Set the residual for the next step
@@ -140,6 +140,7 @@ public:
          */
         SISSORegressor(
             std::shared_ptr<FeatureSpace> feat_space,
+            std::string prop_label,
             Unit prop_unit,
             np::ndarray prop,
             np::ndarray prop_test,
@@ -170,6 +171,7 @@ public:
          */
         SISSORegressor(
             std::shared_ptr<FeatureSpace> feat_space,
+            std::string prop_label,
             Unit prop_unit,
             py::list prop,
             py::list prop_test,
@@ -187,7 +189,7 @@ public:
          * @brief The selected models (cpp definition in <python/descriptor_identifier/SISSO_DI.cpp)
          * @return models as a python list
          */
-        virtual py::list models_py();
+        py::list models_py();
     #endif
 };
 
diff --git a/src/descriptor_identifier/SISSO_DI/SISSO_DI.cpp b/src/descriptor_identifier/SISSO_DI/SISSO_DI.cpp
index aa856397..d8e2cf59 100644
--- a/src/descriptor_identifier/SISSO_DI/SISSO_DI.cpp
+++ b/src/descriptor_identifier/SISSO_DI/SISSO_DI.cpp
@@ -2,6 +2,7 @@
 
 SISSO_DI::SISSO_DI(
     std::shared_ptr<FeatureSpace> feat_space,
+    std::string prop_label,
     Unit prop_unit,
     std::vector<double> prop,
     std::vector<double> prop_test,
@@ -19,6 +20,7 @@ SISSO_DI::SISSO_DI(
     _task_sizes_train(task_sizes_train),
     _task_sizes_test(task_sizes_test),
     _leave_out_inds(leave_out_inds),
+    _prop_label(prop_label),
     _prop_unit(prop_unit),
     _feat_space(feat_space),
     _mpi_comm(feat_space->mpi_comm()),
diff --git a/src/descriptor_identifier/SISSO_DI/SISSO_DI.hpp b/src/descriptor_identifier/SISSO_DI/SISSO_DI.hpp
index 4b622050..f59e000b 100644
--- a/src/descriptor_identifier/SISSO_DI/SISSO_DI.hpp
+++ b/src/descriptor_identifier/SISSO_DI/SISSO_DI.hpp
@@ -36,6 +36,7 @@ protected:
     std::vector<int> _leave_out_inds; //!< List of indexes from the initial data file in the test set
 
     Unit _prop_unit; //!< The Unit for the property
+    std::string _prop_label; //!< The label for the property
 
     std::shared_ptr<FeatureSpace> _feat_space; //!< Feature Space for the problem
     std::shared_ptr<MPI_Interface> _mpi_comm; //!< MPI Communicator
@@ -52,6 +53,7 @@ public:
      * @brief Constructor for the Regressor
      *
      * @param feat_space The feature space to run SISSO on
+     * @param prop_label The label for the property
      * @param prop_unit The unit of the property
      * @param prop Vector storing all data to train the SISSO models with
     * @param prop_test Vector storing all data to test the SISSO models with
@@ -63,7 +65,7 @@ public:
      * @param n_models_store Number of features to store in files
     * @param fix_intercept If true, fix the intercept to 0
      */
-    SISSO_DI(std::shared_ptr<FeatureSpace> feat_space, Unit prop_unit, std::vector<double> prop, std::vector<double> prop_test, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, std::vector<int> leave_out_inds, int n_dim, int n_residual, int n_models_store, bool fix_intercept=false);
+    SISSO_DI(std::shared_ptr<FeatureSpace> feat_space, std::string prop_label, Unit prop_unit, std::vector<double> prop, std::vector<double> prop_test, std::vector<int> task_sizes_train, std::vector<int> task_sizes_test, std::vector<int> leave_out_inds, int n_dim, int n_residual, int n_models_store, bool fix_intercept=false);
 
     /**
      * @brief Perform SISSO to generate the models
@@ -129,6 +131,7 @@ public:
          * @brief Constructor for the Regressor that takes in python objects (cpp definition in <python/descriptor_identifier/SISSO_DI.cpp)
          *
          * @param feat_space The feature space to run SISSO on
+         * @param prop_label The label for the property
          * @param prop_unit The unit of the property
          * @param prop Vector storing all data to train the SISSO models with
          * @param prop_test Vector storing all data to test the SISSO models with
@@ -142,6 +145,7 @@ public:
          */
         SISSO_DI(
             std::shared_ptr<FeatureSpace> feat_space,
+            std::string prop_label,
             Unit prop_unit,
             np::ndarray prop,
             np::ndarray prop_test,
@@ -159,6 +163,7 @@ public:
          * @brief Constructor for the Regressor that takes in python objects (cpp definition in <python/descriptor_identifier/SISSO_DI.cpp)
          *
          * @param feat_space The feature space to run SISSO on
+         * @param prop_label The label for the property
          * @param prop_unit The unit of the property
          * @param prop Vector storing all data to train the SISSO models with
          * @param prop_test Vector storing all data to test the SISSO models with
@@ -172,6 +177,7 @@ public:
          */
         SISSO_DI(
             std::shared_ptr<FeatureSpace> feat_space,
+            std::string prop_label,
             Unit prop_unit,
             py::list prop,
             py::list prop_test,
diff --git a/src/feature_creation/feature_space/FeatureSpace.cpp b/src/feature_creation/feature_space/FeatureSpace.cpp
index 847f609a..0827d4e9 100644
--- a/src/feature_creation/feature_space/FeatureSpace.cpp
+++ b/src/feature_creation/feature_space/FeatureSpace.cpp
@@ -121,8 +121,11 @@ void FeatureSpace::initialize_fs(std::vector<double> prop, std::string project_t
     }
     double start = omp_get_wtime();
     generate_feature_space(prop);
+    _mpi_comm->barrier();
     double duration = omp_get_wtime() - start;
-    std::cout << "time to generate feat sapce: " << duration << std::endl;
+    if(_mpi_comm->rank() == 0)
+        std::cout << "time to generate feat sapce: " << duration << " s" << std::endl;
+
     _scores.reserve(_phi.size());
     _scores.resize(_phi.size());
 }
@@ -547,7 +550,7 @@ void FeatureSpace::sis(std::vector<double>& prop)
 
     _mpi_comm->barrier();
     if(_mpi_comm->rank() == 0)
-        std::cout << "Projection time: " << omp_get_wtime() - start << std::endl;
+        std::cout << "Projection time: " << omp_get_wtime() - start << " s" << std::endl;
 
     std::vector<int> inds = util_funcs::argsort(_scores);
 
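
The two timing changes above follow the same idiom: synchronize all ranks before stopping the clock, then report once from rank 0 so the printed duration reflects the slowest rank. A self-contained sketch of that idiom, using raw MPI calls in place of the project's MPI_Interface wrapper:

    #include <mpi.h>
    #include <omp.h>
    #include <iostream>

    // Barrier-then-rank-0 timing: every rank finishes the step before the
    // clock stops, and only one rank prints the wall time.
    void report_step_time(const char* label, double start)
    {
        MPI_Barrier(MPI_COMM_WORLD);
        double duration = omp_get_wtime() - start;

        int rank;
        MPI_Comm_rank(MPI_COMM_WORLD, &rank);
        if(rank == 0)
            std::cout << label << ": " << duration << " s" << std::endl;
    }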
diff --git a/src/feature_creation/feature_space/FeatureSpace.hpp b/src/feature_creation/feature_space/FeatureSpace.hpp
index d5c25fc1..f0f554f6 100644
--- a/src/feature_creation/feature_space/FeatureSpace.hpp
+++ b/src/feature_creation/feature_space/FeatureSpace.hpp
@@ -420,8 +420,8 @@ public:
          * @param ind index of the feature to get
          * @return A ModelNode of the feature at index ind
          */
-        inline ModelNode get_feature(int ind){return ModelNode(_phi[ind]->d_mat_ind(), _phi[ind]->rung(), _phi[ind]->expr(), _phi[ind]->postfix_expr(), _phi[ind]->value(), _phi[ind]->test_value(), _phi[ind]->unit());}
+        inline ModelNode get_feature(int ind){return ModelNode(_phi[ind]->d_mat_ind(), _phi[ind]->rung(), _phi[ind]->expr(), _phi[ind]->latex_expr(), _phi[ind]->postfix_expr(), _phi[ind]->value(), _phi[ind]->test_value(), _phi[ind]->unit());}
     #endif
 };
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/feature_creation/node/FeatureNode.hpp b/src/feature_creation/node/FeatureNode.hpp
index 265e0695..5c09ee5b 100644
--- a/src/feature_creation/node/FeatureNode.hpp
+++ b/src/feature_creation/node/FeatureNode.hpp
@@ -11,6 +11,7 @@
 
 #include <utils/math_funcs.hpp>
 #include <utils/enum.hpp>
+#include <utils/string_utils.hpp>
 
 #include <memory>
 
@@ -138,6 +139,12 @@ public:
      */
     inline std::string expr()const{return _expr;}
 
+    // DocString: feat_node_latex_expr
+    /**
+     * @brief Get the latexified expression for the overall feature (From root node down)
+     */
+    inline std::string get_latex_expr(std::string cap){return str_utils::latexify(_expr);}
+
     // DocString: feat_node_unit
     /**
      * @brief The unit of the primary feature
diff --git a/src/feature_creation/node/ModelNode.cpp b/src/feature_creation/node/ModelNode.cpp
index dc5d3376..e4ea9cc0 100644
--- a/src/feature_creation/node/ModelNode.cpp
+++ b/src/feature_creation/node/ModelNode.cpp
@@ -3,10 +3,11 @@
 ModelNode::ModelNode()
 {}
 
-ModelNode::ModelNode(int feat_ind, int rung, std::string expr, std::string post_fix_expr, std::vector<double> value, std::vector<double> test_value, Unit unit) :
+ModelNode::ModelNode(int feat_ind, int rung, std::string expr, std::string latex_expr, std::string post_fix_expr, std::vector<double> value, std::vector<double> test_value, Unit unit) :
     FeatureNode(feat_ind, expr, value, test_value, unit, false),
     _value_svm(_n_samp),
     _test_value_svm(_n_test_samp),
+    _latex_expr(latex_expr),
     _expr_postfix(post_fix_expr),
     _b_remap_svm(0.0),
     _w_remap_svm(1.0),
diff --git a/src/feature_creation/node/ModelNode.hpp b/src/feature_creation/node/ModelNode.hpp
index 0640d11d..7a390296 100644
--- a/src/feature_creation/node/ModelNode.hpp
+++ b/src/feature_creation/node/ModelNode.hpp
@@ -39,12 +39,14 @@ protected:
     std::vector<double> _value_svm; //!< Value of the feature converted to a range of -1.0 to 1.0
     std::vector<double> _test_value_svm; //!< Value of the feature converted to a range of -1.0 to 1.0
 
+    std::string _expr_postfix; //!< postfix expression for the feature
+    std::string _latex_expr; //!< latexified expression for the feature
+
     double _b_remap_svm; //!< value to remap the b from SVM to real life
     double _w_remap_svm; //!< value to remap the w from SVM to real life
 
     int _rung; //!< rung of the feature
 
-    std::string _expr_postfix; //!< postfix expression for the feature
 public:
     /**
      * @brief Base Constructor
@@ -62,7 +64,7 @@ public:
      * @param value Value of the feature for each test sample
      * @param unit Unit of the feature
      */
-    ModelNode(int feat_ind, int rung, std::string expr, std::string expr_postfix, std::vector<double> value, std::vector<double> test_value, Unit unit);
+    ModelNode(int feat_ind, int rung, std::string expr, std::string latex_expr, std::string expr_postfix, std::vector<double> value, std::vector<double> test_value, Unit unit);
 
     /**
      * @brief Copy Constructor
@@ -182,6 +184,12 @@ public:
      */
     inline std::string get_postfix_term(){return _expr_postfix;}
 
+    // DocString: model_node_latex_expr
+    /**
+     * @brief Get the latexified expression for the overall feature (From root node down)
+     */
+    inline std::string get_latex_expr(std::string cap){return cap + _latex_expr.substr(1, _latex_expr.size() - 2) + cap;}
+
     /**
      * @brief update the dictionary used to check if an Add/Sub node is valid
      *
diff --git a/src/feature_creation/node/Node.hpp b/src/feature_creation/node/Node.hpp
index c7cb6be0..0e86e69a 100644
--- a/src/feature_creation/node/Node.hpp
+++ b/src/feature_creation/node/Node.hpp
@@ -190,6 +190,18 @@ public:
      */
     virtual std::string expr() = 0;
 
+    // DocString: node_latex_expr
+    /**
+     * @brief Get the latexified expression for the overall feature (From root node down)
+     */
+    virtual std::string get_latex_expr(std::string cap) = 0;
+
+    // DocString: node_latex_expr
+    /**
+     * @brief Get the latexified expression for the overall feature (From root node down)
+     */
+    inline std::string latex_expr(){return get_latex_expr("$");}
+
     // DocString: node_unit
     /**
      * @brief Get the unit for the overall feature (From root node down)
@@ -365,4 +377,4 @@ class InvalidFeatureException: public std::exception
        return "Invalid Feature created";
     }
 };
-#endif
\ No newline at end of file
+#endif
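
The cap argument is what keeps the math-mode delimiters at the outermost level only: latex_expr() seeds the root call with "$", while operator nodes pass "" to their children so nested sub-expressions contribute bare LaTeX. A minimal standalone sketch of the pattern with toy classes (not the real Node hierarchy):

    #include <iostream>
    #include <memory>
    #include <string>

    // Toy reimplementation of the cap-forwarding idiom from Node/OperatorNode.
    struct ToyNode
    {
        virtual ~ToyNode() = default;
        virtual std::string get_latex_expr(std::string cap) = 0;
        std::string latex_expr() { return get_latex_expr("$"); } // only the root adds $...$
    };

    struct ToyFeature : ToyNode
    {
        std::string _symbol;
        explicit ToyFeature(std::string symbol) : _symbol(symbol) {}
        std::string get_latex_expr(std::string cap) override { return cap + _symbol + cap; }
    };

    struct ToySqrt : ToyNode
    {
        std::shared_ptr<ToyNode> _feat;
        explicit ToySqrt(std::shared_ptr<ToyNode> feat) : _feat(feat) {}
        std::string get_latex_expr(std::string cap) override
        {
            // Children are called with an empty cap so delimiters never nest.
            return cap + "\\left(\\sqrt{" + _feat->get_latex_expr("") + "}\\right)" + cap;
        }
    };

    int main()
    {
        auto expr = std::make_shared<ToySqrt>(std::make_shared<ToyFeature>("r_{AB}"));
        std::cout << expr->latex_expr() << std::endl; // prints: $\left(\sqrt{r_{AB}}\right)$
        return 0;
    }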
diff --git a/src/feature_creation/node/operator_nodes/OperatorNode.hpp b/src/feature_creation/node/operator_nodes/OperatorNode.hpp
index 5c5c97f1..6ed29b45 100644
--- a/src/feature_creation/node/operator_nodes/OperatorNode.hpp
+++ b/src/feature_creation/node/operator_nodes/OperatorNode.hpp
@@ -105,6 +105,12 @@ public:
      */
     virtual std::string expr() = 0;
 
+    // DocString: op_node_latex_expr
+    /**
+     * @brief Get the latexified expression for the overall feature (From root node down)
+     */
+    virtual std::string get_latex_expr(std::string cap) = 0;
+
     // DocString: op_node_unit
     /**
      * @brief Get the unit of the feature (combine the units of _feats)
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/absolute_value.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.cpp
similarity index 98%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/absolute_value.cpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.cpp
index a7ca28ba..58522a9e 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/absolute_value.cpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.cpp
@@ -1,4 +1,4 @@
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/absolute_value.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.hpp>
 
 void generateAbsNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound)
 {
@@ -69,4 +69,4 @@ void AbsNode::set_test_value(int offset)
 {
     offset = (offset == -1) ? rung() : offset;
     allowed_op_funcs::abs(_n_test_samp, _feats[0]->test_value_ptr(offset + 2), node_value_arrs::get_test_value_ptr(_arr_ind, _feat_ind, offset));
-}
\ No newline at end of file
+}
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/absolute_value.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.hpp
similarity index 93%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/absolute_value.hpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.hpp
index 978e3a34..eca203c6 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/absolute_value.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.hpp
@@ -71,6 +71,15 @@ public:
      */
     inline std::string expr(){return "|" + _feats[0]->expr() + "|";}
 
+    // DocString: abs_node_latex_expr
+    /**
+     * @brief Get the expression for the overall feature (From root node down)
+     */
+    inline std::string get_latex_expr(std::string cap)
+    {
+        return cap + "\\left(\\left|" + _feats[0]->get_latex_expr("") + "\\right|\\right)" + cap;
+    }
+
     // DocString: abs_node_set_value
     /**
      * @brief Set the values of the training data for the feature inside of the value storage arrays
@@ -127,4 +136,4 @@ public:
 };
 void generateAbsNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound);
 
-#endif
\ No newline at end of file
+#endif
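
A hedged usage sketch of the new accessor pair on one of these operator nodes: feat is assumed to be a node_ptr built elsewhere (for example via generateAbsNode), and the strings in the comments only illustrate the shape of the output for a child that renders as "x".

    #include <feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.hpp>
    #include <iostream>

    // Sketch only: feat points at an AbsNode whose child renders as "x".
    void print_feature_tex(node_ptr feat)
    {
        // Wrapped in $...$ for direct display:
        std::cout << feat->latex_expr() << std::endl;        // e.g. $\left(\left|x\right|\right)$
        // Bare body, for embedding inside a larger LaTeX string:
        std::cout << feat->get_latex_expr("") << std::endl;  // e.g. \left(\left|x\right|\right)
    }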
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/absolute_difference.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.cpp
similarity index 99%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/absolute_difference.cpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.cpp
index 3bd70cd8..b64158f5 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/absolute_difference.cpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.cpp
@@ -1,4 +1,4 @@
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/absolute_difference.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.hpp>
 
 void generateAbsDiffNode(std::vector<node_ptr>& feat_list, node_ptr feat_1, node_ptr feat_2, int& feat_ind, double l_bound, double u_bound)
 {
@@ -98,4 +98,4 @@ void AbsDiffNode::set_test_value(int offset)
 {
     offset = (offset == -1) ? rung() : offset;
     allowed_op_funcs::abs_diff(_n_test_samp, _feats[0]->test_value_ptr(offset + 2), _feats[1]->test_value_ptr(offset + 1), node_value_arrs::get_test_value_ptr(_arr_ind, _feat_ind, offset));
-}
\ No newline at end of file
+}
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/absolute_difference.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.hpp
similarity index 93%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/absolute_difference.hpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.hpp
index ce6508a4..57f9bc0b 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/absolute_difference.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.hpp
@@ -74,6 +74,15 @@ public:
      */
     inline std::string expr(){return "|" + _feats[0]->expr() + " - (" + _feats[1]->expr() + ")|";}
 
+    // DocString: abs_diff_node_latex_expr
+    /**
+     * @brief Get the expression for the overall feature (From root node down)
+     */
+    inline std::string get_latex_expr(std::string cap)
+    {
+        return cap + "\\left(\\left|" + _feats[0]->get_latex_expr("") + " + " + _feats[1]->get_latex_expr("") + "\\right|\\right)" + cap;
+    }
+
     // DocString: abs_diff_node_set_value
     /**
      * @brief Set the values of the training data for the feature inside of the value storage arrays
@@ -131,4 +140,4 @@ public:
 
 void generateAbsDiffNode(std::vector<node_ptr>& feat_list, node_ptr feat_1, node_ptr feat_2, int& feat_ind, double l_bound, double u_bound);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.cpp
similarity index 99%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/add.cpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.cpp
index 406312da..ff104922 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add.cpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.cpp
@@ -1,4 +1,4 @@
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/add.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.hpp>
 
 void generateAddNode(std::vector<node_ptr>& feat_list, node_ptr feat_1, node_ptr feat_2, int& feat_ind, double l_bound, double u_bound)
 {
@@ -92,4 +92,4 @@ void AddNode::set_test_value(int offset)
 {
     offset = (offset == -1) ? rung() : offset;
     allowed_op_funcs::add(_n_test_samp, _feats[0]->test_value_ptr(offset + 2), _feats[1]->test_value_ptr(offset + 1), node_value_arrs::get_test_value_ptr(_arr_ind, _feat_ind, offset));
-}
\ No newline at end of file
+}
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.hpp
similarity index 93%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/add.hpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.hpp
index d9783c99..cbddcf22 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.hpp
@@ -71,6 +71,15 @@ public:
      */
     inline std::string expr(){return "(" + _feats[0]->expr() + " + " + _feats[1]->expr() + ")";}
 
+    // DocString: add_node_latex_expr
+    /**
+     * @brief Get the expression for the overall feature (From root node down)
+     */
+    inline std::string get_latex_expr(std::string cap)
+    {
+        return cap + "\\left(" + _feats[0]->get_latex_expr("") + " + " + _feats[1]->get_latex_expr("") + "\\right)" + cap;
+    }
+
     // DocString: add_node_set_value
     /**
      * @brief Set the values of the training data for the feature inside of the value storage arrays
@@ -127,4 +136,4 @@ public:
 };
 void generateAddNode(std::vector<node_ptr>& feat_list, node_ptr feat_1, node_ptr feat_2, int& feat_ind, double l_bound, double u_bound);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cube.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.cpp
similarity index 99%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/cube.cpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.cpp
index 4fb759c8..cb122965 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cube.cpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.cpp
@@ -1,4 +1,4 @@
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/cube.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.hpp>
 
 void generateCbNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound)
 {
@@ -63,4 +63,4 @@ void CbNode::set_test_value(int offset)
 {
     offset = (offset == -1) ? rung() : offset;
     allowed_op_funcs::cb(_n_test_samp, _feats[0]->test_value_ptr(offset + 2), node_value_arrs::get_test_value_ptr(_arr_ind, _feat_ind, offset));
-}
\ No newline at end of file
+}
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cube.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.hpp
similarity index 93%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/cube.hpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.hpp
index f031c9d6..9b04c88e 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cube.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.hpp
@@ -69,6 +69,15 @@ public:
      */
     inline std::string expr(){return "(" + _feats[0]->expr() + ")^3";}
 
+    // DocString: cb_node_latex_expr
+    /**
+     * @brief Get the expression for the overall feature (From root node down)
+     */
+    inline std::string get_latex_expr(std::string cap)
+    {
+        return cap + "\\left(" + _feats[0]->get_latex_expr("") + "^3\\right)" + cap;
+    }
+
     // DocString: cb_node_set_value
     /**
      * @brief Set the values of the training data for the feature inside of the value storage arrays
@@ -125,4 +134,4 @@ public:
 };
 void generateCbNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cube_root.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.cpp
similarity index 98%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/cube_root.cpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.cpp
index 4ae8730d..f48f3368 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cube_root.cpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.cpp
@@ -1,4 +1,4 @@
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/cube_root.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.hpp>
 
 void generateCbrtNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound)
 {
@@ -64,4 +64,4 @@ void CbrtNode::set_test_value(int offset)
 {
     offset = (offset == -1) ? rung() : offset;
     allowed_op_funcs::cbrt(_n_test_samp, _feats[0]->test_value_ptr(offset + 2), node_value_arrs::get_test_value_ptr(_arr_ind, _feat_ind, offset));
-}
\ No newline at end of file
+}
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cube_root.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.hpp
similarity index 93%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/cube_root.hpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.hpp
index a786ad6c..d6add3fd 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cube_root.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.hpp
@@ -69,6 +69,15 @@ public:
      */
     inline std::string expr(){return "cbrt(" + _feats[0]->expr() + ")";}
 
+    // DocString: cbrt_node_latex_expr
+    /**
+     * @brief Get the expression for the overall feature (From root node down)
+     */
+    inline std::string get_latex_expr(std::string cap)
+    {
+        return cap + "\\left(\\sqrt[3]{" + _feats[0]->get_latex_expr("") + "}\\right)" + cap;
+    }
+
     // DocString: cbrt_node_set_value
     /**
      * @brief Set the values of the training data for the feature inside of the value storage arrays
@@ -125,4 +134,4 @@ public:
 };
 void generateCbrtNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.cpp
similarity index 99%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos.cpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.cpp
index 8a1545a2..0509db8d 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos.cpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.cpp
@@ -1,4 +1,4 @@
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/cos.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.hpp>
 
 void generateCosNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound)
 {
@@ -72,4 +72,4 @@ void CosNode::set_test_value(int offset)
 {
     offset = (offset == -1) ? rung() : offset;
     allowed_op_funcs::cos(_n_test_samp, _feats[0]->test_value_ptr(offset + 2), node_value_arrs::get_test_value_ptr(_arr_ind, _feat_ind, offset));
-}
\ No newline at end of file
+}
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.hpp
similarity index 93%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos.hpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.hpp
index cc4bac8b..ec052256 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.hpp
@@ -69,6 +69,15 @@ public:
      */
     inline std::string expr(){return "cos(" + _feats[0]->expr() + ")";}
 
+    // DocString: cos_node_latex_expr
+    /**
+     * @brief Get the expression for the overall feature (From root node down)
+     */
+    inline std::string get_latex_expr(std::string cap)
+    {
+        return cap + "\\left(\\cos{" + _feats[0]->get_latex_expr("") + "}\\right)" + cap;
+    }
+
     // DocString: cos_node_set_value
     /**
      * @brief Set the values of the training data for the feature inside of the value storage arrays
@@ -125,4 +134,4 @@ public:
 };
 void generateCosNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/divide.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.cpp
similarity index 99%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/divide.cpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.cpp
index 7229a121..f5497e15 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/divide.cpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.cpp
@@ -1,4 +1,4 @@
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/divide.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.hpp>
 
 void generateDivNode(std::vector<node_ptr>& feat_list, node_ptr feat_1, node_ptr feat_2, int& feat_ind, double l_bound, double u_bound)
 {
@@ -93,4 +93,4 @@ void DivNode::set_test_value(int offset)
 {
     offset = (offset == -1) ? rung() : offset;
     allowed_op_funcs::div(_n_test_samp, _feats[0]->test_value_ptr(offset + 2), _feats[1]->test_value_ptr(offset + 1), node_value_arrs::get_test_value_ptr(_arr_ind, _feat_ind, offset));
-}
\ No newline at end of file
+}
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/divide.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.hpp
similarity index 93%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/divide.hpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.hpp
index 3194f084..f667f2ff 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/divide.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.hpp
@@ -71,6 +71,15 @@ public:
      */
     inline std::string expr(){return "[(" + _feats[0]->expr() + ") / (" + _feats[1]->expr() + ")]";}
 
+    // DocString: div_node_latex_expr
+    /**
+     * @brief Get the expression for the overall feature (From root node down)
+     */
+    inline std::string get_latex_expr(std::string cap)
+    {
+        return cap + "\\left(\\frac{" + _feats[0]->get_latex_expr("") + "}{" + _feats[1]->get_latex_expr("") + "}\\right)" + cap;
+    }
+
     // DocString: div_node_set_value
     /**
      * @brief Set the values of the training data for the feature inside of the value storage arrays
@@ -127,4 +136,4 @@ public:
 };
 void generateDivNode(std::vector<node_ptr>& feat_list, node_ptr feat_1, node_ptr feat_2, int& feat_ind, double l_bound, double u_bound);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exponential.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.cpp
similarity index 99%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/exponential.cpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.cpp
index 6b5f5bf9..5900c35d 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exponential.cpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.cpp
@@ -1,4 +1,4 @@
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/exponential.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.hpp>
 
 void generateExpNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound)
 {
@@ -72,4 +72,4 @@ void ExpNode::set_test_value(int offset)
 {
     offset = (offset == -1) ? rung() : offset;
     allowed_op_funcs::exp(_n_test_samp, _feats[0]->test_value_ptr(offset + 2), node_value_arrs::get_test_value_ptr(_arr_ind, _feat_ind, offset));
-}
\ No newline at end of file
+}
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exponential.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.hpp
similarity index 93%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/exponential.hpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.hpp
index 400842c4..fd828cca 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exponential.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.hpp
@@ -69,6 +69,15 @@ public:
      */
     inline std::string expr(){return "exp(" + _feats[0]->expr() + ")";}
 
+    // DocString: exp_node_latex_expr
+    /**
+     * @brief Get the expression for the overall feature (From root node down)
+     */
+    inline std::string get_latex_expr(std::string cap)
+    {
+        return cap + "\\left(\\exp{" + _feats[0]->get_latex_expr("") + "}\\right)" + cap;
+    }
+
     // DocString: exp_node_set_value
     /**
      * @brief Set the values of the training data for the feature inside of the value storage arrays
@@ -125,4 +134,4 @@ public:
 };
 void generateExpNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inverse.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.cpp
similarity index 99%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/inverse.cpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.cpp
index 34b9eecc..b521f183 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inverse.cpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.cpp
@@ -1,4 +1,4 @@
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/inverse.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.hpp>
 
 void generateInvNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound)
 {
@@ -64,4 +64,4 @@ void InvNode::set_test_value(int offset)
 {
     offset = (offset == -1) ? rung() : offset;
     allowed_op_funcs::inv(_n_test_samp, _feats[0]->test_value_ptr(offset + 2), node_value_arrs::get_test_value_ptr(_arr_ind, _feat_ind, offset));
-}
\ No newline at end of file
+}
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inverse.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.hpp
similarity index 92%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/inverse.hpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.hpp
index 5fe90eab..fdfde7d3 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inverse.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.hpp
@@ -57,6 +57,15 @@ public:
      */
     inline std::string expr(){return "1.0 / (" + _feats[0]->expr() + ")";}
 
+    // DocString: inv_node_latex_expr
+    /**
+     * @brief Get the expression for the overall feature (From root node down)
+     */
+    inline std::string get_latex_expr(std::string cap)
+    {
+        return cap + "\\left(\\frac{1}{" +_feats[0]->get_latex_expr("") + "}\\right)" + cap;
+    }
+
     // DocString: inv_node_set_value
     /**
      * @brief Set the values of the training data for the feature inside of the value storage arrays
@@ -113,4 +122,4 @@ public:
 };
 void generateInvNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.cpp
similarity index 99%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/log.cpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.cpp
index addda7d8..c37f855a 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log.cpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.cpp
@@ -1,4 +1,4 @@
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/log.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.hpp>
 
 void generateLogNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound)
 {
@@ -72,4 +72,4 @@ void LogNode::set_test_value(int offset)
 {
     offset = (offset == -1) ? rung() : offset;
     allowed_op_funcs::log(_n_test_samp, _feats[0]->test_value_ptr(offset + 2), node_value_arrs::get_test_value_ptr(_arr_ind, _feat_ind, offset));
-}
\ No newline at end of file
+}
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.hpp
similarity index 93%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/log.hpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.hpp
index 086f9328..d8193767 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.hpp
@@ -69,6 +69,15 @@ public:
      */
     inline std::string expr(){return "log(" + _feats[0]->expr() + ")";}
 
+    // DocString: log_node_latex_expr
+    /**
+     * @brief Get the expression for the overall feature (From root node down)
+     */
+    inline std::string get_latex_expr(std::string cap)
+    {
+        return cap + "\\left(\\log{" + _feats[0]->get_latex_expr("") + "}\\right)" + cap;
+    }
+
     // DocString: log_node_set_value
     /**
      * @brief Set the values of the training data for the feature inside of the value storage arrays
@@ -125,4 +134,4 @@ public:
 };
 void generateLogNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/multiply.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.cpp
similarity index 99%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/multiply.cpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.cpp
index 90ca597a..b3c281e0 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/multiply.cpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.cpp
@@ -1,4 +1,4 @@
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/multiply.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.hpp>
 
 void generateMultNode(std::vector<node_ptr>& feat_list, node_ptr feat_1, node_ptr feat_2, int& feat_ind, double l_bound, double u_bound)
 {
@@ -93,4 +93,4 @@ void MultNode::set_test_value(int offset)
 {
     offset = (offset == -1) ? rung() : offset;
     allowed_op_funcs::mult(_n_test_samp, _feats[0]->test_value_ptr(offset + 2), _feats[1]->test_value_ptr(offset + 1), node_value_arrs::get_test_value_ptr(_arr_ind, _feat_ind, offset));
-}
\ No newline at end of file
+}
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/multiply.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.hpp
similarity index 92%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/multiply.hpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.hpp
index 4a56d570..89a84074 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/multiply.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.hpp
@@ -72,6 +72,15 @@ public:
      */
     inline std::string expr(){return "[(" + _feats[0]->expr() + ") * (" + _feats[1]->expr() + ")]";}
 
+    // DocString: mult_node_latex_expr
+    /**
+     * @brief Get the expression for the overall feature (From root node down)
+     */
+    inline std::string get_latex_expr(std::string cap)
+    {
+        return cap + "\\left(\\left(" + _feats[0]->get_latex_expr("") + "\\right) \\left(" + _feats[1]->get_latex_expr("") + "\\right)\\right)" + cap;
+    }
+
     // DocString: mult_node_set_value
     /**
      * @brief Set the values of the training data for the feature inside of the value storage arrays
@@ -128,4 +137,4 @@ public:
 };
 void generateMultNode(std::vector<node_ptr>& feat_list, node_ptr feat_1, node_ptr feat_2, int& feat_ind, double l_bound, double u_bound);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/negative_exponential.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.cpp
similarity index 98%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/negative_exponential.cpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.cpp
index 5f44491f..6dc0876c 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/negative_exponential.cpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.cpp
@@ -1,4 +1,4 @@
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/negative_exponential.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.hpp>
 
 void generateNegExpNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound)
 {
@@ -72,4 +72,4 @@ void NegExpNode::set_test_value(int offset)
 {
     offset = (offset == -1) ? rung() : offset;
     allowed_op_funcs::neg_exp(_n_test_samp, _feats[0]->test_value_ptr(offset + 2), node_value_arrs::get_test_value_ptr(_arr_ind, _feat_ind, offset));
-}
\ No newline at end of file
+}
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/negative_exponential.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.hpp
similarity index 93%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/negative_exponential.hpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.hpp
index f6d8cd8d..478523f2 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/negative_exponential.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.hpp
@@ -70,6 +70,15 @@ public:
      */
     inline std::string expr(){return "exp[-1.0*(" + _feats[0]->expr() + ")]";}
 
+    // DocString: neg_exp_node_latex_expr
+    /**
+     * @brief Get the expression for the overall feature (From root node down)
+     */
+    inline std::string get_latex_expr(std::string cap)
+    {
+        return cap + "\\left(\\exp{\\left(-" + _feats[0]->get_latex_expr("") + "\\right)}\\right)" + cap;
+    }
+
     // DocString: neg_exp_node_set_value
     /**
      * @brief Set the values of the training data for the feature inside of the value storage arrays
@@ -126,4 +135,4 @@ public:
 };
 void generateNegExpNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.cpp
similarity index 99%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin.cpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.cpp
index 4d18efb3..403dd2ef 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin.cpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.cpp
@@ -1,4 +1,4 @@
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/sin.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.hpp>
 
 void generateSinNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound)
 {
@@ -73,4 +73,4 @@ void SinNode::set_test_value(int offset)
 {
     offset = (offset == -1) ? rung() : offset;
     allowed_op_funcs::sin(_n_test_samp, _feats[0]->test_value_ptr(offset + 2), node_value_arrs::get_test_value_ptr(_arr_ind, _feat_ind, offset));
-}
\ No newline at end of file
+}
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.hpp
similarity index 93%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin.hpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.hpp
index cb0309a1..8edac3af 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.hpp
@@ -70,6 +70,15 @@ public:
      */
     inline std::string expr(){return "sin(" + _feats[0]->expr() + ")";}
 
+    // DocString: sin_node_latex_expr
+    /**
+     * @brief Get the expression for the overall feature (From root node down)
+     */
+    inline std::string get_latex_expr(std::string cap)
+    {
+        return cap + "\\left(\\sin{" + _feats[0]->get_latex_expr("") + "}\\right)" + cap;
+    }
+
     // DocString: sin_node_set_value
     /**
      * @brief Set the values of the training data for the feature inside of the value storage arrays
@@ -126,4 +135,4 @@ public:
 };
 void generateSinNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sixth_power.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sp/sixth_power.cpp
similarity index 99%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/sixth_power.cpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/sp/sixth_power.cpp
index c5478980..36cd7b48 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sixth_power.cpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sp/sixth_power.cpp
@@ -1,4 +1,4 @@
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/sixth_power.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/sp/sixth_power.hpp>
 
 void generateSixPowNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound)
 {
@@ -63,4 +63,4 @@ void SixPowNode::set_test_value(int offset)
 {
     offset = (offset == -1) ? rung() : offset;
     allowed_op_funcs::sixth_pow(_n_test_samp, _feats[0]->test_value_ptr(offset + 2), node_value_arrs::get_test_value_ptr(_arr_ind, _feat_ind, offset));
-}
\ No newline at end of file
+}
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sixth_power.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sp/sixth_power.hpp
similarity index 93%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/sixth_power.hpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/sp/sixth_power.hpp
index 43862b36..868ce0ed 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sixth_power.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sp/sixth_power.hpp
@@ -70,6 +70,15 @@ public:
      */
     inline std::string expr(){return "(" + _feats[0]->expr() + ")^6";}
 
+    // DocString: six_pow_node_latex_expr
+    /**
+     * @brief Get the expression for the overall feature (From root node down)
+     */
+    inline std::string get_latex_expr(std::string cap)
+    {
+        return cap + "\\left(" + _feats[0]->get_latex_expr("") + "^6\\right)" + cap;
+    }
+
     // DocString: six_pow_node_set_value
     /**
      * @brief Set the values of the training data for the feature inside of the value storage arrays
@@ -127,4 +136,4 @@ public:
 };
 void generateSixPowNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/square.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.cpp
similarity index 99%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/square.cpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.cpp
index de7ffd8d..d928fd1d 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/square.cpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.cpp
@@ -1,4 +1,4 @@
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/square.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.hpp>
 
 void generateSqNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound)
 {
@@ -64,4 +64,4 @@ void SqNode::set_test_value(int offset)
 {
     offset = (offset == -1) ? rung() : offset;
     allowed_op_funcs::sq(_n_test_samp, _feats[0]->test_value_ptr(offset + 2), node_value_arrs::get_test_value_ptr(_arr_ind, _feat_ind, offset));
-}
\ No newline at end of file
+}
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/square.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.hpp
similarity index 93%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/square.hpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.hpp
index 96085b05..19d1a5fe 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/square.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.hpp
@@ -69,6 +69,15 @@ public:
      */
     inline std::string expr(){return "(" + _feats[0]->expr() + ")^2";}
 
+    // DocString: sq_node_latex_expr
+    /**
+     * @brief Get the expression for the overall feature (From root node down)
+     */
+    inline std::string get_latex_expr(std::string cap)
+    {
+        return cap + "\\left(" + _feats[0]->get_latex_expr("") + "^2\\right)" + cap;
+    }
+
     // DocString: sq_node_set_value
     /**
      * @brief Set the values of the training data for the feature inside of the value storage arrays
@@ -125,4 +134,4 @@ public:
 };
 void generateSqNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/square_root.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.cpp
similarity index 98%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/square_root.cpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.cpp
index 1bd35e74..d4e4a561 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/square_root.cpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.cpp
@@ -1,4 +1,4 @@
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/square_root.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.hpp>
 
 void generateSqrtNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound)
 {
@@ -64,4 +64,4 @@ void SqrtNode::set_test_value(int offset)
 {
     offset = (offset == -1) ? rung() : offset;
     allowed_op_funcs::sqrt(_n_test_samp, _feats[0]->test_value_ptr(offset + 2), node_value_arrs::get_test_value_ptr(_arr_ind, _feat_ind, offset));
-}
\ No newline at end of file
+}
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/square_root.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.hpp
similarity index 93%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/square_root.hpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.hpp
index 34a951b1..cee1ca90 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/square_root.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.hpp
@@ -70,6 +70,15 @@ public:
      */
     inline std::string expr(){return "sqrt(" + _feats[0]->expr() + ")";}
 
+    // DocString: sqrt_node_latex_expr
+    /**
+     * @brief Get the expression for the overall feature (From root node down)
+     */
+    inline std::string get_latex_expr(std::string cap)
+    {
+        return cap + "\\left(\\sqrt{" + _feats[0]->get_latex_expr("") + "}\\right)" + cap;
+    }
+
     // DocString: sqrt_node_set_value
     /**
      * @brief Set the values of the training data for the feature inside of the value storage arrays
@@ -126,4 +135,4 @@ public:
 };
 void generateSqrtNode(std::vector<node_ptr>& feat_list, node_ptr feat, int& feat_ind, double l_bound, double u_bound);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/subtract.cpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.cpp
similarity index 99%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/subtract.cpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.cpp
index b1ab57c9..e0a18275 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/subtract.cpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.cpp
@@ -1,4 +1,4 @@
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/subtract.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.hpp>
 void generateSubNode(std::vector<node_ptr>& feat_list, node_ptr feat_1, node_ptr feat_2, int& feat_ind, double l_bound, double u_bound)
 {
     ++feat_ind;
@@ -91,4 +91,4 @@ void SubNode::set_test_value(int offset)
 {
     offset = (offset == -1) ? rung() : offset;
     allowed_op_funcs::sub(_n_test_samp, _feats[0]->test_value_ptr(offset + 2), _feats[1]->test_value_ptr(offset + 1), node_value_arrs::get_test_value_ptr(_arr_ind, _feat_ind, offset));
-}
\ No newline at end of file
+}
diff --git a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/subtract.hpp b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.hpp
similarity index 93%
rename from src/feature_creation/node/operator_nodes/allowed_operator_nodes/subtract.hpp
rename to src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.hpp
index f2439558..525892b2 100644
--- a/src/feature_creation/node/operator_nodes/allowed_operator_nodes/subtract.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.hpp
@@ -72,6 +72,15 @@ public:
      */
     inline std::string expr(){return "[(" + _feats[0]->expr() + ") - (" + _feats[1]->expr() + ")]";}
 
+    // DocString: sub_node_latex_expr
+    /**
+     * @brief Get the expression for the overall feature (From root node down)
+     */
+    inline std::string get_latex_expr(std::string cap)
+    {
+        return cap + "\\left(" + _feats[0]->get_latex_expr("") + " - " + _feats[1]->get_latex_expr("") + "\\right)" + cap;
+    }
+
     // DocString: sub_node_set_value
     /**
      * @brief Set the values of the training data for the feature inside of the value storage arrays
@@ -128,4 +137,4 @@ public:
 };
 void generateSubNode(std::vector<node_ptr>& feat_list, node_ptr feat_1, node_ptr feat_2, int& feat_ind, double l_bound, double u_bound);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/feature_creation/node/operator_nodes/allowed_ops.hpp b/src/feature_creation/node/operator_nodes/allowed_ops.hpp
index 6df047fc..a522b4d0 100644
--- a/src/feature_creation/node/operator_nodes/allowed_ops.hpp
+++ b/src/feature_creation/node/operator_nodes/allowed_ops.hpp
@@ -8,23 +8,24 @@
 #define ALLOWED_OP_NODES
 
 #include <feature_creation/node/Node.hpp>
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/add.hpp>
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/subtract.hpp>
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/absolute_value.hpp>
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/absolute_difference.hpp>
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/multiply.hpp>
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/divide.hpp>
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/inverse.hpp>
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/exponential.hpp>
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/negative_exponential.hpp>
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/log.hpp>
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/square.hpp>
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/cube.hpp>
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/sixth_power.hpp>
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/square_root.hpp>
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/cube_root.hpp>
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/sin.hpp>
-#include <feature_creation/node/operator_nodes/allowed_operator_nodes/cos.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/add/add.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/sub/subtract.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/abs_diff/absolute_difference.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/mult/multiply.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/div/divide.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/abs/absolute_value.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/inv/inverse.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/exp/exponential.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/neg_exp/negative_exponential.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/log/log.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/sp/sixth_power.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/cb/cube.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/sq/square.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/sqrt/square_root.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/cbrt/cube_root.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/sin/sin.hpp>
+#include <feature_creation/node/operator_nodes/allowed_operator_nodes/cos/cos.hpp>
+
 #include <map>
 #include <iostream>
 
@@ -40,4 +41,4 @@ namespace allowed_op_maps
 };
 
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/feature_creation/node/utils.cpp b/src/feature_creation/node/utils.cpp
index e09c9f8a..3ebab644 100644
--- a/src/feature_creation/node/utils.cpp
+++ b/src/feature_creation/node/utils.cpp
@@ -105,7 +105,7 @@ std::vector<node_ptr> str2node::phi_selected_from_file(std::string filename, std
         new_feat->set_selected(true);
         new_feat->set_d_mat_ind(feat_sel);
         new_feat->set_value();
-        phi_selected.push_back(std::make_shared<ModelNode>(feat_ind, new_feat->rung(), new_feat->expr(), new_feat->postfix_expr(), new_feat->value(), new_feat->test_value(), new_feat->unit()));
+        phi_selected.push_back(std::make_shared<ModelNode>(feat_ind, new_feat->rung(), new_feat->expr(), new_feat->latex_expr(), new_feat->postfix_expr(), new_feat->value(), new_feat->test_value(), new_feat->unit()));
         ++feat_ind;
         ++feat_sel;
     }
diff --git a/src/feature_creation/units/Unit.cpp b/src/feature_creation/units/Unit.cpp
index 1d71304a..f96e5bd7 100644
--- a/src/feature_creation/units/Unit.cpp
+++ b/src/feature_creation/units/Unit.cpp
@@ -82,6 +82,33 @@ std::string Unit::toString() const
     return unit_rep.str().substr(3);
 }
 
+std::string Unit::toLatexString() const
+{
+    std::stringstream unit_rep;
+    std::vector<std::string> keys;
+    keys.reserve(_dct.size());
+
+    for(auto& el : _dct)
+        keys.push_back(el.first);
+
+    std::sort(keys.begin(), keys.end());
+
+    for(auto& key : keys)
+    {
+        if(_dct.at(key) == 1)
+            unit_rep << key;
+        else if(_dct.at(key) > 0)
+            unit_rep << key << "$^\\text{" << _dct.at(key) << "}$";
+        else if(_dct.at(key) < 0)
+            unit_rep << key << "$^\\text{-" << std::abs(_dct.at(key)) << "}$";
+    }
+
+    if(unit_rep.str().size() == 0)
+        return "Unitless";
+
+    return unit_rep.str();
+}
+
 Unit Unit::operator*(Unit unit_2)
 {
     std::map<std::string, double> to_out = dct();
diff --git a/src/feature_creation/units/Unit.hpp b/src/feature_creation/units/Unit.hpp
index 39ad519f..9dcf29d3 100644
--- a/src/feature_creation/units/Unit.hpp
+++ b/src/feature_creation/units/Unit.hpp
@@ -81,6 +81,12 @@ public:
      */
     std::string toString() const;
 
+    // DocString: unit_latex_str
+    /**
+     * @brief Convert the unit into a latexified string
+     */
+    std::string toLatexString() const;
+
     // DocString: unit_mult
     /**
      * @brief Multiply operator for units
@@ -175,4 +181,4 @@ public:
  */
 std::ostream& operator<< (std::ostream& outStream, const Unit& unit);
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/inputs/InputParser.cpp b/src/inputs/InputParser.cpp
index f04da374..affd8d48 100644
--- a/src/inputs/InputParser.cpp
+++ b/src/inputs/InputParser.cpp
@@ -246,6 +246,7 @@ void InputParser::generate_feature_space(std::shared_ptr<MPI_Interface> comm, st
     else
     {
         _prop_unit = Unit(units[propind]);
+        _prop_label = headers[propind];
         _prop_train = std::vector<double>(data[propind].size(), 0.0);
         _prop_test = std::vector<double>(test_data[propind].size(), 0.0);
 
@@ -288,13 +289,16 @@ void stripComments(std::string& filename)
 
     //search for '//', delete everything following, print remainder to new file
     std::string line;
-    int found, found2;
+    int found, found2, find_py_comment;
     while (std::getline(inputfile,line))
     {
         found  = line.find('/');
         found2 = line.find('/', found+1);
+        find_py_comment = line.find('#');
         if (found != line.npos && found2 == found+1)
             inputcopy << line.erase(found, line.length()) << std::endl;
+        else if(find_py_comment != std::string::npos)
+            inputcopy << line.erase(find_py_comment, line.length()) << std::endl;
         else
             inputcopy << line << std::endl;
     }
diff --git a/src/inputs/InputParser.hpp b/src/inputs/InputParser.hpp
index fd1834d3..42cd08b4 100644
--- a/src/inputs/InputParser.hpp
+++ b/src/inputs/InputParser.hpp
@@ -47,6 +47,7 @@ public:
     std::string _filename; //!< Name of the input file
     std::string _data_file; //!< Name of the data file
     std::string _prop_key; //!< Key used to find the property column in the data file
+    std::string _prop_label; //!< Label used to describe the property
     std::string _task_key; //!< Key used to find the task column in the data file
     std::string _calc_type; //!< Type of projection operator to use
 
diff --git a/src/main.cpp b/src/main.cpp
index b2572680..86126af3 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -34,21 +34,21 @@ int main(int argc, char const *argv[])
     mpi_setup::comm->barrier();
     duration = omp_get_wtime() - start;
     if(mpi_setup::comm->rank() == 0)
-        std::cout<< "time input_parsing/Feature space generation: " << duration << std::endl;
+        std::cout<< "time input_parsing/Feature space generation: " << duration << " s" << std::endl;
 
     node_value_arrs::initialize_d_matrix_arr();
     if(IP._calc_type.compare("regression") == 0)
     {
-        SISSORegressor sisso(IP._feat_space, IP._prop_unit, IP._prop_train, IP._prop_test, IP._task_sizes_train, IP._task_sizes_test, IP._leave_out_inds, IP._n_dim, IP._n_residuals, IP._n_models_store, IP._fix_intercept);
+        SISSORegressor sisso(IP._feat_space, IP._prop_label, IP._prop_unit, IP._prop_train, IP._prop_test, IP._task_sizes_train, IP._task_sizes_test, IP._leave_out_inds, IP._n_dim, IP._n_residuals, IP._n_models_store, IP._fix_intercept);
         sisso.fit();
 
         if(mpi_setup::comm->rank() == 0)
         {
             for(int ii = 0; ii < sisso.models().size(); ++ii)
             {
-                std::cout << "Train RMSE: " << sisso.models()[ii][0].rmse();
+                std::cout << "Train RMSE: " << sisso.models()[ii][0].rmse() << " " << IP._prop_unit;
                 if(IP._prop_test.size() > 0)
-                    std::cout << "; Test RMSE: " << sisso.models()[ii][0].test_rmse() << std::endl;
+                    std::cout << "; Test RMSE: " << sisso.models()[ii][0].test_rmse() << " " << IP._prop_unit << std::endl;
                 else
                     std::cout << std::endl;
                 std::cout << sisso.models()[ii][0] << "\n" << std::endl;
@@ -57,7 +57,7 @@ int main(int argc, char const *argv[])
     }
     if(IP._calc_type.compare("log_regression") == 0)
     {
-        SISSOLogRegressor sisso(IP._feat_space, IP._prop_unit, IP._prop_train, IP._prop_test, IP._task_sizes_train, IP._task_sizes_test, IP._leave_out_inds, IP._n_dim, IP._n_residuals, IP._n_models_store, IP._fix_intercept);
+        SISSOLogRegressor sisso(IP._feat_space, IP._prop_label, IP._prop_unit, IP._prop_train, IP._prop_test, IP._task_sizes_train, IP._task_sizes_test, IP._leave_out_inds, IP._n_dim, IP._n_residuals, IP._n_models_store, IP._fix_intercept);
         sisso.fit();
 
         if(mpi_setup::comm->rank() == 0)
@@ -75,16 +75,16 @@ int main(int argc, char const *argv[])
     }
     else if(IP._calc_type.compare("classification") == 0)
     {
-        SISSOClassifier sisso(IP._feat_space, IP._prop_unit, IP._prop_train, IP._prop_test, IP._task_sizes_train, IP._task_sizes_test, IP._leave_out_inds, IP._n_dim, IP._n_residuals, IP._n_models_store);
+        SISSOClassifier sisso(IP._feat_space, IP._prop_label, IP._prop_unit, IP._prop_train, IP._prop_test, IP._task_sizes_train, IP._task_sizes_test, IP._leave_out_inds, IP._n_dim, IP._n_residuals, IP._n_models_store);
         sisso.fit();
 
         if(mpi_setup::comm->rank() == 0)
         {
             for(int ii = 0; ii < sisso.models().size(); ++ii)
             {
-                std::cout << "Train Error: " << sisso.models()[ii][0].percent_train_error();
+                std::cout << "Percent of training data misclassified: " << sisso.models()[ii][0].percent_train_error() << "%";
                 if(IP._prop_test.size() > 0)
-                    std::cout << "; Test Error: " << sisso.models()[ii][0].percent_test_error() << std::endl;
+                    std::cout << "; Percent of test data misclassified: " << sisso.models()[ii][0].percent_test_error() << "%" << std::endl;
                 else
                     std::cout << std::endl;
                 std::cout << sisso.models()[ii][0] << "\n" << std::endl;
diff --git a/src/python/__init__.py b/src/python/__init__.py
index 06674b9a..46cee816 100644
--- a/src/python/__init__.py
+++ b/src/python/__init__.py
@@ -100,6 +100,7 @@ def generate_phi_0_from_csv(
 
     # Get prop
     prop_key = df_cols[prop_ind]
+    prop_label = prop_key.split("(")[0].rstrip()
     prop_unit = get_unit(prop_key)
     prop = df[prop_key].to_numpy()
     df = df.drop([prop_key], axis=1)
@@ -183,6 +184,7 @@ def generate_phi_0_from_csv(
 
     return (
         phi_0,
+        prop_label,
         Unit(prop_unit),
         prop[train_inds].flatten(),
         prop[leave_out_inds].flatten(),
@@ -279,6 +281,7 @@ def generate_fs_sr_from_csv(
     """
     (
         phi_0,
+        prop_label,
         prop_unit,
         prop,
         prop_test,
@@ -301,6 +304,7 @@ def generate_fs_sr_from_csv(
     if calc_type.lower() == "regression":
         sr = SISSORegressor(
             fs,
+            prop_label,
             prop_unit,
             prop,
             prop_test,
@@ -314,6 +318,7 @@ def generate_fs_sr_from_csv(
     else:
         sr = SISSOClassifier(
             fs,
+            prop_label,
             prop_unit,
             prop,
             prop_test,
diff --git a/src/python/bindings_docstring_keyed.cpp b/src/python/bindings_docstring_keyed.cpp
index d11ff0b2..bbbc86c3 100644
--- a/src/python/bindings_docstring_keyed.cpp
+++ b/src/python/bindings_docstring_keyed.cpp
@@ -90,6 +90,7 @@ void sisso::feature_creation::registerUnit()
         .def(self == self)
         .def(self != self)
         .def("__pow__", &Unit::operator^, "@DocString_unit_pow@")
+        .add_property("latex_str", &Unit::toLatexString, "@DocString_unit_latex_str@")
     ;
 }
 
@@ -97,6 +98,7 @@ void sisso::feature_creation::node::registerNode()
 {
     void (Node::*reindex_1)(int) = &Node::reindex;
     void (Node::*reindex_2)(int, int) = &Node::reindex;
+
     class_<sisso::feature_creation::node::NodeWrap, boost::noncopyable>("Node", no_init)
         .def("reindex", reindex_1, "@DocString_node_reindex_1@")
         .def("reindex", reindex_2, "@DocString_node_reindex_2@")
@@ -112,6 +114,7 @@ void sisso::feature_creation::node::registerNode()
         .add_property("test_value", &Node::test_value_py, "@DocString_node_test_value_py@")
         .add_property("primary_feat_decomp", &Node::primary_feature_decomp_py, "@DocString_node_primary_feature_decomp@")
         .add_property("postfix_expr", &Node::postfix_expr, "@DocString_node_postfix_expr@")
+        .add_property("latex_expr", &Node::latex_expr, "@DocString_node_latex_expr@")
         .def("expr", pure_virtual(&Node::expr), "@DocString_node_expr@")
         .def("unit", pure_virtual(&Node::unit), "@DocString_node_unit@")
         .def("set_value", pure_virtual(&Node::set_value), "@DocString_node_set_value@")
@@ -149,7 +152,7 @@ void sisso::feature_creation::node::registerModelNode()
     std::string (ModelNode::*expr_const)() const = &ModelNode::expr;
 
     using namespace boost::python;
-    class_<ModelNode, bases<FeatureNode>>("ModelNode", init<int, int, std::string, std::string, std::vector<double>, std::vector<double>, Unit>())
+    class_<ModelNode, bases<FeatureNode>>("ModelNode", init<int, int, std::string, std::string, std::string, std::vector<double>, std::vector<double>, Unit>())
         .def("is_nan", &ModelNode::is_nan, "@DocString_model_node_is_nan@")
         .def("is_const", &ModelNode::is_const, "@DocString_model_node_is_const@")
         .def("set_value", &ModelNode::set_value, "@DocString_model_node_set_value@")
@@ -356,6 +359,7 @@ void sisso::descriptor_identifier::registerModel()
         .add_property("feats", &Model::feats, "@DocString_model_feats@")
         .add_property("coefs", &Model::coefs, "@DocString_model_coefs@")
         .add_property("prop_unit", &Model::prop_unit, "@DocString_model_prop_unit@")
+        .add_property("prop_label", &Model::prop_label, "@DocString_model_prop_label@")
         .add_property("task_size_train", &Model::task_sizes_train, "@DocString_model_task_sizes_train")
         .add_property("task_size_test", &Model::task_sizes_test, "@DocString_model_task_sizes_test")
         .add_property("fix_intercept", &Model::fix_intercept, "@DocString_model_fix_intercept")
@@ -364,15 +368,18 @@ void sisso::descriptor_identifier::registerModel()
 
 void sisso::descriptor_identifier::registerModelRegressor()
 {
-    class_<ModelRegressor, bases<Model>>("ModelRegressor", init<Unit, std::vector<double>, std::vector<double>, std::vector<model_node_ptr>, std::vector<int>, std::vector<int>, bool>())
+    class_<ModelRegressor, bases<Model>>("ModelRegressor", init<std::string, Unit, std::vector<double>, std::vector<double>, std::vector<model_node_ptr>, std::vector<int>, std::vector<int>, bool>())
         .def(init<std::string>())
         .def(init<std::string, std::string>())
         .def("__str__", &ModelRegressor::toString, "@DocString_model_reg_str@")
         .def("__repr__", &ModelRegressor::toString, "@DocString_model_reg_str@")
+        .add_property("latex_str", &ModelRegressor::toLatexString, "@DocString_model_reg_latex_str@")
         .add_property("fit", &ModelRegressor::prop_train_est, "@DocString_model_reg_prop_train_est@")
         .add_property("predict", &ModelRegressor::prop_test_est, "@DocString_model_reg_prop_test_est@")
         .add_property("train_error", &ModelRegressor::train_error, "@DocString_model_reg_train_error@")
         .add_property("test_error", &ModelRegressor::test_error, "@DocString_model_reg_test_error@")
+        .add_property("r2", &ModelRegressor::r2, "@DocString_model_reg_r2@")
+        .add_property("test_r2", &ModelRegressor::test_r2, "@DocString_model_reg_test_r2@")
         .add_property("rmse", &ModelRegressor::rmse, "@DocString_model_reg_rmse@")
         .add_property("test_rmse", &ModelRegressor::test_rmse, "@DocString_model_reg_test_rmse@")
         .add_property("max_ae", &ModelRegressor::max_ae, "@DocString_model_reg_max_ae@")
@@ -394,17 +401,18 @@ void sisso::descriptor_identifier::registerModelRegressor()
 
 void sisso::descriptor_identifier::registerModelLogRegressor()
 {
-    class_<ModelLogRegressor, bases<ModelRegressor>>("ModelLogRegressor", init<Unit, std::vector<double>, std::vector<double>, std::vector<model_node_ptr>, std::vector<int>, std::vector<int>, bool>())
+    class_<ModelLogRegressor, bases<ModelRegressor>>("ModelLogRegressor", init<std::string, Unit, std::vector<double>, std::vector<double>, std::vector<model_node_ptr>, std::vector<int>, std::vector<int>, bool>())
         .def(init<std::string>())
         .def(init<std::string, std::string>())
-        .def("__str__", &ModelRegressor::toString, "@DocString_model_reg_str@")
-        .def("__repr__", &ModelRegressor::toString, "@DocString_model_reg_str@")
+        .def("__str__", &ModelLogRegressor::toString, "@DocString_model_log_reg_str@")
+        .def("__repr__", &ModelLogRegressor::toString, "@DocString_model_log_reg_str@")
+        .add_property("latex_str", &ModelLogRegressor::toLatexString, "@DocString_model_log_reg_latex_str@")
     ;
 }
 
 void sisso::descriptor_identifier::registerModelClassifier()
 {
-    class_<ModelClassifier, bases<Model>>("ModelClassifier", init<Unit, std::vector<double>, std::vector<double>, std::vector<model_node_ptr>, std::vector<int>, std::vector<int>, bool>())
+    class_<ModelClassifier, bases<Model>>("ModelClassifier", init<std::string, Unit, std::vector<double>, std::vector<double>, std::vector<model_node_ptr>, std::vector<int>, std::vector<int>, bool>())
         .def(init<std::string>())
         .def(init<std::string, std::string>())
         .def(init<ModelClassifier, py::list, np::ndarray, np::ndarray>())
@@ -412,6 +420,7 @@ void sisso::descriptor_identifier::registerModelClassifier()
         .def("__str__", &ModelClassifier::toString, "@DocString_model_class_str@")
         .def("__repr__", &ModelClassifier::toString, "@DocString_model_class_str@")
         .def("to_file", &ModelClassifier::to_file_py, "@DocString_model_class_to_file@")
+        .add_property("latex_str", &ModelClassifier::toLatexString, "@DocString_model_class_latex_str@")
         .add_property("fit", &ModelClassifier::prop_train_est, "@DocString_model_class_prop_train_est@")
         .add_property("predict", &ModelClassifier::prop_test_est, "@DocString_model_class_prop_test_est@")
         .add_property("train_error", &ModelClassifier::train_error, "@DocString_model_class_train_error@")
@@ -442,26 +451,26 @@ void sisso::descriptor_identifier::registerSISSO_DI()
 
 void sisso::descriptor_identifier::registerSISSORegressor()
 {
-    class_<SISSORegressor, bases<SISSO_DI>>("SISSORegressor", init<std::shared_ptr<FeatureSpace>, Unit, np::ndarray, np::ndarray, py::list, py::list, py::list, int, int, int, optional<bool>>())
+    class_<SISSORegressor, bases<SISSO_DI>>("SISSORegressor", init<std::shared_ptr<FeatureSpace>, std::string, Unit, np::ndarray, np::ndarray, py::list, py::list, py::list, int, int, int, optional<bool>>())
         .def("fit", &SISSORegressor::fit, "@DocString_sisso_reg_fit@")
-        .def(init<std::shared_ptr<FeatureSpace>, Unit, py::list, py::list, py::list, py::list, py::list, int, int, int, optional<bool>>())
+        .def(init<std::shared_ptr<FeatureSpace>, std::string, Unit, py::list, py::list, py::list, py::list, py::list, int, int, int, optional<bool>>())
         .add_property("models", &SISSORegressor::models_py, "@DocString_sisso_reg_models_py@")
     ;
 }
 
 void sisso::descriptor_identifier::registerSISSOLogRegressor()
 {
-    class_<SISSOLogRegressor, bases<SISSORegressor>>("SISSOLogRegressor", init<std::shared_ptr<FeatureSpace>, Unit, np::ndarray, np::ndarray, py::list, py::list, py::list, int, int, int, optional<bool>>())
+    class_<SISSOLogRegressor, bases<SISSORegressor>>("SISSOLogRegressor", init<std::shared_ptr<FeatureSpace>, std::string, Unit, np::ndarray, np::ndarray, py::list, py::list, py::list, int, int, int, optional<bool>>())
         .def("fit", &SISSOLogRegressor::fit, "@DocString_sisso_reg_fit@")
-        .def(init<std::shared_ptr<FeatureSpace>, Unit, py::list, py::list, py::list, py::list, py::list, int, int, int, optional<bool>>())
-        .add_property("models", &SISSOLogRegressor::models_py, "@DocString_sisso_reg_models_py@")
+        .def(init<std::shared_ptr<FeatureSpace>, std::string, Unit, py::list, py::list, py::list, py::list, py::list, int, int, int, optional<bool>>())
+        .add_property("models", &SISSOLogRegressor::models_log_reg_py, "@DocString_sisso_log_reg_models_py@")
     ;
 }
 
 void sisso::descriptor_identifier::registerSISSOClassifier()
 {
-    class_<SISSOClassifier, bases<SISSO_DI>>("SISSOClassifier", init<std::shared_ptr<FeatureSpace>, Unit, np::ndarray, np::ndarray, py::list, py::list, py::list, int, int, int>())
-        .def(init<std::shared_ptr<FeatureSpace>, Unit, py::list, py::list, py::list, py::list, py::list, int, int, int>())
+    class_<SISSOClassifier, bases<SISSO_DI>>("SISSOClassifier", init<std::shared_ptr<FeatureSpace>, std::string, Unit, np::ndarray, np::ndarray, py::list, py::list, py::list, int, int, int>())
+        .def(init<std::shared_ptr<FeatureSpace>, std::string, Unit, py::list, py::list, py::list, py::list, py::list, int, int, int>())
         .def("fit", &SISSOClassifier::fit, "@DocString_sisso_class_fit@")
         .add_property("models", &SISSOClassifier::models_py, "@DocString_sisso_class_models_py@")
     ;
diff --git a/src/python/bindings_docstring_keyed.hpp b/src/python/bindings_docstring_keyed.hpp
index aab68b43..cee1f5b8 100644
--- a/src/python/bindings_docstring_keyed.hpp
+++ b/src/python/bindings_docstring_keyed.hpp
@@ -38,6 +38,7 @@ namespace sisso
             {
             public:
                 inline std::string expr(){return this->get_override("expr")();}
+                inline std::string get_latex_expr(std::string cap){return this->get_override("latex_expr")();}
                 inline Unit unit(){return this->get_override("unit")();}
                 inline std::vector<double> value(){return this->get_override("value")();}
                 inline std::vector<double> test_value(){return this->get_override("test_value")();}
@@ -69,6 +70,7 @@ namespace sisso
                 inline NODE_TYPE type(){return this->get_override("type")();}
                 inline int rung(int cur_rung = 0){return this->get_override("rung")();}
                 inline std::string expr(){return this->get_override("expr")();}
+                inline std::string get_latex_expr(std::string cap){return this->get_override("latex_expr")();}
                 inline Unit unit(){return this->get_override("unit")();}
                 inline std::string get_postfix_term(){return this->get_override("get_postfix_term")();}
                 inline void update_add_sub_leaves(std::map<std::string, int>& add_sub_leaves, int pl_mn, int& expected_abs_tot){this->get_override("update_add_sub_leaves")();}
@@ -103,6 +105,7 @@ namespace sisso
                     .def("set_test_value", py::pure_virtual(&OperatorNode<N>::set_test_value), "@DocString_op_node_set_test_value@")
                     .def("rung", py::pure_virtual(&OperatorNode<N>::rung), "@DocString_op_node_rung@")
                     .def("expr", py::pure_virtual(&OperatorNode<N>::expr), "@DocString_op_node_expr@")
+                    .def("latex_expr", py::pure_virtual(&OperatorNode<N>::latex_expr), "@DocString_op_node_latex_expr@")
                     .def("unit", py::pure_virtual(&OperatorNode<N>::unit), "@DocString_op_node_unit@")
                     .add_property("n_feats", &OperatorNode<N>::n_feats, "@DocString_op_node_n_feats@")
                     .add_property("feat", &OperatorNode<N>::feat, "@DocString_op_node_feat@")
@@ -252,4 +255,4 @@ namespace sisso
     }
 }
 
-#endif
\ No newline at end of file
+#endif
diff --git a/src/python/descriptor_identifier/SISSOClassifier.cpp b/src/python/descriptor_identifier/SISSOClassifier.cpp
index b89ccbaa..ce949e2a 100644
--- a/src/python/descriptor_identifier/SISSOClassifier.cpp
+++ b/src/python/descriptor_identifier/SISSOClassifier.cpp
@@ -2,6 +2,7 @@
 
 SISSOClassifier::SISSOClassifier(
     std::shared_ptr<FeatureSpace> feat_space,
+    std::string prop_label,
     Unit prop_unit,
     np::ndarray prop,
     np::ndarray prop_test,
@@ -12,7 +13,7 @@ SISSOClassifier::SISSOClassifier(
     int n_residual,
     int n_models_store
 ) :
-    SISSO_DI(feat_space, prop_unit, prop, prop_test, task_sizes_train, task_sizes_test, leave_out_inds, n_dim, n_residual, n_models_store, false),
+    SISSO_DI(feat_space, prop_label, prop_unit, prop, prop_test, task_sizes_train, task_sizes_test, leave_out_inds, n_dim, n_residual, n_models_store, false),
     _c(100.0),
     _width(1.0e-5),
     _n_class(1)
@@ -26,6 +27,7 @@ SISSOClassifier::SISSOClassifier(
 
 SISSOClassifier::SISSOClassifier(
     std::shared_ptr<FeatureSpace> feat_space,
+    std::string prop_label,
     Unit prop_unit,
     py::list prop,
     py::list prop_test,
@@ -36,7 +38,7 @@ SISSOClassifier::SISSOClassifier(
     int n_residual,
     int n_models_store
 ) :
-    SISSO_DI(feat_space, prop_unit, prop, prop_test, task_sizes_train, task_sizes_test, leave_out_inds, n_dim, n_residual, n_models_store, false),
+    SISSO_DI(feat_space, prop_label, prop_unit, prop, prop_test, task_sizes_train, task_sizes_test, leave_out_inds, n_dim, n_residual, n_models_store, false),
     _c(100.0),
     _width(1.0e-5),
     _n_class(1)
diff --git a/src/python/descriptor_identifier/SISSOLogRegressor.cpp b/src/python/descriptor_identifier/SISSOLogRegressor.cpp
index c20a0351..3d3f02bd 100644
--- a/src/python/descriptor_identifier/SISSOLogRegressor.cpp
+++ b/src/python/descriptor_identifier/SISSOLogRegressor.cpp
@@ -2,6 +2,7 @@
 
 SISSOLogRegressor::SISSOLogRegressor(
     std::shared_ptr<FeatureSpace> feat_space,
+    std::string prop_label,
     Unit prop_unit,
     np::ndarray prop,
     np::ndarray prop_test,
@@ -13,7 +14,7 @@ SISSOLogRegressor::SISSOLogRegressor(
     int n_models_store,
     bool fix_intercept
 ) :
-    SISSORegressor(feat_space, prop_unit, prop, prop_test, task_sizes_train, task_sizes_test, leave_out_inds, n_dim, n_residual, n_models_store, fix_intercept),
+    SISSORegressor(feat_space, prop_label, prop_unit, prop, prop_test, task_sizes_train, task_sizes_test, leave_out_inds, n_dim, n_residual, n_models_store, fix_intercept),
     _prop_no_log(_prop)
 {
     if(*std::min_element(_prop.begin(), _prop.end()) <= 0.0)
@@ -24,6 +25,7 @@ SISSOLogRegressor::SISSOLogRegressor(
 
 SISSOLogRegressor::SISSOLogRegressor(
     std::shared_ptr<FeatureSpace> feat_space,
+    std::string prop_label,
     Unit prop_unit,
     py::list prop,
     py::list prop_test,
@@ -35,7 +37,7 @@ SISSOLogRegressor::SISSOLogRegressor(
     int n_models_store,
     bool fix_intercept
 ) :
-    SISSORegressor(feat_space, prop_unit, prop, prop_test, task_sizes_train, task_sizes_test, leave_out_inds, n_dim, n_residual, n_models_store, fix_intercept),
+    SISSORegressor(feat_space, prop_label, prop_unit, prop, prop_test, task_sizes_train, task_sizes_test, leave_out_inds, n_dim, n_residual, n_models_store, fix_intercept),
     _prop_no_log(_prop)
 {
     if(*std::min_element(_prop.begin(), _prop.end()) <= 0.0)
@@ -43,7 +45,7 @@ SISSOLogRegressor::SISSOLogRegressor(
     std::transform(_prop.begin(), _prop.end(), _prop.begin(), [](double p){return std::log(p);});
 }
 
-py::list SISSOLogRegressor::models_py()
+py::list SISSOLogRegressor::models_log_reg_py()
 {
     py::list model_list;
     for(auto& mod_list : _models)
diff --git a/src/python/descriptor_identifier/SISSORegressor.cpp b/src/python/descriptor_identifier/SISSORegressor.cpp
index b4022200..12114787 100644
--- a/src/python/descriptor_identifier/SISSORegressor.cpp
+++ b/src/python/descriptor_identifier/SISSORegressor.cpp
@@ -2,6 +2,7 @@
 
 SISSORegressor::SISSORegressor(
     std::shared_ptr<FeatureSpace> feat_space,
+    std::string prop_label,
     Unit prop_unit,
     np::ndarray prop,
     np::ndarray prop_test,
@@ -13,11 +14,12 @@ SISSORegressor::SISSORegressor(
     int n_models_store,
     bool fix_intercept
 ) :
-    SISSO_DI(feat_space, prop_unit, prop, prop_test, task_sizes_train, task_sizes_test, leave_out_inds, n_dim, n_residual, n_models_store, fix_intercept)
+    SISSO_DI(feat_space, prop_label, prop_unit, prop, prop_test, task_sizes_train, task_sizes_test, leave_out_inds, n_dim, n_residual, n_models_store, fix_intercept)
 {}
 
 SISSORegressor::SISSORegressor(
     std::shared_ptr<FeatureSpace> feat_space,
+    std::string prop_label,
     Unit prop_unit,
     py::list prop,
     py::list prop_test,
@@ -29,7 +31,7 @@ SISSORegressor::SISSORegressor(
     int n_models_store,
     bool fix_intercept
 ) :
-    SISSO_DI(feat_space, prop_unit, prop, prop_test, task_sizes_train, task_sizes_test, leave_out_inds, n_dim, n_residual, n_models_store, fix_intercept)
+    SISSO_DI(feat_space, prop_label, prop_unit, prop, prop_test, task_sizes_train, task_sizes_test, leave_out_inds, n_dim, n_residual, n_models_store, fix_intercept)
 {}
 
 py::list SISSORegressor::models_py()
diff --git a/src/python/descriptor_identifier/SISSO_DI.cpp b/src/python/descriptor_identifier/SISSO_DI.cpp
index 46989104..c7920cc4 100644
--- a/src/python/descriptor_identifier/SISSO_DI.cpp
+++ b/src/python/descriptor_identifier/SISSO_DI.cpp
@@ -2,6 +2,7 @@
 
 SISSO_DI::SISSO_DI(
     std::shared_ptr<FeatureSpace> feat_space,
+    std::string prop_label,
     Unit prop_unit,
     np::ndarray prop,
     np::ndarray prop_test,
@@ -19,6 +20,7 @@ SISSO_DI::SISSO_DI(
     _task_sizes_train(python_conv_utils::from_list<int>(task_sizes_train)),
     _task_sizes_test(python_conv_utils::from_list<int>(task_sizes_test)),
     _leave_out_inds(python_conv_utils::from_list<int>(leave_out_inds)),
+    _prop_label(prop_label),
     _prop_unit(prop_unit),
     _feat_space(feat_space),
     _mpi_comm(feat_space->mpi_comm()),
@@ -34,6 +36,7 @@ SISSO_DI::SISSO_DI(
 
 SISSO_DI::SISSO_DI(
     std::shared_ptr<FeatureSpace> feat_space,
+    std::string prop_label,
     Unit prop_unit,
     py::list prop,
     py::list prop_test,
@@ -51,6 +54,7 @@ SISSO_DI::SISSO_DI(
     _task_sizes_train(python_conv_utils::from_list<int>(task_sizes_train)),
     _task_sizes_test(python_conv_utils::from_list<int>(task_sizes_test)),
     _leave_out_inds(python_conv_utils::from_list<int>(leave_out_inds)),
+    _prop_label(prop_label),
     _prop_unit(prop_unit),
     _feat_space(feat_space),
     _mpi_comm(feat_space->mpi_comm()),
diff --git a/src/python/feature_creation/FeatureSpace.cpp b/src/python/feature_creation/FeatureSpace.cpp
index 8371b387..b450a230 100644
--- a/src/python/feature_creation/FeatureSpace.cpp
+++ b/src/python/feature_creation/FeatureSpace.cpp
@@ -95,12 +95,39 @@ FeatureSpace::FeatureSpace(
     _n_rung_generate(0)
 {
     _n_samp = _phi_0[0]->n_samp();
+
     if(project_type.compare("regression") == 0)
+    {
         _project = project_funcs::project_r2;
+        _project_no_omp = project_funcs::project_r2_no_omp;
+    }
     else if(project_type.compare("classification") == 0)
+    {
         _project = project_funcs::project_classify;
+        _project_no_omp = project_funcs::project_classify_no_omp;
+    }
+    else if(project_type.compare("log_regression") == 0)
+    {
+        if(_task_sizes.size() > 1)
+            throw std::logic_error("Log Regression can not be done using multiple tasks.");
+        _project = project_funcs::project_log_r2;
+        _project_no_omp = project_funcs::project_log_r2_no_omp;
+    }
     else
+    {
         throw std::logic_error("Wrong projection type passed to FeatureSpace constructor.");
+    }
+
+    if(_cross_cor_max < 0.99999)
+    {
+        _is_valid = comp_feats::valid_feature_against_selected;
+        _is_valid_feat_list = comp_feats::valid_feature_against_selected_feat_list;
+    }
+    else
+    {
+        _is_valid = comp_feats::valid_feature_against_selected_max_corr_1;
+        _is_valid_feat_list = comp_feats::valid_feature_against_selected_max_corr_1_feat_list;
+    }
 
     std::vector<node_ptr> phi_temp = str2node::phi_from_file(feature_file, _phi_0);
 
@@ -153,7 +180,7 @@ py::list FeatureSpace::phi_py()
 {
     py::list feat_lst;
     for(auto& feat : _phi)
-        feat_lst.append<ModelNode>(ModelNode(feat->feat_ind(), feat->rung(), feat->expr(), feat->postfix_expr(), feat->value(), feat->test_value(), feat->unit()));
+        feat_lst.append<ModelNode>(ModelNode(feat->feat_ind(), feat->rung(), feat->expr(), feat->latex_expr(), feat->postfix_expr(), feat->value(), feat->test_value(), feat->unit()));
     return feat_lst;
 }
 
@@ -161,6 +188,6 @@ py::list FeatureSpace::phi_selected_py()
 {
     py::list feat_lst;
     for(auto& feat : _phi_selected)
-        feat_lst.append<ModelNode>(ModelNode(feat->feat_ind(), feat->rung(), feat->expr(), feat->postfix_expr(), feat->value(), feat->test_value(), feat->unit()));
+        feat_lst.append<ModelNode>(ModelNode(feat->feat_ind(), feat->rung(), feat->expr(), feat->latex_expr(), feat->postfix_expr(), feat->value(), feat->test_value(), feat->unit()));
     return feat_lst;
 }
diff --git a/src/python/feature_creation/node_utils.cpp b/src/python/feature_creation/node_utils.cpp
index d89a3225..22dd2722 100644
--- a/src/python/feature_creation/node_utils.cpp
+++ b/src/python/feature_creation/node_utils.cpp
@@ -5,6 +5,6 @@ py::list str2node::phi_selected_from_file_py(std::string filename, py::list phi_
     std::vector<node_ptr> phi_selected = phi_selected_from_file(filename, python_conv_utils::shared_ptr_vec_from_list<Node, FeatureNode>(phi_0));
     py::list feat_lst;
     for(auto& feat : phi_selected)
-        feat_lst.append<ModelNode>(ModelNode(feat->d_mat_ind(), feat->rung(), feat->expr(), feat->postfix_expr(), feat->value(), feat->test_value(), feat->unit()));
+        feat_lst.append<ModelNode>(ModelNode(feat->d_mat_ind(), feat->rung(), feat->expr(), feat->latex_expr(), feat->postfix_expr(), feat->value(), feat->test_value(), feat->unit()));
     return feat_lst;
-}
\ No newline at end of file
+}
diff --git a/src/python/postprocess/__init__.py b/src/python/postprocess/__init__.py
index fd104116..3bcb73ba 100644
--- a/src/python/postprocess/__init__.py
+++ b/src/python/postprocess/__init__.py
@@ -1,20 +1,185 @@
 from cpp_sisso.postprocess.check_cv_convergence import jackknife_cv_conv_est
 from cpp_sisso.postprocess.plotting import plt, config
 from cpp_sisso.postprocess.plotting.utils import adjust_box_widths, latexify
-from cpp_sisso.postprocess.utils import get_models
-from cpp_sisso import ModelRegressor, ModelClassifier
+from cpp_sisso.postprocess.utils import get_models, load_model
+from cpp_sisso import ModelRegressor, ModelLogRegressor, ModelClassifier
 import toml
 
 import numpy as np
 import pandas as pd
 
+from scipy.stats import linregress
+
 from copy import deepcopy
 from glob import glob
 
 import seaborn as sns
 
 
-def generate_plot(dir_expr, filename, fig_settings=None):
+def plot_model_ml_plot(model, filename=None, fig_settings=None):
+    """Wrapper to plot_model for a set of training and testing data
+
+    Args:
+        model: The model to plot
+        filename (str): Name of the file to store the plot in
+        fig_settings (dict): Non-default plot settings
+
+    Returns:
+        matplotlib.pyplot.Figure: The machine learning plot of the given model
+    """
+
+    if isinstance(model, ModelClassifier):
+        raise ValueError(
+            "Machine learning plots are designed for regression type plots"
+        )
+
+    if isinstance(fig_settings, str):
+        fig_settings = toml.load(fig_settings)
+
+    fig_config = deepcopy(config)
+    if fig_settings:
+        fig_config.update(fig_settings)
+
+    fig, ax = plt.subplots(
+        nrows=1,
+        ncols=1,
+        figsize=[fig_config["size"]["width"], fig_config["size"]["height"]],
+    )
+    fig.subplots_adjust(**fig_config["subplots_adjust_single_axis"])
+
+    ax.set_xlabel(model.prop_label + " (" + model.prop_unit.latex_str + ")")
+    ax.set_ylabel(
+        f"Estimated {model.prop_label}" + " (" + model.prop_unit.latex_str + ")"
+    )
+    if len(model.prop_test) > 0:
+        lims = [
+            min(
+                min(model.prop_train.min(), model.prop_test.min()),
+                min(model.fit.min(), model.predict.min()),
+            ),
+            max(
+                max(model.prop_train.max(), model.prop_test.max()),
+                max(model.fit.max(), model.predict.max()),
+            ),
+        ]
+    else:
+        lims = [
+            min(model.prop_train.min(), model.fit.min()),
+            max(model.prop_train.max(), model.fit.max()),
+        ]
+    lim_range = lims[1] - lims[0]
+    lims[0] -= lim_range * 0.1
+    lims[1] += lim_range * 0.1
+
+    ax.set_xlim(lims)
+    ax.set_ylim(lims)
+
+    ax.plot(lims, lims, "--", color="#555555")
+    error_label = f"Training $R^2$: {model.r2: .2f}"
+    ax.plot(
+        model.prop_train,
+        model.fit,
+        "o",
+        color=fig_config["colors"]["train"],
+        label="train",
+    )
+    if len(model.prop_test) > 0:
+        ax.plot(
+            model.prop_test,
+            model.predict,
+            "s",
+            color=fig_config["colors"]["box_test_edge"],
+            label="test",
+        )
+        ax.legend(loc="upper left")
+        error_label += f"\nTesting $R^2$: {model.test_r2 :.2f}"
+
+    ax.text(
+        0.99,
+        0.01,
+        error_label,
+        horizontalalignment="right",
+        verticalalignment="bottom",
+        transform=ax.transAxes,
+    )
+    if filename:
+        fig.savefig(filename)
+
+    return fig
+
+
+def plot_model_ml_plot_from_file(
+    train_file, test_file=None, filename=None, fig_settings=None
+):
+    """Wrapper to plot_model for a set of training and testing data
+
+    Args:
+        train_file (str): The filename of the training file
+        test_file (str): The filename of the testing file
+        filename (str): Name of the file to store the plot in
+        fig_settings (dict): Non-default plot settings
+
+    Returns:
+        matplotlib.pyplot.Figure: The machine learning plot of the given model
+    """
+    return plot_model_ml_plot(load_model(train_file, test_file), filename, fig_settings)
+
+
+def plot_validation_rmse(models, filename=None, fig_settings=None):
+    """Plots the mean validation rmses/standard deviation using jack-knife resampling
+
+    Args:
+        models (list of Models or str): The list of models to plot
+        filename (str): Name of the file to store the plot in
+        fig_settings (dict): Non-default plot settings
+
+    Returns:
+        matplotlib.pyplot.Figure: The resulting plot
+    """
+    if isinstance(models, str):
+        models = get_models(models)
+
+    if isinstance(fig_settings, str):
+        fig_settings = toml.load(fig_settings)
+
+    fig_config = deepcopy(config)
+    if fig_settings:
+        fig_config.update(fig_settings)
+
+    fig, ax = plt.subplots(
+        nrows=1,
+        ncols=1,
+        figsize=[fig_config["size"]["width"], fig_config["size"]["height"]],
+    )
+    fig.subplots_adjust(**fig_config["subplots_adjust_single_axis"])
+
+    n_dim = np.max([model_list[0].n_dim for model_list in models])
+    mean_val_rmse, var_val_rmse = jackknife_cv_conv_est(models)
+
+    ax.set_xlabel("Model Dimension")
+    ax.set_ylabel("Validation RMSE (" + str(models[0][0].prop_unit.latex_str) + ")")
+
+    ax.plot(
+        np.arange(1, n_dim + 1, dtype=np.int64),
+        mean_val_rmse,
+        "o",
+        color=fig_config["colors"]["box_test_edge"],
+    )
+    ax.errorbar(
+        np.arange(1, n_dim + 1, dtype=np.int64),
+        mean_val_rmse,
+        yerr=np.sqrt(var_val_rmse),
+        capsize=5.0,
+        capthick=1.5,
+        linewidth=1.5,
+        color=fig_config["colors"]["box_test_edge"],
+    )
+    if filename:
+        fig.savefig(filename)
+    return fig
+
+
+def generate_plot(dir_expr, filename=None, fig_settings=None):
     """Generate a plot a plot from a set of plot settings (defaults stored in config)
 
     Args:
@@ -36,7 +201,7 @@ def generate_plot(dir_expr, filename, fig_settings=None):
         ncols=1,
         figsize=[fig_config["size"]["width"], fig_config["size"]["height"]],
     )
-    fig.subplots_adjust(**fig_config["subplots_adjust"])
+    fig.subplots_adjust(**fig_config["subplots_adjust_single_axis"])
 
     ax.set_xlabel("Model Dimension")
     models = get_models(dir_expr)
@@ -56,7 +221,7 @@ def generate_plot(dir_expr, filename, fig_settings=None):
 
     if fig_config["plot_options"]["type"] == "split":
         # Set yaxis label
-        ax.set_ylabel("Absolute Error (" + latexify(str(models[0][0].prop_unit)) + ")")
+        ax.set_ylabel("Absolute Error (" + str(models[0][0].prop_unit.latex_str) + ")")
 
         # Populate data
         for nn in range(len(models)):
@@ -112,7 +277,9 @@ def generate_plot(dir_expr, filename, fig_settings=None):
         ]
     elif fig_config["plot_options"]["type"] == "train":
         # Set yaxis label
-        ax.set_ylabel("Absolute Training Error (" + latexify(str(models[0][0].prop_unit)) + ")")
+        ax.set_ylabel(
+            "Absolute Training Error (" + latexify(str(models[0][0].prop_unit)) + ")"
+        )
         # Populate data
         for nn in range(len(models)):
             train_error = np.array(
@@ -140,7 +307,9 @@ def generate_plot(dir_expr, filename, fig_settings=None):
         plot_settings["box_edge_colors"] = [fig_config["colors"]["box_train_edge"]]
     elif fig_config["plot_options"]["type"] == "test":
         # Set yaxis label
-        ax.set_ylabel("Absolute Test Error (" + latexify(str(models[0][0].prop_unit)) + ")")
+        ax.set_ylabel(
+            "Absolute Test Error (" + latexify(str(models[0][0].prop_unit)) + ")"
+        )
 
         # Populate data
         for nn in range(len(models)):
@@ -235,7 +404,9 @@ def generate_plot(dir_expr, filename, fig_settings=None):
             zorder=0,
         )
 
-    fig.savefig(filename)
+    if filename:
+        fig.savefig(filename)
+
     return fig
 
 
@@ -318,45 +489,3 @@ def add_pointplot(ax, data, plot_settings):
         zorder=4,
         scale=0.75,
     )
-
-
-def jackknife_cv_conv_est(dir_expr):
-    """Get the jackknife variance of the CV test error
-
-    Args:
-        dir_expr (str): Regular expression for the directory list
-
-    Returns:
-        avg_error: The average rmse of the test error
-        variance: The jackknife estimate of the variance of the test RMSE
-    """
-    train_model_list = sorted(
-        glob(dir_expr + "/models/train_*_model_0.dat"),
-        key=lambda s: int(s.split("/")[-1].split("_")[2]),
-    )
-    test_model_list = sorted(
-        glob(dir_expr + "/models/test_*_model_0.dat"),
-        key=lambda s: int(s.split("/")[-1].split("_")[2]),
-    )
-    n_dim = int(train_model_list[-1].split("/")[-1].split("_")[2])
-
-    models = [
-        ModelRegressor(train_file, test_file)
-        for train_file, test_file in zip(train_model_list, test_model_list)
-    ]
-
-    test_rmse = np.array([model.test_rmse for model in models]).reshape(n_dim, -1)
-    x_bar_i = []
-    for dim_error in test_rmse:
-        x_bar_i.append([])
-        for ii in range(len(dim_error)):
-            x_bar_i[-1].append(np.delete(dim_error, ii).mean())
-
-    x_bar_i = np.array(x_bar_i)
-    avg_error = x_bar_i.mean(axis=1)
-    variance = (
-        (test_rmse.shape[1] - 1.0)
-        / test_rmse.shape[1]
-        * np.sum((x_bar_i - avg_error.reshape(n_dim, 1)) ** 2.0, axis=1)
-    )
-    return avg_error, variance
diff --git a/src/python/postprocess/check_cv_convergence.py b/src/python/postprocess/check_cv_convergence.py
index 12073daa..c260228f 100644
--- a/src/python/postprocess/check_cv_convergence.py
+++ b/src/python/postprocess/check_cv_convergence.py
@@ -1,33 +1,26 @@
 import numpy as np
 from glob import glob
 
+from cpp_sisso.postprocess.utils import get_models
 
-def jackknife_cv_conv_est(dir_expr):
+
+def jackknife_cv_conv_est(models):
     """Get the jackknife variance of the CV test error
 
     Args:
-        dir_expr (str): Regular expression for the directory list
+        models (str or list of ModelRegressor or ModelLogRegressor): Models to evaluate
 
     Returns:
         avg_error: The average rmse of the test error
         variance: The jackknife estimate of the variance of the test RMSE
     """
-    train_model_list = sorted(
-        glob(dir_expr + "/models/train_*_model_0.dat"),
-        key=lambda s: int(s.split("/")[-1].split("_")[2]),
-    )
-    test_model_list = sorted(
-        glob(dir_expr + "/models/test_*_model_0.dat"),
-        key=lambda s: int(s.split("/")[-1].split("_")[2]),
-    )
-    n_dim = int(train_model_list[-1].split("/")[-1].split("_")[2])
+    if isinstance(models, str):
+        models = get_models(models)
 
-    models = [
-        Model(train_file, test_file)
-        for train_file, test_file in zip(train_model_list, test_model_list)
-    ]
-
-    test_rmse = np.array([model.test_rmse for model in models]).reshape(n_dim, -1)
+    n_dim = np.max([model_list[0].n_dim for model_list in models])
+    test_rmse = np.array([model.test_rmse for model in models.flatten()]).reshape(
+        n_dim, -1
+    )
     x_bar_i = []
     for dim_error in test_rmse:
         x_bar_i.append([])
diff --git a/src/python/postprocess/plotting/config.py b/src/python/postprocess/plotting/config.py
index 721c0073..ad26105b 100644
--- a/src/python/postprocess/plotting/config.py
+++ b/src/python/postprocess/plotting/config.py
@@ -4,7 +4,7 @@ import toml
 
 from pathlib import Path
 
-mpl.use("pdf")
+# mpl.use("pdf")
 
 parent = Path(__file__).parent
 
diff --git a/src/python/postprocess/plotting/config.toml b/src/python/postprocess/plotting/config.toml
index 2027a549..7c3111c7 100644
--- a/src/python/postprocess/plotting/config.toml
+++ b/src/python/postprocess/plotting/config.toml
@@ -1,6 +1,6 @@
 [size]
-width = 3.24
-ratio = 1.33333333
+width = 3.25
+height = 2.5
 
 [colors]
 train = "#619365"
@@ -16,6 +16,12 @@ right = 0.835
 bottom = 0.165
 top = 0.95
 
+[subplots_adjust_single_axis]
+left = 0.165
+right = 0.95
+bottom = 0.165
+top = 0.95
+
 [save.kw]
 transparent = true
 
@@ -48,4 +54,4 @@ mean_marker_type = "o"
 median_marker_type = "_"
 show_box_outliers = false
 show_box_caps = false
-show_means = true
\ No newline at end of file
+show_means = true
diff --git a/src/python/postprocess/utils.py b/src/python/postprocess/utils.py
index d3d42488..c08b7e43 100644
--- a/src/python/postprocess/utils.py
+++ b/src/python/postprocess/utils.py
@@ -1,7 +1,8 @@
-from cpp_sisso import ModelRegressor, ModelClassifier
+from cpp_sisso import ModelRegressor, ModelLogRegressor, ModelClassifier
 from glob import glob
 import numpy as np
 
+
 def sort_model_file_key(s):
     """Function to determine the order of model files to import
 
@@ -14,6 +15,36 @@ def sort_model_file_key(s):
     return s.split("/")[-1].split("_")[2]
 
 
+def load_model(train_file, test_file=None):
+    """Loads the correct model from a training and test file
+
+    Args:
+        train_file (str): The filename of the training file
+        test_file (str): The filename of the testing file
+
+    Returns:
+        model: The model from the files
+    """
+    model_line = open(train_file, "r").readline()[2:]
+    if model_line[0] == "[":
+        if test_file:
+            return ModelClassifier(train_file, test_file)
+        else:
+            return ModelClassifier(train_file)
+    elif model_line[:4] == "c0 +":
+        if test_file:
+            return ModelRegressor(train_file, test_file)
+        else:
+            return ModelRegressor(train_file)
+    elif model_line[:4] == "c0 *":
+        if test_file:
+            return ModelLogRegressor(train_file, test_file)
+        else:
+            return ModelLogRegressor(train_file)
+    else:
+        raise ValueError("Model type is not defind.")
+
+
 def get_models(dir_expr):
     """From a regular expression generate a list of models
 
@@ -23,19 +54,24 @@ def get_models(dir_expr):
     Return:
         list of Models: Models represented by the expression
     """
-    train_model_list = sorted(
-        glob(dir_expr + "/models/train_*_model_0.dat"), key=sort_model_file_key
-    )
-    test_model_list = sorted(
-        glob(dir_expr + "/models/test_*_model_0.dat"), key=sort_model_file_key
-    )
+    dir_list = sorted(glob(dir_expr))
+    train_model_list = []
+    test_model_list = []
+    for direc in dir_list:
+        train_model_list += list(glob(direc + "/models/train_*_model_0.dat"))
+        test_model_list += list(glob(direc + "/models/test_*_model_0.dat"))
+
+    train_model_list = sorted(train_model_list, key=sort_model_file_key)
+    test_model_list = sorted(test_model_list, key=sort_model_file_key)
+
     n_dim = int(train_model_list[-1].split("/")[-1].split("_")[2])
+
     if len(test_model_list) > 0:
-        models =  [
-            ModelRegressor(train_file, test_file)
+        models = [
+            load_model(train_file, test_file)
             for train_file, test_file in zip(train_model_list, test_model_list)
         ]
     else:
-        models =  [ModelRegressor(train_file) for train_file in train_model_list]
+        models = [load_model(train_file) for train_file in train_model_list]
 
     return np.array(models).reshape((n_dim, len(models) // n_dim))
diff --git a/src/utils/string_utils.cpp b/src/utils/string_utils.cpp
index 9691d2fb..04e63b3b 100644
--- a/src/utils/string_utils.cpp
+++ b/src/utils/string_utils.cpp
@@ -9,3 +9,23 @@ std::vector<std::string> str_utils::split_string_trim(std::string str, std::stri
 
     return split_str;
 }
+
+std::string str_utils::latexify(std::string str)
+{
+    std::string to_ret = "";
+    std::vector<std::string> split_str = split_string_trim(str, " ");
+    for(auto& term_str : split_str)
+    {
+        std::vector<std::string> split_term_str = split_string_trim(term_str, "_");
+        to_ret += split_term_str[0];
+        if(split_term_str.size() > 1)
+        {
+            to_ret += "_{" + split_term_str[1];
+            for(int ii = 2; ii < split_term_str.size(); ++ii)
+                to_ret += ", " + split_term_str[ii];
+            to_ret += "}";
+        }
+        to_ret += " ";
+    }
+    return to_ret.substr(0, to_ret.size() - 1);
+}
diff --git a/src/utils/string_utils.hpp b/src/utils/string_utils.hpp
index e34fd167..0db946eb 100644
--- a/src/utils/string_utils.hpp
+++ b/src/utils/string_utils.hpp
@@ -25,7 +25,9 @@ namespace str_utils
      * @return The vector with the string split along the split tokens
      */
     std::vector<std::string> split_string_trim(std::string str, std::string split_tokens = ",;:");
+
+    std::string latexify(std::string str);
 }
 
 
-#endif
\ No newline at end of file
+#endif
diff --git a/tests/exec_test/check_model.py b/tests/exec_test/check_model.py
index 86bb8946..0380ccc9 100644
--- a/tests/exec_test/check_model.py
+++ b/tests/exec_test/check_model.py
@@ -1,4 +1,4 @@
-from cpp_sisso import ModelRegressor
+from sissopp import ModelRegressor
 from pathlib import Path
 
 import numpy as np
diff --git a/tests/exec_test/classification/check_model.py b/tests/exec_test/classification/check_model.py
new file mode 100644
index 00000000..5958c75a
--- /dev/null
+++ b/tests/exec_test/classification/check_model.py
@@ -0,0 +1,10 @@
+from sissopp import ModelClassifier
+from pathlib import Path
+
+import numpy as np
+
+model = ModelClassifier(
+    str("models/train_dim_2_model_0.dat"), str("models/test_dim_2_model_0.dat")
+)
+assert model.percent_error < 1e-7
+assert model.percent_test_error < 1e-7
diff --git a/tests/exec_test/classification/data.csv b/tests/exec_test/classification/data.csv
new file mode 100644
index 00000000..6c0ead3c
--- /dev/null
+++ b/tests/exec_test/classification/data.csv
@@ -0,0 +1,101 @@
+index,prop,A,B,C,D,E,F,G,H,I,J
+0,1.0,0.01,-0.01,10.0,10.0,-0.49282517909827384,0.17302597769416206,0.598942935224295,-0.2987544751968252,-0.5812549090102688,-0.11065649421055634
+1,1.0,-1.8944281037421362,-1.3199613439800735,0.9557138968762431,0.4781172014274879,0.7775861911003294,0.6553697167785566,0.17491417142796584,-0.2884988775306041,-0.04531653614948916,-0.6065861937524113
+2,1.0,-1.4746015071142384,-1.2261496452343335,0.33014029248479626,-0.38950549868991224,0.13893526582480842,-0.8718692821675553,0.3747246204870105,0.1641859118951301,0.29356070144371693,0.28560894022002103
+3,1.0,-1.3021341433673468,-1.8262126241881185,-0.71381302228685,0.9687695850076805,0.6831481792028635,0.7991250925387956,0.30947917352650367,0.7280520310034677,0.7254955809948858,-0.6765763028042482
+4,1.0,-1.7393863226933401,-1.583498665054882,-0.06778341709581581,0.22998854989132322,0.5824275980446467,0.2619471491848253,-0.31573435079735024,0.6117812214426803,-0.18705821696723812,0.8717643476903345
+5,1.0,-1.5666089663239755,-1.058618149021826,-0.21280098230276434,0.9154597761466068,-0.2634655525918126,-0.6661264959880135,-0.19502899649000716,-0.23717005768011568,-0.9333588585968833,-0.19726273171241027
+6,1.0,-1.5534087615389538,-1.2520923128583763,0.9725857879017872,-0.8725028871856755,0.7291109108144516,0.26524478721086564,-0.7269009736244325,0.2486261701079393,0.8090043968802652,-0.27849406478047856
+7,1.0,-1.5462532513644658,-1.8123888845081852,0.4316458762210631,-0.5952232732793832,0.9535570697581124,-0.22291521912156265,0.25767093907617356,0.7289278790983178,-0.5797830554176866,-0.17996031942809454
+8,1.0,-1.1273555452403516,-1.6926149744472814,0.8270911990526928,0.20878148291075949,-0.5733171873615286,-0.004887589213519616,0.3419187160346375,-0.2929016713562016,-0.34531700628784034,0.7669205476300731
+9,1.0,-1.3536783481588408,-1.381410564729617,0.023965203621815423,-0.8054125694184839,0.8068101397184948,-0.8065762631278186,-0.3927997785617203,-0.4638193637740795,0.16095248005356044,-0.5534197471316085
+10,1.0,-1.1785315188879562,-1.2770582929850374,0.916015229666356,0.027594664515699696,-0.8295070079776354,-0.7000636893272012,-0.7156014565887143,-0.4397991651435269,-0.4872412204948866,0.24527926705612058
+11,1.0,-1.1754704976687549,-1.0561328124666542,0.4045577188977567,0.17599736106236108,0.22738373082247842,0.7742546525779774,-0.6160809969176364,-0.948639007451084,0.9540764333752245,-0.49710200117233927
+12,1.0,-1.6727791503394291,-1.8619023988358836,0.9067088448860641,-0.5927370303736976,0.8852298038909494,0.8220692972419072,0.20436748556299245,0.24610324883504986,-0.079476866422163,-0.24400699558343364
+13,1.0,-1.9632616543888368,-1.316804580896929,-0.2845703941884137,-0.19868606157423807,-0.1688976095411121,-0.9293925911269099,0.26589905967191263,-0.8287276425847814,-0.42745309747416815,-0.7383680916088831
+14,1.0,-1.7949776980848098,-1.1394821735708154,0.011556817105956574,0.49940831475722924,0.25316342477447806,0.017645446880421023,0.40173516709526447,-0.6502876172985015,-0.262217482830833,-0.5101021201305884
+15,1.0,-1.079571205362615,-1.9324595507799076,-0.5201611742812009,-0.3924784593473962,0.14749583956586831,0.7808796064740751,-0.28104468779917324,-0.14803690813578552,-0.2088104722240931,0.2789619297181276
+16,1.0,-1.5255514503750653,-1.7245520919673556,0.562803219191695,0.3236760616369958,0.49073713641037187,-0.9547619269949603,0.028603504036769145,-0.2462952196505066,-0.29773629359773923,-0.6324738309576534
+17,1.0,-1.2914230950731502,-1.950696152621199,0.8596694610541045,0.4857728192540889,0.26888359882500934,0.253553321699552,-0.04574308756939516,0.6679340327824896,0.3085919639199468,0.7710843014640274
+18,1.0,-1.234047871210012,-1.6817351928784738,-0.11887110046241278,0.15900093776813162,0.29854288417560015,-0.20382920533253834,-0.6379456952513525,0.6580496908109092,0.9495295891340081,-0.577812553880056
+19,1.0,-1.1351305002955077,-1.3119036669604036,0.7403741109304496,0.558782660077505,-0.0960521263545826,0.5291198173605371,0.37219060477014443,0.6886564662536832,-0.8194331653155049,-0.12814415930810963
+20,1.0,-0.01,0.01,-10.0,-10.0,0.7852373497328908,-0.38721773049540054,-0.9424092188994484,0.16080657729767522,-0.7239699836619722,-0.4526501344158229
+21,1.0,1.1507658618080976,1.726050539272399,-0.9229073425132097,0.46575138421963236,-0.81727500527083,-0.18247264092662796,0.8875260706203563,0.11159288397848788,0.3864350788801618,-0.44001721122127235
+22,1.0,1.9038976822470102,1.7188075931659075,0.2803397954645097,0.379365407838544,0.6348430081926235,0.37175391878083874,-0.6116533053698634,0.7325679278741848,0.85803611350317,-0.5779734417084115
+23,1.0,1.777519764523806,1.2869705037057755,0.22285089894507704,-0.4240570888282873,-0.27619426781835954,0.6169416676806943,-0.696779972923147,0.23612770730498034,0.7607058897805274,0.34004139732032956
+24,1.0,1.6531432749387367,1.162828102113115,-0.3395011973829445,0.8985295913658116,-0.8815382282315818,0.09072882666430049,-0.8583958707809345,0.9072723315158959,0.16053173561906742,0.6789118117688409
+25,1.0,1.309551805582044,1.3682775573764787,-0.4445767545785626,-0.7887117451257197,0.2466257730701833,-0.6634740188183126,-0.4463555520604636,0.7503127731346337,-0.9895952297013597,-0.1501201098407059
+26,1.0,1.4492443117189315,1.4032886458116898,-0.38867957733440184,-0.7081934507919516,0.8503100848003078,-0.7672563385316118,0.37050931732919423,0.38735492101575075,0.5331603211649865,0.14939021245513073
+27,1.0,1.613625013917531,1.054483144145669,-0.35964468015596895,0.6825554041477278,-0.5319540093654402,0.9341016895908625,-0.7360028401883201,-0.2996029145405299,0.3511255963558182,-0.1878428846692788
+28,1.0,1.0243392720597995,1.9105960212113278,-0.24940915747071712,-0.13707002438664384,-0.707128531134104,0.9449320492342947,0.43123336605298723,0.44954399095926245,0.9129019322800267,0.7739461096382698
+29,1.0,1.9944467859460666,1.6720498444130638,0.9353026424804634,0.8337209665238072,0.25416795671734294,-0.007922712021390721,-0.11477629537676681,-0.2760428960022421,-0.8130984031254187,0.3419220522124746
+30,1.0,1.4011033028792645,1.109011516195995,0.4257167722550872,0.5441748037327634,0.492863854358204,-0.04758979171716571,-0.7438407906336721,0.5252894890604114,0.8296117155449363,0.01519322184552152
+31,1.0,1.9499509062547116,1.0572741079996884,0.12665368551441047,0.469705238170149,0.8744362482730081,-0.7595711754681347,0.31023073561926484,-0.8034208437448502,-0.4624310824864766,-0.40716588675912946
+32,1.0,1.472646250429945,1.1891364327906537,-0.7313930180310388,0.46110222460300854,-0.8845283918853222,-0.41989394484079834,0.6475182143890668,0.6881263264084854,0.7546563716916033,0.1168819230678162
+33,1.0,1.4590167720750702,1.1702436403729433,-0.5274334249471311,-0.5987746978086463,0.11319779160167642,-0.50528865259863,0.11757211428893855,0.590400320594326,-0.15515938676912566,0.3548279774131968
+34,1.0,1.3204274424304074,1.198019529303835,-0.8186266224051648,-0.0290085645105993,0.9523159683784683,0.8174957842139241,0.18222455484504252,-0.013773043646529981,-0.2627319529358798,-0.8595305628086736
+35,1.0,1.8813823797628926,1.0367008183967852,0.3052186880166259,-0.06088564766002724,-0.38790344660551357,-0.10806404273546488,-0.9629804050096822,-0.42428960420351114,-0.2534422930772855,0.30963736841129674
+36,1.0,1.9986688782460984,1.3690925712861786,0.5431284474003899,0.5913724730408367,-0.8353670866934573,0.19831525342250655,-0.18143473978380187,0.6364291057549478,0.42062864699233105,0.9901223646646209
+37,1.0,1.504558184990442,1.190949743496727,-0.6532636073327616,0.35935945086837595,0.30131719114182043,0.6495817943565889,0.9422689556330865,0.8846598944893771,-0.4731712393443981,0.039635066570717026
+38,1.0,1.008333615471538,1.981506300008274,-0.8123524571767606,0.2197661015909831,-0.6502106779028902,0.4236216902915564,-0.5886509927579104,0.061487886019890414,-0.2377374740160869,0.6412843473808252
+39,1.0,1.6017918572461922,1.1250859962714062,-0.8298193869407411,-0.3451046875738015,0.48516607054511907,-0.2588397274480556,-0.9206152083268813,0.27549821587142675,-0.629075534110342,-0.6425278879606868
+40,0.0,0.01,0.01,10.0,-10.0,0.04151985651136059,0.2330346162909498,-0.49723324619118703,-0.023544587617094992,-0.41854083777000306,-0.5502339327925116
+41,0.0,-1.0914757437035456,1.704187017012845,-0.4803164747027948,-0.7537847103406319,-0.6132342356169982,0.16795557366247427,0.45563663131504173,-0.3802276359532064,0.48021383007368956,-0.45367492988510794
+42,0.0,-1.9425392252914977,1.5931139414465358,0.31009805091338705,-0.835007082906627,0.40758014085085303,0.5569242475965532,-0.38861660463934644,0.6021510475141199,-0.9843221980987535,-0.9963328889833365
+43,0.0,-1.403024210449149,1.0504137974303827,-0.8984564534469641,-0.7970299242453491,0.47491891024478017,0.48019322053841695,-0.7508561635586863,-0.043960372032017636,-0.24265139180566164,-0.10923906105400616
+44,0.0,-1.45810616907354,1.084683264970634,0.5713295229340183,-0.6773798263796229,0.09839698468883196,-0.9615991701040352,-0.7539225919221575,0.3614358912575588,-0.6380304554939824,0.40434902484390767
+45,0.0,-1.6042143290163837,1.5773097324751753,0.40243320555526796,-0.06801187450077983,-0.3730896611520318,0.23970878487105018,0.41645110664336094,-0.5059916627143299,-0.8866903480674095,0.3036452361644302
+46,0.0,-1.5486866135010242,1.3288318457670756,-0.17492524550976607,0.05033039145153584,-0.38867679574193215,-0.7230760497855304,0.6460761077249644,-0.0010558990958802195,0.4919287207437726,-0.647995101369186
+47,0.0,-1.8920756792534963,1.7657625846115277,0.28941076121752496,0.4856651089687205,0.33868477386080054,0.3743195814396485,-0.10547901462716669,0.004520417892417861,0.2228622619759395,0.23538363683763985
+48,0.0,-1.5144292231365304,1.6984040931515498,-0.19904456301740736,-0.8558041127811826,0.9475720005649064,0.6549395628101518,0.802084131057488,0.010033694468233323,0.44976636625057376,0.11997413461843287
+49,0.0,-1.334691441718412,1.8012484689328736,-0.34672956898995055,-0.614828863660672,0.5781503720015266,-0.6973564899083871,-0.27249617742754695,-0.3266795053631859,0.40318590749462313,0.6598349869723568
+50,0.0,-1.392160865916832,1.96030807097305,-0.4709187754332349,-0.16596517376787534,-0.11837327580213919,-0.8046715422993092,-0.27309628387497664,0.9646762900347077,-0.2407860162851745,0.7810927507182175
+51,0.0,-1.1081877449652693,1.1321805921252017,-0.5463150777240524,-0.26339780806113056,0.0734161121606478,-0.5615845135833513,-0.003812545601593831,-0.06790170865979794,-0.7973376248924131,-0.5024942886762793
+52,0.0,-1.1273342237834545,1.222900933909083,-0.23961815168048672,0.2812826831120643,-0.12225333824316409,-0.4163409124224706,-0.3029448237633121,0.9506971678575753,0.08477434826975472,0.24564363747814055
+53,0.0,-1.5450458531844702,1.4646555655585867,0.6724652616073976,0.9636771128762993,-0.7328669447410141,0.26987900702231227,-0.7341217639847928,-0.1847500436486904,0.49478360423045675,-0.5634696889084065
+54,0.0,-1.6972898977881243,1.9342793806461098,0.916674666213795,0.7441006696135166,-0.5363256808793411,0.7453493138967058,-0.6084949711216283,-0.036147807131094334,0.7300972119817077,-0.9860206879212554
+55,0.0,-1.467166856883277,1.9195073363935855,-0.04001237513761069,0.2482575243891476,-0.795936343325832,-0.755933622220192,0.6649430625674231,-0.5608250699419657,-0.987328335835364,0.009181823833889657
+56,0.0,-1.507858084142104,1.11065681931139,-0.7565527152681395,-0.43396597947565385,-0.9258208007633866,0.6212043805382641,-0.7253554358023508,-0.08719504527829058,0.5000400077995837,-0.3510240708674768
+57,0.0,-1.793339477832935,1.6461561157023556,0.5936703687181848,0.7412541556633099,-0.835056311664806,-0.12828334096535055,0.7957690701135833,0.3380628722493768,0.9616102822792876,-0.5197559610490992
+58,0.0,-1.685623286883061,1.7913664511633125,-0.9177920046292005,-0.2248076520670288,0.7511725309540487,0.7449254977655742,0.05482138754018062,-0.26814612271904337,-0.373795753322288,-0.0236199006955784
+59,0.0,-1.7032511687316396,1.561738983983668,0.937331444475048,-0.18914659666867584,0.7267575281390293,0.571196020214809,0.1504784966595285,0.7163709047538911,0.6459479363917942,-0.09651249984138066
+60,0.0,-0.01,-0.01,-10.0,10.0,0.3037482340767377,0.0946840691842421,0.8466519087621067,0.5057109910970319,-0.6648466204250763,-0.7229347856701709
+61,0.0,1.517475034607442,-1.5797683396912157,-0.31385345647165575,-0.6706416904370422,0.33748118903604074,-0.6950596675808771,0.38251266476664836,-0.7540512945658595,-0.5409128937716641,-0.15273659248128935
+62,0.0,1.3672941639996612,-1.5494260699524456,0.7462797650357975,0.3206679093982656,0.0757122783161257,0.5570890283268028,-0.31445996245727414,-0.09117939535299069,-0.7125726183527377,-0.8625237702649187
+63,0.0,1.8755185956540283,-1.0124502444775816,0.9616342423045714,0.9990251718017698,0.4285764726207524,0.7902542298430564,-0.16273214801418256,0.05710841557502144,0.09962536752119067,-0.4177957372666703
+64,0.0,1.8407338686869008,-1.5868070635995237,-0.29373799492321306,-0.6546037139247634,-0.15830470325220958,-0.45061718235930015,0.10621728605636549,-0.25016507950845557,-0.5988943508598357,-0.8603824760047425
+65,0.0,1.4799923864034554,-1.6886196544558572,0.6613747096357248,0.3354136960485343,0.29540846912662677,-0.3407250803665458,0.6119612274582391,0.5332770226092296,-0.960254363897463,0.9132513378340925
+66,0.0,1.0735581028251975,-1.060524245309374,-0.7902813350132356,0.37259465524782054,-0.9400757902613452,0.9721066172153674,-0.24687488719815498,-0.5015445240130325,-0.13494761193218818,0.1300908069763218
+67,0.0,1.6376974303400835,-1.649460990932646,-0.6005900469726237,0.28162130970935295,0.8362440030881721,0.5625055617944303,-0.24424824400159317,0.2742731104136069,0.9882291644128922,-0.9034928924297636
+68,0.0,1.9226795203724978,-1.5881079200154482,0.23039784446724942,0.458000795025685,0.16053436480789807,0.10676023110363286,0.08437633629048147,-0.4102570968096315,-0.3889759130323822,0.23368493276044622
+69,0.0,1.428218106951723,-1.758329763791654,0.12289411290053698,-0.19374642536783515,0.6024111339994527,-0.8956945110997676,0.34728022344428666,0.045175117581032875,-0.2326617713895407,-0.3146487851555211
+70,0.0,1.426028756973607,-1.1608245105048438,0.9060271622161762,0.7364181822252924,-0.04128485443820251,0.30852412684049657,0.3692055404974064,0.33319303146616197,0.9854449773409701,-0.253876502721057
+71,0.0,1.7300201940414208,-1.8094742195380222,-0.6776614683334685,0.07388223501889013,-0.6821472673109052,0.02412639199219635,0.8489462496789089,-0.5162539947354388,0.2026274256350431,-0.8974772498432042
+72,0.0,1.1160580867858627,-1.0562234913753827,0.4924315133007724,-0.737330353527688,0.5948943274413478,0.8054360371547522,-0.9127166792458934,-0.39019932233826227,-0.7358052031844453,-0.058032643451690014
+73,0.0,1.5287830677917298,-1.5282207370489649,-0.8638215305852943,-0.9871259051181829,-0.6981909166452223,-0.17859271120363984,-0.9024979934000754,0.7774480505476058,0.03349780154212989,0.5698021932461961
+74,0.0,1.6960209130376898,-1.6879132950675153,-0.9196790361121787,-0.08379502301562369,0.49207875063490536,-0.10278600265499382,0.1680009845018644,-0.9849109111206711,-0.9010178860550528,0.6398135602683432
+75,0.0,1.8229209542705762,-1.7992151616780458,0.10745593717114521,-0.8547117567503333,0.3449692462697873,0.5190929861298248,0.41023065780507606,-0.9121646126915404,0.0339436116875278,-0.3066433169799614
+76,0.0,1.153822450324946,-1.9125109596392957,-0.8084861601829396,-0.010443047871684152,-0.7062967902838859,0.8221182617361114,0.16332743077240175,0.25278629136411546,-0.501338527911191,-0.28349201031842997
+77,0.0,1.1952162783159461,-1.4347201247937995,-0.8144168383678148,-0.029402316469989476,-0.8414282024081439,-0.004586605289200518,-0.6064347305419278,0.7142773164379119,-0.4448189769242301,-0.7536984563026647
+78,0.0,1.9935896172064345,-1.5249947828194232,-0.8776374613798481,0.4144055355504066,-0.033655814948979845,0.6246920435596353,-0.8324026588913145,-0.7230280627324008,-0.8670990346040541,-0.18563237806149768
+79,0.0,1.623519204932399,-1.5204567735605674,0.9778286856360292,-0.5750238094139197,-0.4026176094620355,0.6319679592519518,-0.42650442043409664,0.4805794604963276,0.6863380782764676,-0.7938128517078891
+80,1.0,-1.9061964810894976,-1.2890845064683893,10.0,10.0,0.10110213628450881,-0.416199695149021,-0.49485098716478193,-0.5686984484832116,-0.18478238247187462,-0.5522304988566058
+81,1.0,-1.1233456870613596,-1.4319272868794908,-0.20267104500415667,-0.13891416360392483,-0.9371567107968573,-0.11679010938437773,-0.0942374319418513,-0.8967611185539714,-0.18342332063686673,0.4586246330654187
+82,1.0,-1.8593800902098794,-1.2014277824817987,-0.6994029020523282,0.8016069079080759,0.618074329335756,-0.17256870875707642,-0.07569344530437294,0.48881526808669196,-0.6122253862675848,0.5154748580158188
+83,1.0,-1.4459305927616168,-1.507381441431149,-0.14646706623716144,0.7737171788723411,0.4987969849603513,-0.01586272159205504,0.48716282764946706,-0.0020169225903672316,-0.4803954556572778,0.14066039485631854
+84,1.0,-1.5068337349461003,-1.3960574872196596,0.9352733360226106,-0.6584023257750704,0.2540286154963187,-0.2079493637863219,0.49423396418171595,0.3425440151560937,-0.2790717466048003,0.6817667819203079
+85,1.0,1.2945952163736183,1.2595474551517882,-10.0,-10.0,-0.7729483005820612,-0.7555910808571309,-0.7956918977844933,0.1406538353922091,-0.16048348692278092,0.46092093570445214
+86,1.0,1.0468940151290935,1.4889992490615644,-0.10280602307649511,-0.2322567217543967,0.9824873120780633,0.22063948796997224,0.46610825105829923,-0.32823900060322386,0.9556882858690123,0.9840121424736405
+87,1.0,1.5883047440360363,1.7022605521341374,-0.40090994887229314,-0.8872400296917884,-0.7963665539711986,-0.18901134135900155,-0.9842642698324229,0.22853934832310796,0.6960450376429215,-0.7349411665560721
+88,1.0,1.0700121628460502,1.818456986404958,-0.9066754218923723,0.6435018002723063,0.29644429045149967,-0.21233982252142947,-0.6249473476636442,-0.07650553418511508,0.6900069458740186,0.6031788656970374
+89,1.0,1.478188533919311,1.1810797217515985,-0.7778783717821762,0.15870040018507803,0.7700838694175804,0.31820158149436617,-0.577373286340777,0.2079154087822559,0.16989820716894366,-0.13471834974110908
+90,0.0,-1.3979253633769553,1.8903759983708976,10.0,-10.0,-0.3815436230444891,-0.15060860491731232,0.9402009350589582,-0.26012695659385154,0.011178432296194751,-0.5526461887962022
+91,0.0,-1.3418191928050067,1.3777038429060606,-0.2738961073464674,0.9218628887177005,0.1833297141250405,0.7949957967753243,0.4703407862424096,0.5871591279939063,0.6560651905340187,0.7103783594351551
+92,0.0,-1.0853574965532813,1.2568456448317546,-0.09343868466017535,0.8673637319098968,0.5019793353377939,0.9291335314667162,0.8530385462334946,0.23164737184209572,-0.9213639337894683,0.9955206665909002
+93,0.0,-1.5078347061732043,1.755372973469426,-0.35304762896340125,0.6869964596284959,0.12650715249211952,-0.5841575512334931,0.6780119845973502,0.1301840756737609,-0.5413658827498185,0.804095414322346
+94,0.0,-1.6723266529177478,1.9139884218475265,-0.055989266428471796,0.08397268885628328,0.49540687896065805,-0.5318515111518416,-0.6829875503825202,-0.7627193412374218,0.044183568378214355,0.5694928604351057
+95,0.0,1.5219674737320155,-1.8127243158447541,-10.0,10.0,-0.5924273489245648,-0.24521529180917545,0.45028680560933676,-0.6172008060217697,-0.07832380637663072,-0.13840019966409445
+96,0.0,1.3427761908932137,-1.0426461453585447,-0.8405236108806917,-0.5797680617663143,0.20708806522492362,-0.30689024242517027,-0.7073198325932093,0.06720948720809505,-0.21904144161504235,0.6516183145928414
+97,0.0,1.729966706858185,-1.2614818535634313,-0.07134725891047888,-0.5716479318807921,0.002484974059520084,0.4103461232511616,0.29425426224880424,0.6980183692479018,0.6525532678930528,-0.9606212198157282
+98,0.0,1.6367960859950461,-1.4048311726687266,0.13335534338270483,0.7851836236372127,0.10649410652264102,0.45700338475494173,-0.31447076807019614,-0.053371126918829725,0.8614734514136297,-0.7701671581075855
+99,0.0,1.2253193252857404,-1.3983212310825488,0.7518196805414694,0.8434776597312679,0.880714646905367,0.20665859661747032,-0.8505399954222603,0.7702440358432017,-0.790477429383416,-0.21937326040566685
diff --git a/tests/exec_test/classification/sisso.json b/tests/exec_test/classification/sisso.json
new file mode 100644
index 00000000..d08faba2
--- /dev/null
+++ b/tests/exec_test/classification/sisso.json
@@ -0,0 +1,14 @@
+{
+    "desc_dim": 2,
+    "n_sis_select": 1,
+    "max_rung": 1,
+    "n_residual": 1,
+    "data_file": "data.csv",
+    "property_key": "prop",
+    "leave_out_frac": 0.2,
+    "n_models_store": 1,
+    "calc_type": "classification",
+    "leave_out_inds": [80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95 ,96 ,97, 98 , 99],
+    "opset": ["add", "sub", "abs_diff", "mult", "div", "inv", "abs", "exp", "log", "sin", "cos", "sq", "cb", "six_pow", "sqrt", "cbrt", "neg_exp"],
+    "fix_intercept": false
+}
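A quick consistency sketch for the classification test inputs above, using only the standard library (file paths are relative to the test directory and are an assumption): the 20 leave_out_inds should address the final 20 rows of data.csv, in line with leave_out_frac = 0.2 of the 100 samples.

# Consistency sketch for tests/exec_test/classification (stdlib only).
import csv
import json

with open("sisso.json") as fh:
    config = json.load(fh)

with open("data.csv") as fh:
    n_samples = sum(1 for _ in csv.reader(fh)) - 1  # drop the header row

assert n_samples == 100
assert len(config["leave_out_inds"]) == round(config["leave_out_frac"] * n_samples)
assert sorted(config["leave_out_inds"]) == list(range(80, 100))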
diff --git a/tests/data.csv b/tests/exec_test/default/data.csv
similarity index 99%
rename from tests/data.csv
rename to tests/exec_test/default/data.csv
index 57be8572..00650540 100644
--- a/tests/data.csv
+++ b/tests/exec_test/default/data.csv
@@ -1,4 +1,4 @@
-Sample,Task,Prop,A (m),B (s),C ,D (Unitless)
+Sample,Task,Prop,A (m),B (s),C,D (Unitless)
 1,X,1031303.34310437,40047.7725031033,81.6019767547866,12535.2818525271,-683.666065848847
 2,X,207179.181972689,8273.93114052335,47.4359192293739,2518.19019867913,-1407.86160002623
 3,X,594547.990034924,-24495.5390890833,46.3994727792424,7226.59341895378,-154.449699580799
diff --git a/tests/exec_test/default/sisso.json b/tests/exec_test/default/sisso.json
new file mode 100644
index 00000000..14446f06
--- /dev/null
+++ b/tests/exec_test/default/sisso.json
@@ -0,0 +1,14 @@
+{
+    "desc_dim": 2,
+    "n_sis_select": 1,
+    "max_rung": 2,
+    "n_residual": 1,
+    "data_file": "data.csv",
+    "property_key": "Prop",
+    "task_key": "Task",
+    "leave_out_frac": 0.05,
+    "n_models_store": 1,
+    "leave_out_inds": [0, 1, 2, 60, 61],
+    "opset": ["add", "sub", "abs_diff", "mult", "div", "inv", "abs", "exp", "log", "sin", "cos", "sq", "cb", "six_pow", "sqrt", "cbrt", "neg_exp"],
+    "fix_intercept": false
+}
diff --git a/tests/exec_test/gen_proj/sisso.json b/tests/exec_test/gen_proj/sisso.json
new file mode 100644
index 00000000..82a1a850
--- /dev/null
+++ b/tests/exec_test/gen_proj/sisso.json
@@ -0,0 +1,15 @@
+{
+    "desc_dim": 2,
+    "n_sis_select": 1,
+    "max_rung": 2,
+    "n_residual": 1,
+    "data_file": "../data.csv",
+    "property_key": "Prop",
+    "task_key": "Task",
+    "leave_out_frac": 0.05,
+    "n_models_store": 1,
+    "n_rung_generate": 1,
+    "leave_out_inds": [0, 1, 2, 60, 61],
+    "opset": ["add", "sub", "abs_diff", "mult", "div", "inv", "abs", "exp", "log", "sin", "cos", "sq", "cb", "six_pow", "sqrt", "cbrt", "neg_exp"],
+    "fix_intercept": false
+}
diff --git a/tests/exec_test/log_reg/data.csv b/tests/exec_test/log_reg/data.csv
new file mode 100644
index 00000000..ca8bf5a5
--- /dev/null
+++ b/tests/exec_test/log_reg/data.csv
@@ -0,0 +1,101 @@
+Sample,Prop,A,B,C,D
+1,0.042644585721321,49.8070975337169,307.029288124149,127.290160977898,119.363921090366
+2,0.051286552544473,207.151687934325,385.803129282962,232.077642053051,152.451146796233
+3,0.115456483870835,259.115243485315,350.89565982664,126.112015312861,194.269240170231
+4,15.1980039696031,341.392794952748,2.50119646241174,370.843032180154,11.5134267982876
+5,0.030828548720774,123.642359657426,107.674457089161,278.481606674966,277.417740655869
+6,0.113310081828946,434.902594566398,405.870579391739,241.257457440821,213.210004173378
+7,0.071973103667228,393.51387804721,243.422246542275,98.6921352953811,38.4646528894646
+8,0.079124869408332,454.177123336891,312.18007412968,365.093447826501,93.4027537695288
+9,0.084392098447046,262.904172099227,476.075762264071,95.6336322308505,269.096484900607
+10,0.044467510536784,447.85350715457,423.476173355266,422.140647530134,105.30966798588
+11,0.032607637392685,156.982834162006,216.937871068582,436.272542198989,438.578448993111
+12,0.026590761658031,30.4947043445939,353.866638985695,131.530432627816,103.578990316735
+13,0.882677326292512,324.183123925377,379.786690798287,93.7301886485703,440.061356079183
+14,2.92863264703151,253.756388912426,191.488748047164,69.2744710777569,166.908183251839
+15,0.041178758470398,433.425208331938,422.012179205648,138.480233516687,94.7864081755759
+16,0.02951523999224,364.325349150667,478.127595998362,85.6428392655327,93.6000841896524
+17,0.103881442342912,286.280745819571,226.720331942159,138.891298487121,78.0909759774124
+18,0.079447386532567,184.446053206631,428.401650931551,356.108594859543,238.138089889099
+19,0.490048714715941,437.097165477292,89.4506321427452,357.249495038998,21.2823078127849
+20,0.315083955685341,464.689646103278,357.8239998639,166.719192215288,470.323737680847
+21,0.336628829642389,453.229393345602,121.062956838254,368.072231287668,33.8834258163608
+22,0.596250795257965,329.206703754852,336.263700827181,259.138467352137,407.462821099063
+23,0.024008996387738,429.499336054639,451.30307811156,253.003258066353,11.9533585885846
+24,0.702221733754583,293.874006420616,221.692047223248,42.2106293777491,277.645795523081
+25,0.807995736802549,276.103314658051,161.778514482533,426.942506234502,208.868975590116
+26,0.002730384017923,83.1768893799547,13.0356263432169,197.449738789384,355.96703006125
+27,0.034760234337076,239.859835781859,226.950754683377,422.533852762138,472.823756482285
+28,0.103325140788876,142.348449069038,308.188608196991,19.8589904106916,445.782867882951
+29,0.069515399570576,325.065695936087,341.202112577236,280.094904922382,126.698479235295
+30,0.026792086484689,105.496640506796,431.709809945209,201.067642207524,125.346044992663
+31,0.06968520346432,278.288015848546,340.463606947326,62.5783439753465,135.763786890355
+32,0.033814657993073,314.022680967248,366.997527539967,146.288622368974,52.3761667809278
+33,0.036437360543752,182.544295340408,74.6987790052084,111.509818170011,241.023451399827
+34,0.007278291999309,161.331981931619,108.016457758383,331.063276457351,498.833301187792
+35,0.063162819962502,489.309265600451,424.263298255808,159.148021054163,150.645927662812
+36,0.084687250049346,268.778111170417,12.2088368874704,283.50778492837,126.160505819305
+37,0.193238168342884,356.429298308577,16.8790201937628,79.0256508718712,105.78215423975
+38,0.002398998011824,18.700576586924,54.7261653842681,148.691281396519,365.033771980965
+39,0.153459585676185,294.597161844894,103.910596069842,345.840575142828,208.072222213417
+40,0.008244336063986,284.63690286644,15.4716735442082,102.064258484033,407.319217444755
+41,0.038860594113166,242.474343916146,489.029235376506,398.649900719263,182.88700899363
+42,0.068119481955482,288.653567920823,104.182871116269,70.369683070715,260.771564816744
+43,0.645977325788857,285.468431709879,261.088369157819,493.106944180385,321.625679649568
+44,0.056318139669975,180.087855188098,72.886342824395,103.167691072242,204.56315967502
+45,4.03890609225025,198.253697705946,243.375043815026,348.789155694472,222.634358725914
+46,0.050956703375414,66.8423643063199,65.3676810191975,496.070776260521,158.342895817271
+47,7.71218334170515,395.439636236816,230.35515336685,64.9645947685536,211.908463158588
+48,0.0214328895491,176.169330689589,464.950315259736,452.14257500201,81.9464407280035
+49,2.6815357030127,431.955781915035,308.66349882149,253.874224338872,343.837415858124
+50,0.750972167059506,224.327882299282,147.30201835669,243.603529830416,103.105383948355
+51,0.01828936588316,249.505131204334,114.794245180025,129.495085479016,408.195718175082
+52,0.323325638910123,159.141681749234,106.854744390776,18.9019889656962,51.4307457794658
+53,0.033770961308791,413.581373474192,154.408120689668,446.571947766101,435.968644308603
+54,0.318264739192304,346.575691958707,59.8626851714252,255.289629539941,132.392312946497
+55,0.543889052259968,202.50018301682,224.87814578565,179.824036025239,168.045578982369
+56,0.014813028268087,317.664906479146,95.6823941971786,410.391814113528,449.004998805183
+57,3.57717288413323,275.990345747377,445.847337469333,364.60493511239,475.71306458261
+58,0.129900583191695,9.14924177359304,119.375334020565,113.361398805488,62.8300931374718
+59,0.03708937574487,185.837293865833,447.695352621602,145.051135814477,160.694140439915
+60,0.014921382861101,8.28959490938569,378.093007577705,419.707558105521,40.4024046726325
+61,0.387351324110643,407.707347482472,350.199800459508,105.74924661113,253.972219444862
+62,0.101466855479979,267.424013298673,386.705098956208,106.841281052243,212.004932409899
+63,1.32736567001172,352.225865613491,171.568875244562,466.911865424454,130.806654602562
+64,0.028663161770634,200.509116400279,185.054151659821,169.780882949394,426.355265958408
+65,0.066319972191657,393.903835973502,34.1090298843032,395.993284870997,201.465690562591
+66,0.225228846252684,416.977891536474,469.286103651892,451.235879905236,329.368398152194
+67,0.193954759220393,161.654732137163,117.19778787807,386.605589800927,191.350615565409
+68,0.096588543650903,207.153853909472,281.333900278231,463.836665364427,431.036779599936
+69,0.045009258705634,363.180221118644,60.3413214970106,282.709764209504,263.181135410842
+70,0.148522003805379,337.475883356403,345.712927332466,287.038267897121,198.123437087294
+71,0.233773563579686,417.561141951304,416.699554349938,87.471372642944,284.442500963842
+72,0.07632953740424,353.462065059428,230.244117781226,205.862394545593,418.718162036753
+73,0.018751232608648,211.936255629827,38.9153036995522,335.407375291207,269.160151304954
+74,0.11436186782933,232.953194819263,75.6969339038158,145.359684234586,179.190191924589
+75,0.059780123967251,69.2355001498863,279.298667202486,382.963222385851,434.84703056805
+76,0.039466512803061,423.250211589563,470.623951042526,385.799699148574,127.024253733708
+77,0.012044591018025,208.006274730239,51.9283654736177,256.955959532126,347.239249164622
+78,0.02529290143529,104.714857532854,304.9501964971,31.8389612251596,37.8811131918349
+79,0.004302255370929,177.067793973296,28.3724394563165,331.479931132225,461.564224843564
+80,0.074133627013002,158.134705180115,315.976143027337,278.395310237464,484.311241814557
+81,0.160836802038602,410.048253861679,191.235924936736,470.978702738376,322.209828240428
+82,2.01269810172184,405.219860759387,382.357826642821,340.408900681332,340.037356281025
+83,0.086463232833669,465.637225155392,273.935550533945,471.461401666282,69.4129300775254
+84,0.09652399673283,484.793856217189,217.858512463092,317.765365776244,30.5574828436997
+85,25.7254804725824,82.523989196792,270.839083225059,174.362686576973,263.842713219115
+86,3123.2782662272,106.094612132738,332.523075165983,43.7520305984393,333.205085763281
+87,0.024585156837383,445.981783633524,66.2298255679999,373.905334245721,377.150351563984
+88,0.018205881117995,117.196993636293,487.277963654347,133.617747939771,85.6568366996931
+89,0.188996142204433,479.415773481536,87.8379455379654,429.859856289863,204.165337443809
+90,0.030687409853583,324.976847605473,470.960628473335,126.942577378075,106.983488589713
+91,0.084751033512141,234.705200183164,155.730168172949,303.535045799807,16.2640518096776
+92,0.019959943675665,342.417666791063,8.85018592692246,287.691069761052,283.171709994183
+93,0.017102517653105,162.236891091598,81.3657842047582,456.824872475328,318.421521204707
+94,0.070128975496401,23.8513212365969,333.533751731515,196.322141219655,479.082443413362
+95,0.006819280465836,151.095020172981,37.8615330871062,227.034115691539,362.755341229094
+96,0.009406739334267,32.3147853544107,113.549008148151,57.9053526795503,348.470092929769
+97,0.026975504059684,53.4863829718894,186.176691450922,56.7764485836142,371.957528066938
+98,711.780196017338,7.36908228123383,488.268102038773,252.670421142526,486.698386703915
+99,2.82416953708447,183.480683022873,368.758162863328,2.80315566798772,340.166678214112
+100,0.3850029324009,482.322626243789,100.05861510573,495.618985557607,17.9764839910466
diff --git a/tests/exec_test/log_reg/sisso.json b/tests/exec_test/log_reg/sisso.json
new file mode 100644
index 00000000..02f6a552
--- /dev/null
+++ b/tests/exec_test/log_reg/sisso.json
@@ -0,0 +1,14 @@
+{
+    "desc_dim": 2,
+    "n_sis_select": 1,
+    "max_rung": 1,
+    "n_residual": 1,
+    "data_file": "data.csv",
+    "property_key": "Prop",
+    "leave_out_frac": 0.05,
+    "n_models_store": 1,
+    "calc_type": "log_regression",
+    "leave_out_inds": [0, 1, 2, 60, 61],
+    "opset": ["add", "sub", "abs_diff", "mult", "div", "inv", "abs", "exp", "log", "sin", "cos", "sq", "cb", "six_pow", "sqrt", "cbrt", "neg_exp"],
+    "fix_intercept": false
+}
diff --git a/tests/exec_test/max_corr/sisso.json b/tests/exec_test/max_corr/sisso.json
new file mode 100644
index 00000000..124243f9
--- /dev/null
+++ b/tests/exec_test/max_corr/sisso.json
@@ -0,0 +1,15 @@
+{
+    "desc_dim": 2,
+    "n_sis_select": 1,
+    "max_rung": 2,
+    "n_residual": 1,
+    "data_file": "../data.csv",
+    "property_key": "Prop",
+    "task_key": "Task",
+    "leave_out_frac": 0.05,
+    "n_models_store": 1,
+    "max_feat_cross_correlation": 0.9,
+    "leave_out_inds": [0, 1, 2, 60, 61],
+    "opset": ["add", "sub", "abs_diff", "mult", "div", "inv", "abs", "exp", "log", "sin", "cos", "sq", "cb", "six_pow", "sqrt", "cbrt", "neg_exp"],
+    "fix_intercept": false
+}
diff --git a/tests/pytest/data.csv b/tests/pytest/data.csv
new file mode 100644
index 00000000..9f314329
--- /dev/null
+++ b/tests/pytest/data.csv
@@ -0,0 +1,101 @@
+Sample,Task,Prop,A (m) : (-inf; inf) | [0.0],B (s) : (0; infty),C,D (Unitless) : (-infty; 0)
+1,X,1031303.34310437,40047.7725031033,81.6019767547866,12535.2818525271,-683.666065848847
+2,X,207179.181972689,8273.93114052335,47.4359192293739,2518.19019867913,-1407.86160002623
+3,X,594547.990034924,-24495.5390890833,46.3994727792424,7226.59341895378,-154.449699580799
+4,X,1431871.75085735,-5975.17124802999,96.2922472869417,17404.1240046628,-383.63965153104
+5,X,2132341.51391611,33545.2455355934,23.2389997524879,25918.2170844233,-2214.8717939546
+6,X,1849456.85903214,-36585.1506450251,21.7653754396546,22479.8013103184,-499.788202406702
+7,X,416377.473683951,47617.1641535909,53.9342164837372,5060.96052467702,-2002.28785563532
+8,X,1834852.24383494,164.577549590314,55.7417291729005,22302.2848302114,-1462.8889504883
+9,X,2030615.0021387,-25590.077352893,13.3180597514294,24681.7483092487,-267.582565811964
+10,X,418204.906991729,-35631.266855653,67.830087711799,5083.17267158509,-2819.77637904098
+11,X,1600764.65336791,24069.5603461085,91.2031527296231,19456.9890506716,-2706.92171287459
+12,X,-237442.303891325,-28375.8492844066,76.6780058713539,-2886.10976641617,-1650.25772935281
+13,X,389569.403019936,-17679.1039531987,93.7334723703787,4735.11289934218,-553.765889146761
+14,X,1097874.59558522,25271.39171418,53.6965192771211,13344.4443174432,-1094.01486564295
+15,X,896512.426133544,-16691.6898965759,19.4379065649528,10896.9207498079,-2899.60958857901
+16,X,12475.3344165542,11073.3959911305,52.0025761588363,151.597422562947,-782.134708201617
+17,X,643218.531288929,-33665.7156040407,29.7373317632719,7818.17572605823,-1080.66347038372
+18,X,888098.246309737,-42864.1312633446,93.9228362331387,10794.6477981533,-1638.80485180208
+19,X,1636015.66023612,-1874.52319024457,61.4904198919873,19885.4591582095,-2643.77032366468
+20,X,1523022.28471858,-49138.4737863941,17.975585548934,18512.0435328828,-560.378442383903
+21,X,-18066.9165614168,-35122.5184807359,6.32108929256205,-219.638541412487,-1004.04464422701
+22,X,753574.994852389,-504.277781827623,64.3463985117791,9159.54014727008,-690.33547481712
+23,X,484679.670507055,-47904.9616755848,34.793137673643,5891.16232922052,-2871.23133035778
+24,X,1418886.29518641,40005.8303266016,89.663527446701,17246.2879576819,-1230.52218744124
+25,X,746864.366592613,-29303.0557293284,63.1160346689987,9077.97355841423,-3078.94168258733
+26,X,826676.469591929,31855.9700915967,12.4598774065994,10048.0763518243,-3214.1429201838
+27,X,904870.905255709,-1370.05112198737,18.1776031280461,10998.5166695707,-1733.87235240405
+28,X,1081673.04047048,46129.8007590074,65.8763747557873,13147.5171186325,-1237.15538447696
+29,X,1602766.31102942,12215.0498178804,28.9863403535557,19481.3188655265,-2669.08606113272
+30,X,848296.081366335,-8523.54146953082,14.4884132013553,10310.8591252139,-1070.59795231075
+31,X,881987.050483579,-32109.023962203,59.952453510063,10720.3672326848,-1978.64149010475
+32,X,1384967.83924126,31795.5231836559,46.3619825035018,16834.0147857661,-3214.77894538541
+33,X,1435243.99308821,-41605.9821955878,61.1093419800895,17445.1130460068,-1581.87602287648
+34,X,1482822.4415542,-49423.8250112063,57.7898783145655,18023.4211475179,-2245.35073430102
+35,X,1159462.50457973,24974.6967563244,2.46710777290358,14093.035073862,-1653.30479641573
+36,X,1385445.91552098,44300.000697173,14.1598975077974,16839.8257231643,-1154.39523418031
+37,X,1078840.90378916,-33471.5314909414,86.4825835158785,13113.0929698841,-1772.81761496697
+38,X,322072.318257427,-32616.3765208785,71.5517709413264,3914.69709752203,-1834.58611475719
+39,X,1547503.57192612,15339.6613906795,78.8203546957091,18809.6094936362,-538.87662795121
+40,X,1174714.5075073,38777.544632935,63.0951620300882,14278.4206242917,-380.323852794412
+41,X,94875.3402808423,12249.6781769406,90.3127462736438,1153.15574346477,-1590.10909636815
+42,X,362160.364120508,-49277.9984660007,8.3266338235128,4401.96060534147,-1423.02119058586
+43,X,673617.378755157,21157.5642575089,40.4360003803782,8187.66864424759,-2304.57417593545
+44,X,882351.052225793,44482.8684188695,60.148559750113,10724.7916131167,-3010.89784032583
+45,X,22400.9390066318,17108.6417404538,68.2422016131663,272.24149001619,-1091.87923472037
+46,X,1781136.79777257,30136.189144163,65.8784392884513,21649.382366535,-779.999951946907
+47,X,621416.608280441,-31495.5881531396,67.4176383345993,7553.17699006137,-3091.37667023128
+48,X,750411.885581194,42277.9111802948,52.7091601206799,9121.09305972893,-1213.67564944238
+49,X,1525062.49801326,-20619.9327982041,18.5983023041602,18536.841985025,-518.413321644593
+50,X,679068.208535292,42337.0868480189,55.8737535970023,8253.92257061978,-1337.41889839093
+51,X,447826.687204506,-3841.47148699515,57.8803936758992,5443.22046731452,-2117.64647879144
+52,X,336890.280723035,-25698.4911052116,26.2484582718796,4094.80695856079,-2304.9408398086
+53,X,468079.149217039,-36421.9167980631,9.52225176867021,5689.38576313015,-2346.34809901136
+54,X,1404060.53519045,10116.138294505,33.8807589471792,17066.0833189846,-2177.75555908996
+55,X,1827150.95390431,33677.6712656449,65.3664484400669,22208.6767557623,-768.872566798946
+56,X,-33394.4572217261,23643.588170146,95.3617653535894,-405.942240360551,-802.333589068958
+57,X,1443453.59596531,48648.6785581152,83.107773775309,17544.8993990111,-1826.75004222983
+58,X,1550858.36965351,39565.5654401456,28.6332188363784,18850.3865001573,-176.047021901582
+59,X,329623.778660326,9384.94614690253,83.9023194218408,4006.48383865674,-1510.2742546313
+60,X,596362.271476793,-7862.7530203713,84.8842436218459,7248.64570723748,-1125.70379322904
+61,Y,-1747903.77060764,-15426.701719437,73.530132833494,12277.508278164,-2388.05382648639
+62,Y,-602031.002716425,-26628.9177804096,56.1127291052339,4228.72153980883,-2494.38544516297
+63,Y,-914915.654901957,-22908.9603476779,55.2235512174418,6426.47150794108,-3065.18336481344
+64,Y,-1293976.98085175,44255.5466634393,24.3327718109724,9089.05662095335,-1530.79847762564
+65,Y,-556992.07118952,44821.3470186639,63.0165978378747,3912.36115061704,-3240.22306333347
+66,Y,-2294033.39637973,-44132.4823446645,42.5612469609221,16113.6068505302,-255.147829778129
+67,Y,-1213629.16675478,-42539.8069752961,48.9584343155192,8524.68120454026,-164.586906089718
+68,Y,292809.005099769,-49432.7543013633,80.507648968553,-2056.77244309559,-871.09190770659
+69,Y,-2235342.64861732,-6632.95213361424,93.4293107228537,15701.3540037878,-2178.77545326323
+70,Y,-3732932.41042696,-40485.6986880114,25.9765685287417,26220.6550555191,-598.407067002771
+71,Y,-252210.474776827,-14427.735364365,59.6676061209021,1771.52829396117,-845.471004201544
+72,Y,-98889.6656695742,-41488.1745504839,42.4820587894618,694.579325611127,-1299.98047519081
+73,Y,-1370204.37668197,-21879.2550863842,34.7942407834795,9624.48958514065,-1954.71594115708
+74,Y,-3104420.51726401,31704.4165180227,44.4564228685462,21805.8907533925,-977.750657934738
+75,Y,-2388277.98822188,7800.82135026513,48.5821408939988,16775.5953738995,-2577.18311095899
+76,Y,-724977.658463333,-44659.6170999659,35.6876655675306,5092.31777838026,-2837.25474789309
+77,Y,-1794477.91392858,6521.923601348,88.7042922408313,12604.6522325595,-2393.39103443748
+78,Y,-223213.115899978,28443.9701603649,37.3226807484787,1567.84638066104,-1284.75416736837
+79,Y,423046.005849878,9502.16765496074,17.4038852841401,-2971.57718730938,-793.452569769765
+80,Y,-3047818.61588223,7598.41423185622,90.0700126497531,21408.3102848147,-749.371738309082
+81,Y,-2409342.6015377,35261.072039404,47.9286965191158,16923.5564603709,-3048.15567690909
+82,Y,-742814.585466495,-29503.3166005498,7.75349725175401,5217.60709978568,-2729.9120626205
+83,Y,-571579.430647006,-44941.8628170447,85.8317735174233,4014.82500929801,-1269.94347716121
+84,Y,-2195610.2686634,48026.9824672444,3.47886888914346,15422.2676483873,-208.387321904327
+85,Y,-964020.379427545,-5862.59066560875,32.3951971412924,6771.39065366773,-2348.00221246913
+86,Y,-2102214.66994452,-1627.31398929461,65.1915191571454,14766.2426011092,-2448.65166476797
+87,Y,-890649.179337315,-31734.0384124326,73.7172018923155,6256.02004752945,-586.069879271884
+88,Y,-2207063.83218629,14835.6206610657,31.7102632894148,15502.7192420416,-1698.88417254839
+89,Y,-749402.325380223,-49686.6769123602,49.3012898909983,5263.8803991841,-1176.36020313534
+90,Y,-2494089.08559485,234.017339793194,43.1649546520338,17518.8293606888,-2223.27305100155
+91,Y,-758480.09438593,-42219.6177653841,85.476183481532,5327.64404629679,-864.677157209562
+92,Y,-2025827.98191011,2374.67279858794,33.5495503844189,14229.6907309965,-2868.43169850788
+93,Y,-2354065.35735529,-48559.373111767,43.9360775681768,16535.2805868339,-1226.37195019107
+94,Y,-1588621.54314025,-37866.8557345306,22.4186822710487,11158.6853894212,-2716.07040834036
+95,Y,-3175419.95188679,-45432.4026527398,31.3118028803292,22304.6017131089,-666.77340835222
+96,Y,-2152215.92330461,-26966.2051976371,0.258766409063485,15117.4590856704,-32.6895291544268
+97,Y,-547157.630624095,-1300.97533450509,46.2515307967681,3843.28254500137,-2502.56292413987
+98,Y,-2672876.70357122,28750.3814277021,7.66749583919236,18774.6605662742,-1875.23509974759
+99,Y,-2080211.9597305,-40822.549051454,89.438883925997,14611.6921599612,-1948.30990769798
+100,Y,-2578377.05246833,-2300.90575344433,65.926962237196,18110.8804765956,-2076.35142495637
diff --git a/tests/test_classification/test_classification.py b/tests/pytest/test_classification/test_classification.py
similarity index 93%
rename from tests/test_classification/test_classification.py
rename to tests/pytest/test_classification/test_classification.py
index 5de1e0aa..c1c33104 100644
--- a/tests/test_classification/test_classification.py
+++ b/tests/pytest/test_classification/test_classification.py
@@ -76,7 +76,17 @@ def test_sisso_classifier():
 
     feat_space = generate_fs(phi_0, prop, [80], op_set, "classification", 1, 10)
     sisso = SISSOClassifier(
-        feat_space, Unit("m"), prop, prop_test, [80], [20], list(range(10)), 2, 1, 1
+        feat_space,
+        "class",
+        Unit("m"),
+        prop,
+        prop_test,
+        [80],
+        [20],
+        list(range(10)),
+        2,
+        1,
+        1,
     )
     sisso.fit()
 
diff --git a/tests/pytest/test_descriptor_identifier/model_files/test.dat b/tests/pytest/test_descriptor_identifier/model_files/test.dat
new file mode 100644
index 00000000..ebd30213
--- /dev/null
+++ b/tests/pytest/test_descriptor_identifier/model_files/test.dat
@@ -0,0 +1,22 @@
+# c0 + a0 * [(B) / ((A)^2)] + a1 * C
+# Property Label: $Prop$; Unit of the Property: Unitless
+# RMSE: 1.85169904965289e-09; Max AE: 3.25962901115417e-09
+# Coefficients
+# Task     a0                      a1                      c0
+# 0,      -7.215479542045696e+00,  8.227180000000007e+01,  3.141589999622815e+00, 
+# 1,       9.914521341156421e+01, -1.423659000000001e+02, -5.254860000136937e+00, 
+# Feature Rung, Units, and Expressions
+# 0;  2; m^-2 * s;                                         1|0|sq|div; [(B) / ((A)^2)]; $\left(\frac{B}{\left(A^2\right)}\right)$
+# 1;  0; Unitless;                                         2; C; $$
+# Number of Samples Per Task
+# Task;   n_mats_test             
+# 0,      3                     
+# 1,      2                     
+# Test Indexes: [ 0, 1, 2, 60, 61 ]
+
+#Property Value          Property Value (EST)    Feature 0 Value         Feature 1 Value        
+ 1.031303343104370e+06,  1.031303343104372e+06,  5.087963053254981e-08,  1.253528185252710e+04
+ 2.071791819726890e+05,  2.071791819726897e+05,  6.929206698600502e-07,  2.518190198679130e+03
+ 5.945479900349240e+05,  5.945479900349238e+05,  7.732840323401594e-08,  7.226593418953780e+03
+-1.747903770607640e+06, -1.747903770607637e+06,  3.089720438913970e-07,  1.227750827816400e+04
+-6.020310027164250e+05, -6.020310027164250e+05,  7.913240374082342e-08,  4.228721539808830e+03
diff --git a/tests/pytest/test_descriptor_identifier/model_files/train.dat b/tests/pytest/test_descriptor_identifier/model_files/train.dat
new file mode 100644
index 00000000..ca807201
--- /dev/null
+++ b/tests/pytest/test_descriptor_identifier/model_files/train.dat
@@ -0,0 +1,111 @@
+# c0 + a0 * [(B) / ((A)^2)] + a1 * C
+# Property Label: $Prop$; Unit of the Property: Unitless
+# RMSE: 2.74896402033522e-09; Max AE: 9.31322574615479e-09
+# Coefficients
+# Task     a0                      a1                      c0
+# 0,      -7.215479542045696e+00,  8.227180000000007e+01,  3.141589999622815e+00, 
+# 1,       9.914521341156421e+01, -1.423659000000001e+02, -5.254860000136937e+00, 
+# Feature Rung, Units, and Expressions
+# 0;  2; m^-2 * s;                                         1|0|sq|div; [(B) / ((A)^2)]; $\left(\frac{B}{\left(A^2\right)}\right)$
+# 1;  0; Unitless;                                         2; C; $$
+# Number of Samples Per Task
+# Task;   n_mats_train            
+# 0,      57                    
+# 1,      38                    
+
+#Property Value          Property Value (EST)    Feature 0 Value         Feature 1 Value        
+ 1.431871750857350e+06,  1.431871750857357e+06,  2.697060006837511e-06,  1.740412400466280e+04
+ 2.132341513916110e+06,  2.132341513916109e+06,  2.065168448030928e-08,  2.591821708442330e+04
+ 1.849456859032140e+06,  1.849456859032137e+06,  1.626134513838345e-08,  2.247980131031840e+04
+ 4.163774736839510e+05,  4.163774736839511e+05,  2.378687110561754e-08,  5.060960524677020e+03
+ 1.834852243834940e+06,  1.834852243834940e+06,  2.057971012989485e-03,  2.230228483021140e+04
+ 2.030615002138700e+06,  2.030615002138702e+06,  2.033750916695686e-08,  2.468174830924870e+04
+ 4.182049069917290e+05,  4.182049069917287e+05,  5.342688466073691e-08,  5.083172671585090e+03
+ 1.600764653367910e+06,  1.600764653367909e+06,  1.574249399443083e-07,  1.945698905067160e+04
+-2.374423038913250e+05, -2.374423038913255e+05,  9.522984388019029e-08, -2.886109766416170e+03
+ 3.895694030199360e+05,  3.895694030199360e+05,  2.998984401431883e-07,  4.735112899342180e+03
+ 1.097874595585220e+06,  1.097874595585217e+06,  8.407905381849567e-08,  1.334444431744320e+04
+ 8.965124261335440e+05,  8.965124261335426e+05,  6.976681145109002e-08,  1.089692074980790e+04
+ 1.247533441655420e+04,  1.247533441655385e+04,  4.240950437796524e-07,  1.515974225629470e+02
+ 6.432185312889291e+05,  6.432185312889284e+05,  2.623773234135730e-08,  7.818175726058230e+03
+ 8.880982463097370e+05,  8.880982463097404e+05,  5.111909378807164e-08,  1.079464779815330e+04
+ 1.636015660236120e+06,  1.636015660236114e+06,  1.749950739140699e-05,  1.988545915820950e+04
+ 1.523022284718580e+06,  1.523022284718574e+06,  7.444571741652756e-09,  1.851204353288280e+04
+-1.806691656141680e+04, -1.806691656141721e+04,  5.124135740795190e-09, -2.196385414124870e+02
+ 7.535749948523890e+05,  7.535749948523892e+05,  2.530373185333834e-04,  9.159540147270080e+03
+ 4.846796705070550e+05,  4.846796705070554e+05,  1.516116594933527e-08,  5.891162329220520e+03
+ 1.418886295186410e+06,  1.418886295186410e+06,  5.602337173621954e-08,  1.724628795768190e+04
+ 7.468643665926130e+05,  7.468643665926139e+05,  7.350449259326487e-08,  9.077973558414231e+03
+ 8.266764695919290e+05,  8.266764695919303e+05,  1.227812638317768e-08,  1.004807635182430e+04
+ 9.048709052557090e+05,  9.048709052557111e+05,  9.684184878175625e-06,  1.099851666957070e+04
+ 1.081673040470480e+06,  1.081673040470487e+06,  3.095754649186859e-08,  1.314751711863250e+04
+ 1.602766311029420e+06,  1.602766311029422e+06,  1.942687305315588e-07,  1.948131886552650e+04
+ 8.482960813663350e+05,  8.482960813663343e+05,  1.994254893455139e-07,  1.031085912521390e+04
+ 8.819870504835790e+05,  8.819870504835781e+05,  5.815040616166881e-08,  1.072036723268480e+04
+ 1.384967839241260e+06,  1.384967839241261e+06,  4.585957732720945e-08,  1.683401478576610e+04
+ 1.435243993088210e+06,  1.435243993088209e+06,  3.530173492054727e-08,  1.744511304600680e+04
+ 1.482822441554200e+06,  1.482822441554193e+06,  2.365805697619391e-08,  1.802342114751790e+04
+ 1.159462504579730e+06,  1.159462504579732e+06,  3.955375090312368e-09,  1.409303507386200e+04
+ 1.385445915520980e+06,  1.385445915520977e+06,  7.215270937489370e-09,  1.683982572316430e+04
+ 1.078840903789160e+06,  1.078840903789154e+06,  7.719292312078009e-08,  1.311309296988410e+04
+ 3.220723182574270e+05,  3.220723182574275e+05,  6.725877658255970e-08,  3.914697097522030e+03
+ 1.547503571926120e+06,  1.547503571926123e+06,  3.349707138495564e-07,  1.880960949363620e+04
+ 1.174714507507300e+06,  1.174714507507300e+06,  4.195999687630714e-08,  1.427842062429170e+04
+ 9.487534028084230e+04,  9.487534028084182e+04,  6.018658340475335e-07,  1.153155743464770e+03
+ 3.621603641205080e+05,  3.621603641205075e+05,  3.428967326845969e-09,  4.401960605341470e+03
+ 6.736173787551570e+05,  6.736173787551573e+05,  9.033100765464531e-08,  8.187668644247590e+03
+ 8.823510522257930e+05,  8.823510522257956e+05,  3.039762576167721e-08,  1.072479161311670e+04
+ 2.240093900663180e+04,  2.240093900663139e+04,  2.331427770314304e-07,  2.722414900161900e+02
+ 1.781136797772570e+06,  1.781136797772572e+06,  7.253817683831683e-08,  2.164938236653500e+04
+ 6.214166082804410e+05,  6.214166082804408e+05,  6.796324227463879e-08,  7.553176990061370e+03
+ 7.504118855811940e+05,  7.504118855811941e+05,  2.948893254754258e-08,  9.121093059728930e+03
+ 1.525062498013260e+06,  1.525062498013265e+06,  4.374201765323614e-08,  1.853684198502500e+04
+ 6.790682085352920e+05,  6.790682085352917e+05,  3.117208882706876e-08,  8.253922570619779e+03
+ 4.478266872045060e+05,  4.478266872045057e+05,  3.922258820073917e-06,  5.443220467314520e+03
+ 3.368902807230350e+05,  3.368902807230347e+05,  3.974555365702647e-08,  4.094806958560790e+03
+ 4.680791492170390e+05,  4.680791492170393e+05,  7.178175366749162e-09,  5.689385763130150e+03
+ 1.404060535190450e+06,  1.404060535190449e+06,  3.310728859521713e-07,  1.706608331898460e+04
+ 1.827150953904310e+06,  1.827150953904310e+06,  5.763294101941985e-08,  2.220867675576230e+04
+-3.339445722172610e+04, -3.339445722172646e+04,  1.705876199821157e-07, -4.059422403605510e+02
+ 1.443453595965310e+06,  1.443453595965309e+06,  3.511555629631250e-08,  1.754489939901110e+04
+ 1.550858369653510e+06,  1.550858369653510e+06,  1.829091447530946e-08,  1.885038650015730e+04
+ 3.296237786603260e+05,  3.296237786603260e+05,  9.525996048208857e-07,  4.006483838656740e+03
+ 5.963622714767930e+05,  5.963622714767937e+05,  1.373023008233051e-06,  7.248645707237480e+03
+-9.149156549019570e+05, -9.149156549019575e+05,  1.052237002617895e-07,  6.426471507941080e+03
+-1.293976980851750e+06, -1.293976980851752e+06,  1.242384899016401e-08,  9.089056620953350e+03
+-5.569920711895199e+05, -5.569920711895211e+05,  3.136787828168589e-08,  3.912361150617040e+03
+-2.294033396379730e+06, -2.294033396379733e+06,  2.185232400397477e-08,  1.611360685053020e+04
+-1.213629166754780e+06, -1.213629166754777e+06,  2.705431178856616e-08,  8.524681204540260e+03
+ 2.928090050997690e+05,  2.928090050997690e+05,  3.294636654535805e-08, -2.056772443095590e+03
+-2.235342648617320e+06, -2.235342648617312e+06,  2.123583872053065e-06,  1.570135400378780e+04
+-3.732932410426960e+06, -3.732932410426958e+06,  1.584814746579403e-08,  2.622065505551910e+04
+-2.522104747768270e+05, -2.522104747768275e+05,  2.866436191254619e-07,  1.771528293961170e+03
+-9.888966566957420e+04, -9.888966566957440e+04,  2.468066756691430e-08,  6.945793256111270e+02
+-1.370204376681970e+06, -1.370204376681970e+06,  7.268458266839257e-08,  9.624489585140651e+03
+-3.104420517264010e+06, -3.104420517264019e+06,  4.422776414490086e-08,  2.180589075339250e+04
+-2.388277988221880e+06, -2.388277988221888e+06,  7.983548813793096e-07,  1.677559537389950e+04
+-7.249776584633330e+05, -7.249776584633330e+05,  1.789320551006325e-08,  5.092317778380260e+03
+-1.794477913928580e+06, -1.794477913928585e+06,  2.085418494861554e-06,  1.260465223255950e+04
+-2.232131158999780e+05, -2.232131158999782e+05,  4.613095030252132e-08,  1.567846380661040e+03
+ 4.230460058498780e+05,  4.230460058498792e+05,  1.927528616754291e-07, -2.971577187309380e+03
+-3.047818615882230e+06, -3.047818615882234e+06,  1.560034821681974e-06,  2.140831028481470e+04
+-2.409342601537700e+06, -2.409342601537698e+06,  3.854824353113469e-08,  1.692355646037090e+04
+-7.428145854664950e+05, -7.428145854664957e+05,  8.907502601228915e-09,  5.217607099785680e+03
+-5.715794306470060e+05, -5.715794306470070e+05,  4.249579387725456e-08,  4.014825009298010e+03
+-2.195610268663400e+06, -2.195610268663394e+06,  1.508229600050627e-09,  1.542226764838730e+04
+-9.640203794275450e+05, -9.640203794275469e+05,  9.425436737159602e-07,  6.771390653667730e+03
+-2.102214669944520e+06, -2.102214669944522e+06,  2.461775162911714e-05,  1.476624260110920e+04
+-8.906491793373150e+05, -8.906491793373162e+05,  7.320119248823469e-08,  6.256020047529450e+03
+-2.207063832186290e+06, -2.207063832186288e+06,  1.440749268450898e-07,  1.550271924204160e+04
+-7.494023253802230e+05, -7.494023253802244e+05,  1.997001442624077e-08,  5.263880399184100e+03
+-2.494089085594850e+06, -2.494089085594850e+06,  7.881977896794234e-04,  1.751882936068880e+04
+-7.584800943859300e+05, -7.584800943859307e+05,  4.795308237612606e-08,  5.327644046296790e+03
+-2.025827981910110e+06, -2.025827981910114e+06,  5.949481923044429e-06,  1.422969073099650e+04
+-2.354065357355290e+06, -2.354065357355291e+06,  1.863267201710832e-08,  1.653528058683390e+04
+-1.588621543140250e+06, -1.588621543140251e+06,  1.563477342062043e-08,  1.115868538942120e+04
+-3.175419951886790e+06, -3.175419951886789e+06,  1.516968856813277e-08,  2.230460171310890e+04
+-2.152215923304610e+06, -2.152215923304610e+06,  3.558510316535865e-10,  1.511745908567040e+04
+-5.471576306240950e+05, -5.471576306240946e+05,  2.732675013220533e-05,  3.843282545001370e+03
+-2.672876703571220e+06, -2.672876703571219e+06,  9.276119341407247e-09,  1.877466056627420e+04
+-2.080211959730500e+06, -2.080211959730501e+06,  5.366932486308832e-08,  1.461169215996120e+04
+-2.578377052468330e+06, -2.578377052468333e+06,  1.245275388419069e-05,  1.811088047659560e+04
diff --git a/tests/pytest/test_descriptor_identifier/test_log_regressor.py b/tests/pytest/test_descriptor_identifier/test_log_regressor.py
new file mode 100644
index 00000000..bfb78239
--- /dev/null
+++ b/tests/pytest/test_descriptor_identifier/test_log_regressor.py
@@ -0,0 +1,60 @@
+import shutil
+import numpy as np
+from sissopp import (
+    FeatureNode,
+    generate_fs,
+    Unit,
+    initialize_values_arr,
+    SISSOLogRegressor,
+)
+
+
+def test_sisso_log_regressor():
+    initialize_values_arr(90, 10, 10)
+    phi_0 = [
+        FeatureNode(
+            ff,
+            f"feat_{ff}",
+            np.random.random(90) * 1e2,
+            np.random.random(10) * 1e2,
+            Unit(),
+        )
+        for ff in range(10)
+    ]
+
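+    # Synthetic property c0 * f0^a0 * f2^a1, which a log regressor can fit exactly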
+    a0 = 0.95
+    a1 = 1.01
+    c0 = np.random.random() * 100.0
+    prop = c0 * np.power(phi_0[0].value, a0) * np.power(phi_0[2].value, a1)
+    prop_test = (
+        c0 * np.power(phi_0[0].test_value, a0) * np.power(phi_0[2].test_value, a1)
+    )
+
+    op_set = ["add", "sub", "mult", "sq", "cb", "sqrt", "cbrt"]
+
+    feat_space = generate_fs(phi_0, prop, [90], op_set, "log_regression", 0, 5)
+
+    sisso = SISSOLogRegressor(
+        feat_space,
+        "prop",
+        Unit("m"),
+        prop,
+        prop_test,
+        [90],
+        [10],
+        list(range(10)),
+        2,
+        1,
+        1,
+        False,
+    )
+    sisso.fit()
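+    # Remove the output directories written during the fit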
+    shutil.rmtree("models/")
+    shutil.rmtree("feature_space/")
+
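+    # The selected model should reproduce the synthetic property almost exactly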
+    assert sisso.models[1][0].rmse < 1e-7
+    assert sisso.models[1][0].test_rmse < 1e-7
+
+
+if __name__ == "__main__":
+    test_sisso_log_regressor()
diff --git a/tests/test_descriptor_identifier/test_model.py b/tests/pytest/test_descriptor_identifier/test_model.py
similarity index 88%
rename from tests/test_descriptor_identifier/test_model.py
rename to tests/pytest/test_descriptor_identifier/test_model.py
index 0f156dba..165eb1f9 100644
--- a/tests/test_descriptor_identifier/test_model.py
+++ b/tests/pytest/test_descriptor_identifier/test_model.py
@@ -21,12 +21,12 @@ def test_model():
     assert np.all(np.abs(model.train_error - (model.fit - model.prop_train)) < 1e-12)
     assert np.all(np.abs(model.test_error - (model.predict - model.prop_test)) < 1e-12)
 
-    assert model.feats[0].postfix_expr == "1|0|div|0|div"
+    assert model.feats[0].postfix_expr == "1|0|sq|div"
     assert model.feats[1].postfix_expr == "2"
 
     actual_coefs = [
-        [-7.215479313269452e00, 8.227180000000008e01, 3.141589999289981e0],
-        [9.914521078639838e01, -1.423659000000000e02, -5.254859999998278e00],
+        [-7.215479542045696e0, 8.227180000000007e1, 3.141589999622815e0],
+        [9.914521341156421e1, -1.423659000000001e2, -5.254860000136937e0],
     ]
     assert np.all(
         [
diff --git a/tests/test_descriptor_identifier/test_regressor.py b/tests/pytest/test_descriptor_identifier/test_regressor.py
similarity index 98%
rename from tests/test_descriptor_identifier/test_regressor.py
rename to tests/pytest/test_descriptor_identifier/test_regressor.py
index 9b13f8a7..662aa945 100644
--- a/tests/test_descriptor_identifier/test_regressor.py
+++ b/tests/pytest/test_descriptor_identifier/test_regressor.py
@@ -40,6 +40,7 @@ def test_sisso_regressor():
 
     sisso = SISSORegressor(
         feat_space,
+        "prop",
         Unit("m"),
         prop,
         prop_test,
diff --git a/tests/test_feature_creation/test_feat_generation/test_abs_diff_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_abs_diff_node.py
similarity index 100%
rename from tests/test_feature_creation/test_feat_generation/test_abs_diff_node.py
rename to tests/pytest/test_feature_creation/test_feat_generation/test_abs_diff_node.py
diff --git a/tests/test_feature_creation/test_feat_generation/test_abs_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_abs_node.py
similarity index 100%
rename from tests/test_feature_creation/test_feat_generation/test_abs_node.py
rename to tests/pytest/test_feature_creation/test_feat_generation/test_abs_node.py
diff --git a/tests/test_feature_creation/test_feat_generation/test_add_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_add_node.py
similarity index 100%
rename from tests/test_feature_creation/test_feat_generation/test_add_node.py
rename to tests/pytest/test_feature_creation/test_feat_generation/test_add_node.py
diff --git a/tests/test_feature_creation/test_feat_generation/test_cb_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_cb_node.py
similarity index 100%
rename from tests/test_feature_creation/test_feat_generation/test_cb_node.py
rename to tests/pytest/test_feature_creation/test_feat_generation/test_cb_node.py
diff --git a/tests/test_feature_creation/test_feat_generation/test_cbrt_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_cbrt_node.py
similarity index 100%
rename from tests/test_feature_creation/test_feat_generation/test_cbrt_node.py
rename to tests/pytest/test_feature_creation/test_feat_generation/test_cbrt_node.py
diff --git a/tests/test_feature_creation/test_feat_generation/test_cos_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_cos_node.py
similarity index 100%
rename from tests/test_feature_creation/test_feat_generation/test_cos_node.py
rename to tests/pytest/test_feature_creation/test_feat_generation/test_cos_node.py
diff --git a/tests/test_feature_creation/test_feat_generation/test_div_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_div_node.py
similarity index 100%
rename from tests/test_feature_creation/test_feat_generation/test_div_node.py
rename to tests/pytest/test_feature_creation/test_feat_generation/test_div_node.py
diff --git a/tests/test_feature_creation/test_feat_generation/test_exp_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_exp_node.py
similarity index 100%
rename from tests/test_feature_creation/test_feat_generation/test_exp_node.py
rename to tests/pytest/test_feature_creation/test_feat_generation/test_exp_node.py
diff --git a/tests/test_feature_creation/test_feat_generation/test_inv_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_inv_node.py
similarity index 100%
rename from tests/test_feature_creation/test_feat_generation/test_inv_node.py
rename to tests/pytest/test_feature_creation/test_feat_generation/test_inv_node.py
diff --git a/tests/test_feature_creation/test_feat_generation/test_log_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_log_node.py
similarity index 100%
rename from tests/test_feature_creation/test_feat_generation/test_log_node.py
rename to tests/pytest/test_feature_creation/test_feat_generation/test_log_node.py
diff --git a/tests/test_feature_creation/test_feat_generation/test_mult_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_mult_node.py
similarity index 100%
rename from tests/test_feature_creation/test_feat_generation/test_mult_node.py
rename to tests/pytest/test_feature_creation/test_feat_generation/test_mult_node.py
diff --git a/tests/test_feature_creation/test_feat_generation/test_neg_exp_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_neg_exp_node.py
similarity index 100%
rename from tests/test_feature_creation/test_feat_generation/test_neg_exp_node.py
rename to tests/pytest/test_feature_creation/test_feat_generation/test_neg_exp_node.py
diff --git a/tests/test_feature_creation/test_feat_generation/test_sin_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_sin_node.py
similarity index 100%
rename from tests/test_feature_creation/test_feat_generation/test_sin_node.py
rename to tests/pytest/test_feature_creation/test_feat_generation/test_sin_node.py
diff --git a/tests/test_feature_creation/test_feat_generation/test_six_pow_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_six_pow_node.py
similarity index 100%
rename from tests/test_feature_creation/test_feat_generation/test_six_pow_node.py
rename to tests/pytest/test_feature_creation/test_feat_generation/test_six_pow_node.py
diff --git a/tests/test_feature_creation/test_feat_generation/test_sq_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_sq_node.py
similarity index 100%
rename from tests/test_feature_creation/test_feat_generation/test_sq_node.py
rename to tests/pytest/test_feature_creation/test_feat_generation/test_sq_node.py
diff --git a/tests/test_feature_creation/test_feat_generation/test_sqrt_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_sqrt_node.py
similarity index 100%
rename from tests/test_feature_creation/test_feat_generation/test_sqrt_node.py
rename to tests/pytest/test_feature_creation/test_feat_generation/test_sqrt_node.py
diff --git a/tests/test_feature_creation/test_feat_generation/test_sub_node.py b/tests/pytest/test_feature_creation/test_feat_generation/test_sub_node.py
similarity index 100%
rename from tests/test_feature_creation/test_feat_generation/test_sub_node.py
rename to tests/pytest/test_feature_creation/test_feat_generation/test_sub_node.py
diff --git a/tests/test_feature_creation/test_feature_space/test_feature_space.py b/tests/pytest/test_feature_creation/test_feature_space/test_feature_space.py
similarity index 100%
rename from tests/test_feature_creation/test_feature_space/test_feature_space.py
rename to tests/pytest/test_feature_creation/test_feature_space/test_feature_space.py
diff --git a/tests/test_feature_creation/test_feature_space/test_units.py b/tests/pytest/test_feature_creation/test_feature_space/test_units.py
similarity index 100%
rename from tests/test_feature_creation/test_feature_space/test_units.py
rename to tests/pytest/test_feature_creation/test_feature_space/test_units.py
diff --git a/tests/test_sisso.py b/tests/pytest/test_sisso.py
similarity index 100%
rename from tests/test_sisso.py
rename to tests/pytest/test_sisso.py
diff --git a/tests/sisso.json b/tests/sisso.json
deleted file mode 100644
index 830a1079..00000000
--- a/tests/sisso.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
-    "desc_dim": 2,
-    "n_sis_select": 10,
-    "max_rung": 2,
-    "n_residual": 1,
-    "data_file": "data.csv",
-    "property_key": "Prop",
-    "task_key": "Task",
-    "leave_out_frac": 0.2,
-    "n_rung_generate": 0,
-    "n_models_store": 10,
-    "leave_out_inds": [],
-    "fix_intercept": false,
-    "opset": ["add", "sub", "mult", "div", "exp", "inv", "sq", "cb", "sqrt", "cbrt", "abs_diff"]
-}
diff --git a/tests/test_descriptor_identifier/model_files/test.dat b/tests/test_descriptor_identifier/model_files/test.dat
deleted file mode 100644
index 5c328f39..00000000
--- a/tests/test_descriptor_identifier/model_files/test.dat
+++ /dev/null
@@ -1,36 +0,0 @@
-# c0 + a0 * [([(B) / (A)]) / (A)] + a1 * C
-# RMSE: 2.40529777281224e-09; Max AE: 1.02445483207703e-08
-# Coefficients
-# Task;    a0                      a1                      c0
-# 0,      -7.215479313269452e+00,  8.227180000000008e+01,  3.141589999289981e+00, 
-# 1,       9.914521078639838e+01, -1.423659000000000e+02, -5.254859999998278e+00, 
-# Feature Rung, Units, and Expressions
-# 0,  2, 1 / m^2 * s,                                      1|0|div|0|div,[([(B) / (A)]) / (A)]
-# 1,  0, ,                                                 2,C
-# Number of Samples Per Task
-# Task;   n_mats_test             
-# 0,      12                    
-# 1,      8                     
-# Test Indexes: [ 2, 4, 8, 11, 16, 32, 40, 42, 47, 50, 51, 57, 62, 64, 68, 71, 84, 86, 88, 93 ]
-
-#Property Value          Property Value (EST)    Feature 0 Value         Feature 1 Value        
- 5.945479900349240e+05,  5.945479900349235e+05,  7.732840323401594e-08,  7.226593418953780e+03
- 2.132341513916110e+06,  2.132341513916109e+06,  2.065168448030928e-08,  2.591821708442330e+04
- 2.030615002138700e+06,  2.030615002138702e+06,  2.033750916695687e-08,  2.468174830924870e+04
--2.374423038913250e+05, -2.374423038913259e+05,  9.522984388019029e-08, -2.886109766416170e+03
- 6.432185312889291e+05,  6.432185312889281e+05,  2.623773234135730e-08,  7.818175726058230e+03
- 1.435243993088210e+06,  1.435243993088208e+06,  3.530173492054727e-08,  1.744511304600680e+04
- 9.487534028084230e+04,  9.487534028084150e+04,  6.018658340475335e-07,  1.153155743464770e+03
- 6.736173787551570e+05,  6.736173787551571e+05,  9.033100765464530e-08,  8.187668644247590e+03
- 7.504118855811940e+05,  7.504118855811938e+05,  2.948893254754258e-08,  9.121093059728930e+03
- 4.478266872045060e+05,  4.478266872045055e+05,  3.922258820073917e-06,  5.443220467314520e+03
- 3.368902807230350e+05,  3.368902807230345e+05,  3.974555365702647e-08,  4.094806958560790e+03
- 1.550858369653510e+06,  1.550858369653510e+06,  1.829091447530946e-08,  1.885038650015730e+04
--9.149156549019570e+05, -9.149156549019564e+05,  1.052237002617895e-07,  6.426471507941080e+03
--5.569920711895199e+05, -5.569920711895204e+05,  3.136787828168589e-08,  3.912361150617040e+03
--2.235342648617320e+06, -2.235342648617310e+06,  2.123583872053065e-06,  1.570135400378780e+04
--9.888966566957420e+04, -9.888966566957417e+04,  2.468066756691430e-08,  6.945793256111270e+02
--9.640203794275450e+05, -9.640203794275458e+05,  9.425436737159603e-07,  6.771390653667730e+03
--8.906491793373150e+05, -8.906491793373151e+05,  7.320119248823469e-08,  6.256020047529450e+03
--7.494023253802230e+05, -7.494023253802236e+05,  1.997001442624077e-08,  5.263880399184100e+03
--1.588621543140250e+06, -1.588621543140249e+06,  1.563477342062043e-08,  1.115868538942120e+04
diff --git a/tests/test_descriptor_identifier/model_files/train.dat b/tests/test_descriptor_identifier/model_files/train.dat
deleted file mode 100644
index 88812c21..00000000
--- a/tests/test_descriptor_identifier/model_files/train.dat
+++ /dev/null
@@ -1,95 +0,0 @@
-# c0 + a0 * [([(B) / (A)]) / (A)] + a1 * C
-# RMSE: 2.85813215504592e-09; Max AE: 7.91624188423157e-09
-# Coefficients
-# Task;    a0                      a1                      c0
-# 0,      -7.215479313269452e+00,  8.227180000000008e+01,  3.141589999289981e+00, 
-# 1,       9.914521078639838e+01, -1.423659000000000e+02, -5.254859999998278e+00, 
-# Feature Rung, Units, and Expressions
-# 0,  2, 1 / m^2 * s,                                      1|0|div|0|div,[([(B) / (A)]) / (A)]
-# 1,  0, ,                                                 2,C
-# Number of Samples Per Task
-# Task;   n_mats_train            
-# 0,      48                    
-# 1,      32                    
-
-#Property Value          Property Value (EST)    Feature 0 Value         Feature 1 Value        
- 1.031303343104370e+06,  1.031303343104372e+06,  5.087963053254981e-08,  1.253528185252710e+04
- 2.071791819726890e+05,  2.071791819726894e+05,  6.929206698600503e-07,  2.518190198679130e+03
- 1.431871750857350e+06,  1.431871750857357e+06,  2.697060006837512e-06,  1.740412400466280e+04
- 1.849456859032140e+06,  1.849456859032137e+06,  1.626134513838345e-08,  2.247980131031840e+04
- 4.163774736839510e+05,  4.163774736839510e+05,  2.378687110561754e-08,  5.060960524677020e+03
- 1.834852243834940e+06,  1.834852243834940e+06,  2.057971012989485e-03,  2.230228483021140e+04
- 4.182049069917290e+05,  4.182049069917285e+05,  5.342688466073691e-08,  5.083172671585090e+03
- 1.600764653367910e+06,  1.600764653367909e+06,  1.574249399443082e-07,  1.945698905067160e+04
- 3.895694030199360e+05,  3.895694030199358e+05,  2.998984401431883e-07,  4.735112899342180e+03
- 1.097874595585220e+06,  1.097874595585217e+06,  8.407905381849565e-08,  1.334444431744320e+04
- 8.965124261335440e+05,  8.965124261335423e+05,  6.976681145109002e-08,  1.089692074980790e+04
- 1.247533441655420e+04,  1.247533441655352e+04,  4.240950437796523e-07,  1.515974225629470e+02
- 8.880982463097370e+05,  8.880982463097401e+05,  5.111909378807165e-08,  1.079464779815330e+04
- 1.636015660236120e+06,  1.636015660236114e+06,  1.749950739140699e-05,  1.988545915820950e+04
- 1.523022284718580e+06,  1.523022284718574e+06,  7.444571741652756e-09,  1.851204353288280e+04
--1.806691656141680e+04, -1.806691656141755e+04,  5.124135740795190e-09, -2.196385414124870e+02
- 7.535749948523890e+05,  7.535749948523891e+05,  2.530373185333834e-04,  9.159540147270080e+03
- 4.846796705070550e+05,  4.846796705070552e+05,  1.516116594933527e-08,  5.891162329220520e+03
- 1.418886295186410e+06,  1.418886295186410e+06,  5.602337173621954e-08,  1.724628795768190e+04
- 7.468643665926130e+05,  7.468643665926136e+05,  7.350449259326487e-08,  9.077973558414231e+03
- 8.266764695919290e+05,  8.266764695919300e+05,  1.227812638317768e-08,  1.004807635182430e+04
- 9.048709052557090e+05,  9.048709052557108e+05,  9.684184878175625e-06,  1.099851666957070e+04
- 1.081673040470480e+06,  1.081673040470486e+06,  3.095754649186858e-08,  1.314751711863250e+04
- 1.602766311029420e+06,  1.602766311029422e+06,  1.942687305315588e-07,  1.948131886552650e+04
- 8.482960813663350e+05,  8.482960813663341e+05,  1.994254893455140e-07,  1.031085912521390e+04
- 8.819870504835790e+05,  8.819870504835779e+05,  5.815040616166881e-08,  1.072036723268480e+04
- 1.384967839241260e+06,  1.384967839241261e+06,  4.585957732720945e-08,  1.683401478576610e+04
- 1.482822441554200e+06,  1.482822441554193e+06,  2.365805697619391e-08,  1.802342114751790e+04
- 1.159462504579730e+06,  1.159462504579731e+06,  3.955375090312369e-09,  1.409303507386200e+04
- 1.385445915520980e+06,  1.385445915520977e+06,  7.215270937489370e-09,  1.683982572316430e+04
- 1.078840903789160e+06,  1.078840903789154e+06,  7.719292312078010e-08,  1.311309296988410e+04
- 3.220723182574270e+05,  3.220723182574273e+05,  6.725877658255970e-08,  3.914697097522030e+03
- 1.547503571926120e+06,  1.547503571926123e+06,  3.349707138495565e-07,  1.880960949363620e+04
- 1.174714507507300e+06,  1.174714507507300e+06,  4.195999687630714e-08,  1.427842062429170e+04
- 3.621603641205080e+05,  3.621603641205073e+05,  3.428967326845969e-09,  4.401960605341470e+03
- 8.823510522257930e+05,  8.823510522257955e+05,  3.039762576167721e-08,  1.072479161311670e+04
- 2.240093900663180e+04,  2.240093900663106e+04,  2.331427770314304e-07,  2.722414900161900e+02
- 1.781136797772570e+06,  1.781136797772572e+06,  7.253817683831682e-08,  2.164938236653500e+04
- 6.214166082804410e+05,  6.214166082804406e+05,  6.796324227463879e-08,  7.553176990061370e+03
- 1.525062498013260e+06,  1.525062498013265e+06,  4.374201765323614e-08,  1.853684198502500e+04
- 6.790682085352920e+05,  6.790682085352915e+05,  3.117208882706876e-08,  8.253922570619779e+03
- 4.680791492170390e+05,  4.680791492170391e+05,  7.178175366749161e-09,  5.689385763130150e+03
- 1.404060535190450e+06,  1.404060535190449e+06,  3.310728859521714e-07,  1.706608331898460e+04
- 1.827150953904310e+06,  1.827150953904310e+06,  5.763294101941985e-08,  2.220867675576230e+04
--3.339445722172610e+04, -3.339445722172679e+04,  1.705876199821157e-07, -4.059422403605510e+02
- 1.443453595965310e+06,  1.443453595965309e+06,  3.511555629631250e-08,  1.754489939901110e+04
- 3.296237786603260e+05,  3.296237786603257e+05,  9.525996048208856e-07,  4.006483838656740e+03
- 5.963622714767930e+05,  5.963622714767933e+05,  1.373023008233051e-06,  7.248645707237480e+03
--1.747903770607640e+06, -1.747903770607635e+06,  3.089720438913970e-07,  1.227750827816400e+04
--6.020310027164250e+05, -6.020310027164243e+05,  7.913240374082342e-08,  4.228721539808830e+03
--1.293976980851750e+06, -1.293976980851751e+06,  1.242384899016401e-08,  9.089056620953350e+03
--2.294033396379730e+06, -2.294033396379731e+06,  2.185232400397477e-08,  1.611360685053020e+04
--1.213629166754780e+06, -1.213629166754776e+06,  2.705431178856616e-08,  8.524681204540260e+03
- 2.928090050997690e+05,  2.928090050997689e+05,  3.294636654535805e-08, -2.056772443095590e+03
--3.732932410426960e+06, -3.732932410426955e+06,  1.584814746579403e-08,  2.622065505551910e+04
--2.522104747768270e+05, -2.522104747768271e+05,  2.866436191254619e-07,  1.771528293961170e+03
--1.370204376681970e+06, -1.370204376681969e+06,  7.268458266839257e-08,  9.624489585140651e+03
--3.104420517264010e+06, -3.104420517264016e+06,  4.422776414490086e-08,  2.180589075339250e+04
--2.388277988221880e+06, -2.388277988221886e+06,  7.983548813793096e-07,  1.677559537389950e+04
--7.249776584633330e+05, -7.249776584633322e+05,  1.789320551006325e-08,  5.092317778380260e+03
--1.794477913928580e+06, -1.794477913928583e+06,  2.085418494861554e-06,  1.260465223255950e+04
--2.232131158999780e+05, -2.232131158999779e+05,  4.613095030252133e-08,  1.567846380661040e+03
- 4.230460058498780e+05,  4.230460058498789e+05,  1.927528616754292e-07, -2.971577187309380e+03
--3.047818615882230e+06, -3.047818615882231e+06,  1.560034821681974e-06,  2.140831028481470e+04
--2.409342601537700e+06, -2.409342601537696e+06,  3.854824353113469e-08,  1.692355646037090e+04
--7.428145854664950e+05, -7.428145854664948e+05,  8.907502601228917e-09,  5.217607099785680e+03
--5.715794306470060e+05, -5.715794306470063e+05,  4.249579387725456e-08,  4.014825009298010e+03
--2.195610268663400e+06, -2.195610268663392e+06,  1.508229600050627e-09,  1.542226764838730e+04
--2.102214669944520e+06, -2.102214669944520e+06,  2.461775162911713e-05,  1.476624260110920e+04
--2.207063832186290e+06, -2.207063832186286e+06,  1.440749268450898e-07,  1.550271924204160e+04
--2.494089085594850e+06, -2.494089085594850e+06,  7.881977896794234e-04,  1.751882936068880e+04
--7.584800943859300e+05, -7.584800943859298e+05,  4.795308237612606e-08,  5.327644046296790e+03
--2.025827981910110e+06, -2.025827981910112e+06,  5.949481923044429e-06,  1.422969073099650e+04
--2.354065357355290e+06, -2.354065357355289e+06,  1.863267201710832e-08,  1.653528058683390e+04
--3.175419951886790e+06, -3.175419951886786e+06,  1.516968856813277e-08,  2.230460171310890e+04
--2.152215923304610e+06, -2.152215923304608e+06,  3.558510316535865e-10,  1.511745908567040e+04
--5.471576306240950e+05, -5.471576306240940e+05,  2.732675013220533e-05,  3.843282545001370e+03
--2.672876703571220e+06, -2.672876703571216e+06,  9.276119341407247e-09,  1.877466056627420e+04
--2.080211959730500e+06, -2.080211959730499e+06,  5.366932486308832e-08,  1.461169215996120e+04
--2.578377052468330e+06, -2.578377052468331e+06,  1.245275388419069e-05,  1.811088047659560e+04
-- 
GitLab