diff --git a/nn_regression.ipynb b/nn_regression.ipynb
index 241a967031f8480a1b0678b3ba1d593ce9a883fe..65fe3da5a49a5fc1ab11bc4a799af1375167e2fa 100644
--- a/nn_regression.ipynb
+++ b/nn_regression.ipynb
@@ -172,7 +172,7 @@
     "\n",
     "The ReLU activation function is most frequently used. Non-linear functions are essential to increase the space of possible (complex) functions that the model can learn. If  no activation function would be used, i.e., the identity - also called *linear activation function*- the class of possible functions that the model can represent would be drastically reduced.\n",
     "\n",
-    "![activation_functions.png](./assets/Neural_network_regression/activation_functions.png)"
+    "![activation_functions.png](./assets/nn_regression/activation_functions.png)"
    ]
   },
   {
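As a minimal sketch of the point made in this markdown cell (the weights, array sizes, and helper names below are illustrative, not taken from the notebook): stacking two layers with the identity activation collapses into a single linear map, while inserting ReLU between them does not.

```python
import numpy as np

rng = np.random.default_rng(0)
W1, W2 = rng.normal(size=(3, 3)), rng.normal(size=(3, 3))  # weights of two stacked layers
x = rng.normal(size=3)                                      # an example input

def relu(z):
    # ReLU: element-wise max(z, 0)
    return np.maximum(z, 0.0)

# With the identity (linear) activation, two layers collapse into one linear map W2 @ W1.
linear_two_layers = W2 @ (W1 @ x)
collapsed = (W2 @ W1) @ x
assert np.allclose(linear_two_layers, collapsed)

# With ReLU between the layers, the composition is no longer expressible as a single linear map.
nonlinear_two_layers = W2 @ relu(W1 @ x)
```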