diff --git a/doc/images/gradientDescent1D01.png b/doc/images/gradientDescent1D01.png
index d7061d1225b481c9387727ee41eba3ee5e5fe226..d80bd7ad70a7bf073b664cbe92341d39a116ed4f 100755
Binary files a/doc/images/gradientDescent1D01.png and b/doc/images/gradientDescent1D01.png differ
diff --git a/doc/images/intrinsicGeom_vs_extrinsicGeom01.jpg b/doc/images/intrinsicGeom_vs_extrinsicGeom01.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..317a53d258c6d514cfbf6bf69813d9f4d6b50f01
Binary files /dev/null and b/doc/images/intrinsicGeom_vs_extrinsicGeom01.jpg differ
diff --git a/doc/images/linesearch01.png b/doc/images/linesearch01.png
new file mode 100755
index 0000000000000000000000000000000000000000..fdf393dac704c49eaad214a8fda6f0d98f4d0eb9
Binary files /dev/null and b/doc/images/linesearch01.png differ
diff --git a/doc/rcfs.pdf b/doc/rcfs.pdf
index 94dba56647bd69e46037b70fc7f2f7656d0c5f03..6b5784de458bf1cd3ea1f5bb7cc255dcb2c33552 100644
Binary files a/doc/rcfs.pdf and b/doc/rcfs.pdf differ
diff --git a/doc/rcfs.tex b/doc/rcfs.tex
index f3005c5ab0b180f4adad7b03608b93ecd02acb20..abda8708533461647d05bc376ed73787f7e3e2dc 100644
--- a/doc/rcfs.tex
+++ b/doc/rcfs.tex
@@ -122,7 +122,7 @@ innerleftmargin=0.3em,innerrightmargin=0.3em,innertopmargin=0.3em,innerbottommar
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 \section{Introduction}
 
-This cookbook presents several learning and optimal control recipes for robotics (essentially for robot manipulators), complemented by simple toy problems that can be easily coded. It accompanies \textbf{Robotics Codes From Scratch (RCFS)}, a website containing interactive sandbox examples and exercises, together with a set of standalone source code examples gathered in a git repository, which can be accessed at:
+This cookbook presents learning and optimal control recipes for robotics (essentially for robot manipulators), complemented by simple toy problems that can be easily coded. It accompanies \textbf{Robotics Codes From Scratch (RCFS)}, a website containing interactive sandbox examples and exercises, together with a set of standalone source code examples gathered in a git repository, which can be accessed at:
 \begin{center}
 \url{https://rcfs.ch}
 %\url{https://gitlab.idiap.ch/rli/robotics-codes-from-scratch}
@@ -137,6 +137,15 @@ Each section in this document lists the corresponding source codes in Python and
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 \section{Quadratic costs minimization as a product of Gaussians (PoG)}\label{sec:PoG}
 
+\begin{figure}[ht]
+\centering
+\includegraphics[width=.8\columnwidth]{images/PoG01.png}
+\caption{\footnotesize
+Quadratic costs minimization as a product of Gaussians (PoG).
+}
+\label{fig:PoG}
+\end{figure}
+
 The solution of a quadratic cost function can be viewed probabilistically as corresponding to a Gaussian distribution. Indeed, given a precision matrix $\bm{W}$, the quadratic cost
 \begin{align}
 	c(\bm{x}) &= (\bm{x}-\bm{\mu})^\trsp \bm{W} (\bm{x}-\bm{\mu}),\\
@@ -177,15 +186,6 @@ so that
 = \bm{\Sigma}_2 {\left(\bm{\Sigma}_1+\bm{\Sigma}_2\right)}^{-1} \bm{\mu}_1 + \bm{\Sigma}_1 {\left(\bm{\Sigma}_1+\bm{\Sigma}_2\right)}^{-1} \bm{\mu}_2.
 \end{align*}
 
-\begin{figure}
-\centering
-\includegraphics[width=.8\columnwidth]{images/PoG01.png}
-\caption{\footnotesize
-Quadratic costs minimization as a product of Gaussians (PoG).
-}
-\label{fig:PoG}
-\end{figure}
-
 Figure \ref{fig:PoG} shows an illustration for two Gaussians in a 2-dimensional space. It also shows that when one of the Gaussians is singular, the product corresponds to a projection operation, which is found for example in nullspace projections used to solve prioritized tasks in robotics. 
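+
+As a quick numerical check of this result, the PoG mean above can be computed directly from the precision matrices. Below is a minimal Python sketch (assuming NumPy; the two Gaussians are hypothetical values chosen only for illustration):
+\begin{verbatim}
+import numpy as np
+
+# Two hypothetical Gaussians in a 2-dimensional space
+mu1, Sigma1 = np.array([1.0, 0.0]), np.diag([1.0, 4.0])
+mu2, Sigma2 = np.array([0.0, 2.0]), np.diag([3.0, 1.0])
+
+# Precision matrices W = Sigma^-1
+W1, W2 = np.linalg.inv(Sigma1), np.linalg.inv(Sigma2)
+
+# Product of Gaussians: Sigma = (W1+W2)^-1, mu = Sigma (W1 mu1 + W2 mu2)
+Sigma = np.linalg.inv(W1 + W2)
+mu = Sigma @ (W1 @ mu1 + W2 @ mu2)
+\end{verbatim}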
 
 %The solution of a cost composed of quadratic terms of the form 
@@ -206,13 +206,14 @@ Figure \ref{fig:PoG} shows an illustration for 2 Gaussians in a 2-dimensional sp
 %Solving an objective function composed of quadratic terms as a product of Gaussians offers a probabilistic perspective by representing the solution in the form of a distribution. 
 
 \newpage
+
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 \section{Cost function minimization problems}\label{sec:Newton}
 
-\begin{wrapfigure}{r}{.28\textwidth}
+\begin{wrapfigure}{r}{.24\textwidth}
 %\vspace{-20pt}
 \centering
-\includegraphics[width=.26\textwidth]{images/NewtonMethod1D_problem01.png}
+\includegraphics[width=.23\textwidth]{images/NewtonMethod1D_problem01.png}
 \caption{\footnotesize
 Problem formulation.
 }
@@ -230,6 +231,16 @@ Now that we have this intuition, we can move to a more formal problem formulatio
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 \subsection{Gradient descent} 
 
+\begin{wrapfigure}{r}{.42\textwidth}
+\centering
+\includegraphics[width=.36\textwidth]{images/gradientDescent1D01.png}
+\caption{\footnotesize
+Gradient descent for minimization, starting from an initial estimate $x_1$ and converging to a local minimum (red point) after 8 iterations.
+}
+\label{fig:gradientDescent}
+\vspace{30pt}
+\end{wrapfigure}
+
 \begin{algorithm}
 \caption{Backtracking line search method with parameter $\alpha_{\min}$ (presented here for decision variable $\bm{x}$)}
 \label{alg:linesearch}
@@ -248,26 +259,28 @@ The first-order Taylor expansion around the point $x_k$ can be expressed as
 \end{equation*}
 where $c'(x_k)$ is the derivative of $c$ evaluated at point $x_k$.
 
-\begin{wrapfigure}{r}{.42\textwidth}
-\centering
-\includegraphics[width=.36\textwidth]{images/gradientDescent1D01.png}
-\caption{\footnotesize
-Gradient descent for minimization, starting from an initial estimate $x_1$ and converging to a local minimum (red point) after 8 iterations.
-}
-\label{fig:gradientDescent}
-\end{wrapfigure}
-
 By starting from a point $x_k$ at each step $k$, we are interested in applying a correction $\Delta x_k$ that would decrease the cost $c(x_k)$. 
 If the cost function follows a linear trend at point $x_k$ with a slope defined by its gradient $c'(x_k)$, one direction would increase the cost while the other would decrease it. Thus, by applying a correction $\Delta x_k = -\alpha c'(x_k)$,  where $\alpha$ is a positive scaling factor, we go down the slope estimated at $x_k$. 
 
 The scaling factor $\alpha$ can be either constant or variable. If $\alpha$ is too large, there is the risk that our local linear approximation is not valid anymore when we move far away from $x_k$. If it is too small, the iterative algorithm will require many iteration steps to converge to a local minimum of the cost function. 
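+
+As an illustration, the update rule $\Delta x_k = -\alpha\,c'(x_k)$ can be coded in a few lines. The sketch below is a minimal Python example with a constant $\alpha$ (the 1D cost function and its hand-coded derivative are hypothetical, chosen only for illustration):
+\begin{verbatim}
+# Hypothetical 1D cost and its derivative
+def c(x):  return x**4 - 2*x**2 + x
+def dc(x): return 4*x**3 - 4*x + 1
+
+x, alpha = 2.0, 0.05    # initial estimate and constant step size
+for _ in range(100):
+    x -= alpha * dc(x)  # correction Delta x = -alpha c'(x)
+# x is now close to a local minimum of c
+\end{verbatim}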
 
-In practice, a simple backtracking line search procedure can be considered with Algorithm \ref{alg:linesearch}, by considering a small value for $\alpha_{\min}$. For more elaborated methods, see Ch.~3 of \cite{Nocedal06}. 
+In practice, a simple backtracking line search procedure such as Algorithm \ref{alg:linesearch} can be used, with a small value for $\alpha_{\min}$, see Figure \ref{fig:linesearch}. For more elaborate methods, see Ch.~3 of \cite{Nocedal06}. 
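+
+A minimal Python sketch of such a procedure is given below (a simplified reading of Algorithm \ref{alg:linesearch}; the function name and the halving schedule are illustrative):
+\begin{verbatim}
+def backtracking_line_search(c, x, dx, alpha_min=1e-3):
+    """Scale the update dx until it decreases the cost c."""
+    alpha = 1.0
+    while c(x + alpha * dx) > c(x) and alpha > alpha_min:
+        alpha *= 0.5  # iteratively divide alpha by two
+    return alpha
+\end{verbatim}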
 
+\newpage
 
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 \subsubsection*{Multidimensional case}
 
+\begin{wrapfigure}{r}{.24\textwidth}
+\centering
+\includegraphics[width=.22\textwidth]{images/linesearch01.png}
+\caption{\footnotesize
+Backtracking line search to scale the update vector $\Delta x_k$ until the update decreases the cost. In this example, starting with $\alpha=1$ and iteratively dividing $\alpha$ by two, the procedure provides a scaling factor $\alpha=0.25$.
+}
+\label{fig:linesearch}
+\vspace{20pt}
+\end{wrapfigure}
+
 For functions that depend on multiple variables stored as multidimensional vectors $\bm{x}$, the cost function $c(\bm{x})$ can similarly be approximated by a first-order Taylor expansion around the point $\bm{x}_k$ with
 %\begin{equation*}
 %	c(\bm{x}_k\!+\!\Delta\bm{x}_k) \approx c(\bm{x}_k) + \Delta\bm{x}_k^\trsp \, \frac{\partial c}{\partial\bm{x}}\Big|_{\bm{x}_k}, 
@@ -563,6 +576,7 @@ minimizes the constrained cost. The first part of this augmented state then give
 
 
 \newpage
+
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 \section{Forward kinematics (FK) for a planar robot manipulator}\label{sec:FK}
 \begin{flushright}
@@ -3943,7 +3957,7 @@ A smooth $d$-dimensional manifold $\mathcal{M}$ is a topological space that loca
 
 \begin{figure}
 \centering
-\includegraphics[width=\textwidth]{images/manifold-mappingAndTransportFcts01.png}
+\includegraphics[width=.7\columnwidth]{images/manifold-mappingAndTransportFcts01.png}
 \caption{\footnotesize 
 Applications in robotics using Riemannian manifolds rely on two well-known principles of Riemannian geometry: exponential/logarithmic mapping (\emph{left}) and parallel transport (\emph{right}), which are depicted here on a $\mathcal{S}^2$ manifold embedded in $\mathbb{R}^3$. 
 \emph{Left:} Bidirectional mappings between tangent space and manifold. \emph{Right:} Parallel transport of a vector along a geodesic (see main text for details).
@@ -4097,6 +4111,15 @@ The parallel transport of $\bm{V}\in\mathcal{T}_{\bm{X}}\mathcal{S}_{++}^d$ to $
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 \subsection{Non-homogeneous manifolds in robotics}
 
+\begin{figure*}
+\centering
+\includegraphics[width=.7\columnwidth]{images/intrinsicGeom_vs_extrinsicGeom01.jpg}
+\caption{\footnotesize 
+Intrinsic geometry defined by a Riemannian metric, with two examples of corresponding extrinsic geometries (here, embedded in a 3D space).   
+} 
+\label{fig:intrinsicGeom_vs_extrinsicGeom}
+\end{figure*}
+
 \begin{figure*}
 \centering
 \includegraphics[height=52mm]{images/kinEnergyGeodesics01.png}
@@ -4112,9 +4135,9 @@ The movement in dashed lines show the baseline movements that would be produced
 \end{figure*}
 %\emph{Left:} Metric field constructed from a signed distance field, which allow to generate paths that naturally curve around obstacles when the obstacles are close. \emph{Right:} Metric field provided by inertia matrices to generate movements from one joint configuration to another while minimizing kinetic energy (in solid lines). The movement in dashed lines show the baseline movements that would be produced by ignoring inertia (corresponding to linear interpolation between the two robot poses).
 
-Manifolds with nonconstant curvature can also be employed, such as spaces endowed with a metric, characterized by a weighting matrix used to compute distances. Many problems in robotics can be formulated with such a smoothly varying matrix $\bm{M}$ that measures the distance between two points $\bm{x}_1$ and $\bm{x}_2$ as a quadratic error term $c=(\bm{x}_1-\bm{x}_2)^\trsp\bm{M}(\bm{x}_1-\bm{x}_2)$, forming a Riemannian metric that describes the underlying manifold (with non-homogeneous curvature). This weighting matrix can for example represent levels of kinetic energy or stiffness gains to model varying impedance behaviors. Computation is often more costly for these manifolds with nonconstant curvature, because it typically requires iterative algorithms instead of the direct analytic expressions typically provided by homogeneous manifolds. 
+Manifolds with nonconstant curvature can also be employed, such as spaces endowed with a metric, characterized by a weighting matrix used to compute distances. Many problems in robotics can be formulated with such a smoothly varying matrix $\bm{G}(\bm{x})$, which can for example be used to evaluate displacements $\Delta\bm{x}$ as a quadratic error term $c(\Delta\bm{x})=\Delta\bm{x}^\trsp\bm{G}(\bm{x})\Delta\bm{x}$, forming a Riemannian metric that describes the underlying manifold (with non-homogeneous curvature). This weighting matrix can, for instance, represent levels of kinetic energy or stiffness gains to model varying impedance behaviors. Computation is often more costly for these manifolds with nonconstant curvature, because it typically requires iterative algorithms instead of the direct analytic expressions available for homogeneous manifolds. 
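+
+As a small illustration of such a displacement cost, the sketch below evaluates $c(\Delta\bm{x})=\Delta\bm{x}^\trsp\bm{G}(\bm{x})\Delta\bm{x}$ in Python for a hypothetical smoothly varying metric (the particular choice of $\bm{G}(\bm{x})$ is an assumption made only for this example):
+\begin{verbatim}
+import numpy as np
+
+# Hypothetical metric: isotropic, inflated near the origin
+def G(x):
+    return np.eye(2) * (1.0 + np.exp(-x @ x))
+
+x = np.array([0.5, 0.2])     # point on the manifold
+dx = np.array([0.1, -0.05])  # small displacement
+cost = dx @ G(x) @ dx        # quadratic displacement cost
+\end{verbatim}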
 
-Figure \ref{fig:nonhomogeneousManifolds} presents examples exploiting non-homogeneous Riemannian manifolds.
+Figures \ref{fig:intrinsicGeom_vs_extrinsicGeom} and \ref{fig:nonhomogeneousManifolds} present examples exploiting non-homogeneous Riemannian manifolds.
 
 
 \newpage