Diffstat (limited to 'main.tex')
 main.tex | 160
 1 file changed, 140 insertions(+), 20 deletions(-)
diff --git a/main.tex b/main.tex
index e1fbc9e..35354b9 100755
--- a/main.tex
+++ b/main.tex
@@ -770,8 +770,8 @@
\end{figure}
\end{frame}
-\begin{frame}{Summary and perspectives for future work}
- In this talk, we presented
+\begin{frame}{Summary of part I and perspectives for future work}
+ In this part, we presented
\begin{itemize}
\item a variance reduction approach for efficiently estimating the mobility;
\item numerical results showing that the scaling of the mobility is \emph{not universal}.
@@ -782,11 +782,6 @@
\item Use alternative methods (PINNs, Gaussian processes) to solve the Poisson equation;
\item Improve and study variance reduction approaches for other transport coefficients.
\end{itemize}
-
- \vspace{1cm}
- \begin{center}
- Thank you for your attention!
- \end{center}
\end{frame}
\section{Optimal importance sampling for overdamped Langevin dynamics}
@@ -1020,7 +1015,7 @@
\label{eq:lower_bound_asymvar}
\sigma^2_f[U] \geq \frac{2}{Z^2} \inf_{A \in \real} \bigg(\int_{\torus} \bigl\lvert F(x) + A \bigr\rvert \d x \bigg)^2.
\end{equation}
- where
+ where
\[
F(x) := \int_0^x \bigl( f(\xi)-I \bigr) \e^{-V(\xi)}\d \xi.
\]
@@ -1061,27 +1056,152 @@
\begin{frame}
{Finding the optimal $U$ in the multidimensional setting}
- In \emph{dimension one}, it holds that
- \begin{equation}
- \label{eq:lower_bound_asymvar}
- \sigma^2_f[U] \geq \frac{2}{Z^2} \inf_{A \in \real} \bigg(\int_{\torus} \bigl\lvert F(x) + A \bigr\rvert \d x \bigg)^2.
- \end{equation}
- where
+
+ \begin{proposition}
+ [Functional derivative of the asymptotic variance]
+ Let $\phi_U$ denote the solution to
+ \begin{equation}
+ \label{eq:poisson}
+ -\mathcal L_U \phi_{U} = (f- I) \e^U.
+ \end{equation}
+ Under appropriate conditions,
+ it holds for all $\delta U \in C^{\infty}(\torus^d)$ that
+ \begin{align}
+ \notag
+ \frac{1}{2} \d \sigma^2_f[U] \cdot \delta U
+ &:= \lim_{\varepsilon \to 0} \frac{1}{\varepsilon} \bigl(\sigma^2_f[U + \varepsilon \delta U] - \sigma^2_f[U]\bigr) \\
+ \label{eq:funcder}
+ &= \frac{Z_U^2}{Z^2} \int_{\torus^d} \delta U \bigg( \abs*{\nabla{\phi_{U}}}^2 - \int_{\torus^d} \abs*{\nabla {\phi_{U}}}^2 \, \d \mu_{U} \bigg) \, \d \mu_{U}.
+ \end{align}
+ \end{proposition}
+  \textbf{Steepest descent approach} (a minimal numerical sketch follows this frame):
+ \begin{itemize}
+ \item Solve the Poisson equation~\eqref{eq:poisson} numerically;
+    \item Construct an ascent direction $G$ for $\sigma^2_f$ from~\eqref{eq:funcder}, e.g.\ $G = \abs*{\nabla{\phi_{U}}}^2$, for which $\d \sigma^2_f[U] \cdot G \geq 0$;
+ \item Perform a step in this direction: $U \leftarrow U - \eta G$;
+ \item Repeat until convergence.
+ \end{itemize}
+\end{frame}
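A minimal numerical sketch of the steepest-descent loop described on this slide, for illustration only: it works in one dimension on the torus with a finite-difference discretization, and the concrete choices below (beta = 1, the potential V, the observable f, the grid size n, the step size eta, and the iteration count) are assumptions made here, not choices taken from the talk.

    import numpy as np

    # Minimal 1D sketch of the steepest-descent loop above; all concrete choices
    # (domain, beta = 1, V, f, n, eta, number of iterations) are illustrative
    # assumptions, not taken from the slides.
    n, eta = 256, 0.2
    x = np.linspace(0.0, 2.0 * np.pi, n, endpoint=False)
    h = x[1] - x[0]
    V = np.cos(x)                  # hypothetical reference potential
    f = np.sin(x)                  # hypothetical observable
    U = np.zeros(n)                # perturbation potential, initialized at zero

    pi = np.exp(-V)
    I = (f * pi).sum() / pi.sum()  # reference average of f with respect to e^{-V}

    # Periodic central-difference matrices for d/dx and d^2/dx^2.
    E = np.eye(n)
    D1 = (np.roll(E, -1, axis=0) - np.roll(E, 1, axis=0)) / (2.0 * h)
    D2 = (np.roll(E, -1, axis=0) - 2.0 * E + np.roll(E, 1, axis=0)) / h ** 2

    def ddx(g):
        return (np.roll(g, -1) - np.roll(g, 1)) / (2.0 * h)

    for _ in range(100):
        W = V + U
        mu = np.exp(-W)
        mu /= mu.sum() * h                             # density of mu_U on the grid

        # Step 1: solve the Poisson equation -L_U phi = (f - I) e^U,
        # where L_U g = -(V + U)' g' + g''.
        L = -np.diag(ddx(W)) @ D1 + D2
        phi = np.linalg.lstsq(-L, (f - I) * np.exp(U), rcond=None)[0]

        # Step 2: ascent direction from the functional derivative (centered w.r.t. mu_U).
        g2 = ddx(phi) ** 2
        G = g2 - (g2 * mu).sum() * h

        # Step 3: descent step on the perturbation potential.
        U -= eta * G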
+
+\begin{frame}
+ {No smooth minimizers}
+ \begin{corollary}
+ [No smooth minimizer]
+ \label{corollary:no_smooth_minimizer}
+ Unless~$f$ is constant,
+    there is no perturbation potential~$U \in C^\infty(\torus^d)$ that is a critical point of $U \mapsto \sigma^2_f[U]$.
+ \end{corollary}
+ \textbf{Proof.}
+  Assume by contradiction that $U_*$ is a smooth critical point.
+ Then
\[
- F(x) := \int_0^x \bigl( f(\xi)-I \bigr) \e^{-V(\xi)}\d \xi.
+ 0 = \frac{1}{2} \d \sigma^2_f[U_*] \cdot \delta U
+      = \frac{Z_{U_*}^2}{Z^2} \int_{\torus^d} \delta U \bigg( \abs*{\nabla{\phi_{U_*}}}^2 - \int_{\torus^d} \abs*{\nabla {\phi_{U_*}}}^2 \, \d \mu_{U_*} \bigg) \, \d \mu_{U_*},
\]
- This inequality~\eqref{eq:lower_bound_asymvar} is an equality for
+ for all $\delta U \in C^{\infty}(\torus^d)$.
+ \begin{itemize}
+    \item Therefore, $\abs*{\nabla {\phi_{U_*}}}^2 = C$ for some constant $C$.
+    \item Since $\phi_{U_*}$ is a smooth function, there is $x \in \torus^d$ such that $\nabla \phi_{U_*}(x) = 0$.
+    \item Consequently $C = 0$ and so $\nabla \phi_{U_*} = 0$: \alert{contradiction}, because then $\mathcal L_{U_*} \phi_{U_*} = 0$, whereas~\eqref{eq:poisson} requires $-\mathcal L_{U_*} \phi_{U_*} = (f - I) \e^{U_*}$, which is nonzero for non-constant~$f$.
+ \end{itemize}
+
+ \vspace{.5cm}
+  $\rightsquigarrow$ The optimal perturbation potential is necessarily non-smooth, hence \alert{not convenient} in practice\dots
+\end{frame}
+
+\begin{frame}
+ {Example (1/2)}
+ Assume that $V = 0$ and $f(x) = \sin(x_1) + \sin(x_2)$.
+ \begin{figure}[ht]
+ \centering
+ \includegraphics[width=0.49\linewidth]{figures/driftopt/2d_optimal.pdf}
+ \includegraphics[width=0.49\linewidth]{figures/driftopt/2d_optimal_poisson.pdf}
+ \caption{%
+ Optimal total potential (left)
+ together with the solution to the associated Poisson equation (right).
+ }
+ \label{fig:2d_first_example}
+ \end{figure}
+  $\rightsquigarrow$ The domain is again divided into subdomains, each of which suffices for estimating~$I$.
+\end{frame}
+
+\begin{frame}
+ {Example (2/2): multimodal target $\e^{-V}$}
+ Assume that $V(x) = 2\cos(x_1) - \cos(x_2)$ and~$f(x) = \sin(x_1)$.
+ \begin{figure}[ht]
+ \centering
+ \includegraphics[width=0.49\linewidth]{figures/driftopt/2d_optimal_multimodal.pdf}
+ \includegraphics[width=0.49\linewidth]{figures/driftopt/2d_optimal_multimodal_heatmap.pdf}
+ \label{fig:2d_metastable}
+ \end{figure}
+ \emph{Variance reduction} by a factor $\approx 6$!
+\end{frame}
+
+\subsection{Minimizing the asymptotic variance for a class of observables}
+\begin{frame}
+ {Alternative: minimizing the expected variance over \textcolor{yellow}{a class of observables}}
+ Assume that the observables are well described by a Gaussian random field
\[
- U(x) = U_*(x) = - V(x) -\ln\abs*{F(x) + A_*},
+ f = \sum_{j=1}^{J} \sqrt{\lambda_j} u_j f_j,
+ \qquad u_j \sim \mathcal N(0, 1),
+ \qquad \lambda_j \in (0, \infty).
\]
- where $A_*$ is the constant achieving the infimum in~\eqref{eq:lower_bound_asymvar}.
+ \textbf{Question:} can we find~$U$ such that $\sigma^2[U] := \expect \bigl( \sigma^2_f[U] \bigr)$ is minimized?
\begin{itemize}
- \item The potential $U_*$ is generally \alert{singular}: impractical for numerics\dots
- \item The lower bound in~\eqref{eq:lower_bound_asymvar} can be approached by a smooth~$U$.
+    \item It holds (see the derivation after this frame) that
+ \[
+      \sigma^2[U] = \sum_{j=1}^{J} \lambda_j \sigma^2_{f_j}[U].
+ \]
+
+ \item
+ The functional derivative of $\sigma^2[U]$ is given by
+ \[
+ \frac{1}{2} \d\sigma^2[U] \cdot \delta U
+ = \frac{Z_U^2}{Z^2} \int_{\torus^d} \left( \delta U - \int_{\torus^d} \delta U \, \d \mu_U \right) \left( \sum_{j=1}^{J} \lambda_j \abs*{\nabla{\phi_j}}^2 \right) \, \d \mu_{U}.
+      \]
+      where $\phi_j$ denotes the solution to the Poisson equation~\eqref{eq:poisson} with observable~$f_j$.
+
+ \item
+ The steepest descent approach can be employed in this case too!
\end{itemize}
\end{frame}
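The first item on this slide follows from a short computation. A sketch, under the assumptions (consistent with the proposition above but not stated explicitly in this excerpt) that the coefficients $u_j$ are independent and that the asymptotic variance can be written as $\sigma^2_f[U] = \frac{2 Z_U^2}{Z^2} \int_{\torus^d} \abs*{\nabla \phi_f}^2 \, \d \mu_U$, where $\phi_f$ solves~\eqref{eq:poisson} and depends linearly on~$f$:
\[
  \sigma^2[U]
  = \expect \bigl( \sigma^2_f[U] \bigr)
  = \frac{2 Z_U^2}{Z^2} \sum_{j, k = 1}^{J} \sqrt{\lambda_j \lambda_k} \, \expect[u_j u_k] \int_{\torus^d} \nabla \phi_{f_j} \cdot \nabla \phi_{f_k} \, \d \mu_U
  = \sum_{j = 1}^{J} \lambda_j \sigma^2_{f_j}[U],
\]
since $\expect[u_j u_k] = \delta_{jk}$ for independent standard Gaussians.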
+\begin{frame}
+ {Example}
+ Here $V(x) = 2 \cos(2 x_1) - \cos(x_2)$ and $f \sim \mathcal N\bigl(0, (\laplacian + \mathcal I)^{-1}\bigr)$.
+ \begin{figure}[ht]
+ \centering
+ \includegraphics[width=0.49\linewidth]{figures/driftopt/2d_initial_class_metastable.pdf}
+ \includegraphics[width=0.49\linewidth]{figures/driftopt/2d_optimal_class_metastable.pdf}
+ \caption{%
+ Potential~$V$ (left) and optimal potential~$V+U$ (right).
+ }
+ \end{figure}
+\end{frame}
+
+\begin{frame}
+ {Summary of part II and perspectives for future work}
+ In this part,
+ \begin{itemize}
+ \item We studied an importance sampling approach for the overdamped Langevin dynamics.
+ \item We proposed an approach for calculating the optimal perturbation potential.
+ \end{itemize}
+
+ \textbf{Perspectives}:
+ \begin{itemize}
+    \item Solving the Poisson equation accurately is not feasible in high dimension;
+    \item Application to high-dimensional systems, by parametrizing the perturbation through a reaction coordinate (a sketch follows this frame):
+    \[
+      U(x) = U\bigl(\xi(x)\bigr), \qquad \xi \text{ a reaction coordinate}.
+    \]
+ \end{itemize}
+ \vspace{1cm}
+ \begin{center}
+ \Large
+ \emph{Thank you for your attention!}
+ \end{center}
+\end{frame}
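A hypothetical illustration of the reaction-coordinate parametrization mentioned in the perspectives: the perturbation depends on $x$ only through a low-dimensional $\xi(x)$, so the number of optimization variables stays small even in high dimension. The choice of $\xi$, the Fourier basis, and all names below (d, m, theta, xi, U) are assumptions for illustration, not part of the talk.

    import numpy as np

    # Hypothetical sketch of the parametrization U(x) = U(xi(x)): the perturbation
    # depends on x only through a scalar reaction coordinate xi, so only 2*m Fourier
    # coefficients are optimized even if x lives in dimension d.  All names and
    # choices below are illustrative.
    d, m = 100, 5
    rng = np.random.default_rng(0)
    theta = 0.1 * rng.standard_normal(2 * m)   # the actual optimization variables

    def xi(x):
        """Hypothetical reaction coordinate: the first coordinate of the system."""
        return x[..., 0]

    def U(x, c=theta):
        """Perturbation potential evaluated through the reaction coordinate."""
        s = np.atleast_1d(xi(x))                        # shape (N,)
        modes = np.arange(1, m + 1)[:, None]            # shape (m, 1)
        basis = np.concatenate([np.cos(modes * s),      # shape (2m, N)
                                np.sin(modes * s)])
        return c @ basis                                # shape (N,)

    # Evaluate U on a batch of high-dimensional configurations.
    X = rng.standard_normal((10, d))
    print(U(X))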
+
\appendix
\begin{frame}[noframenumbering,plain]