Changeset - be633a788363
[Not reviewed]
0 1 0
András Gilyén - 8 years ago 2017-09-14 00:25:36
gilyen@cwi.nl
General distance-degree lemma
1 file changed with 2 insertions and 2 deletions:
0 comments (0 inline, 0 general)
main.tex
Show inline comments
 
@@ -357,100 +357,100 @@ The following Lemma says that if a set $S$ splits the graph in two, then those t
 
    \end{align*}
 
   and similarly for $\xi^{G\setminus X}$.
 
    Instead of choosing a step in $Y,X,X,Y,\cdots$ we could have chosen other orderings. The following diagram illustrates all possible interleavings, and the red line corresponds to the particular interleaving $Y,X,X,Y$ in the example above.
 
    \begin{center}
 
        \includegraphics{diagram_paths3.pdf}
 
    \end{center}
 
    For the labels shown within the grid, define $p_{ij} = \frac{z^X_i}{z^X_i + z^Y_j}$.
 
    The probability of this particular interleaving $\xi^G$ is given by
 
    \begin{align*}
 
        \P^{G}_S(\xi^{G})
 
        &= (1-p)^{|b^X\; b^Y|} p^{|X\cup Y|-|b^X\;b^Y|} \quad
 
        \frac{1}{z^X_1+z^Y_1} \P(r^Y_1) \cdot
 
        \frac{1}{z^X_1+z^Y_2} \P(r^X_1) \cdots \\
 
        &= (1-p)^{|b^X|} p^{|X|-|b^X|} \cdot (1-p)^{|b^Y|} p^{|Y|-|b^Y|} \\
 
        &\qquad \cdot
 
        \frac{z^Y_1}{z^X_1+z^Y_1} \frac{1}{z^Y_1} \P(r^Y_1) \;
 
        \frac{z^X_1}{z^X_1+z^Y_2} \frac{1}{z^X_1} \P(r^X_1) \;
 
        \frac{z^X_2}{z^X_2+z^Y_2} \frac{1}{z^X_2} \P(r^X_2)
 
        \cdots \tag{rewrite fractions}\\
 
        &=
 
        \frac{z^Y_1}{z^X_1+z^Y_1} 
 
        \frac{z^X_1}{z^X_1+z^Y_2} 
 
        \frac{z^X_2}{z^X_2+z^Y_2} 
 
        \cdots
 
        \P^{G\setminus Y}_S(\xi^{G\setminus Y}) \; \P^{G\setminus X}_S(\xi^{G\setminus X})
 
        \tag{definition} \\
 
        &= (1-p_{1,1}) \; p_{1,2} \; p_{2,2} \; (1-p_{3,2}) \cdots \P^{G\setminus Y}_S(\xi^{G\setminus Y}) \; \P^{G\setminus X}_S(\xi^{G\setminus X})
 
        \tag{definition of $p_{i,j}$} \\
 
        &= \P(\text{path in grid}) \; \P^{G\setminus Y}_S(\xi^{G\setminus Y}) \; \P^{G\setminus X}_S(\xi^{G\setminus X})
 
    \end{align*}
 
    In the grid we see that at every point the probabilities sum to 1, and we always reach the end, so we know the sum of all paths in the grid is 1. This proves the required equality.
 
    We obtain
 
    \begin{align*}
 
        \P^{G}_S(\NZ{S} \cap A^X \cap A^Y)
 
        &= \sum_{\xi^G \in \NZ{S}\cap A^X \cap A^Y} \P^{G}_S(\xi^G) \\
 
        &= \sum_{\xi^{G\setminus Y} \in \NZ{S}\cap A^X}
 
           \sum_{\xi^{G\setminus X} \in \NZ{S}\cap A^Y}
 
            \P^{G\setminus Y}_S(\xi^{G\setminus Y}) \cdot
 
            \P^{G\setminus X}_S(\xi^{G\setminus X}) \\
 
        &= \P^{G\setminus Y}_S(\NZ{S} \cap A^X) \; \cdot \; \P^{G\setminus X}_S(\NZ{S} \cap A^Y)
 
    \end{align*}
 
\end{proof}
 

	
 
The intuition behind the following lemma is that if two sites have distance $d$ in the graph, then the only way they can affect each other is if an interaction chain forms between them, meaning that every vertex in between should get resampled to $0$ at least once.
 

	
 
\begin{lemma}\label{lemma:distancePower}
 
	Suppose $G=(V,E)$ is a graph, $X,Y\subseteq V$ and $A^X$ is a local event on $X$. Then
 
	\[\P^{G}(A^X)-\P^{G\setminus Y}(A^X)=\bigO{p^{d(X,Y)}}.\]
 
	(Should be true with $+1$ in the degree!)
 
	(Should be true with $+1$ in the degree, when $d(X,Y)>0$!)
 
\end{lemma}
 
\begin{proof}
 
	We can assume without loss of generality, that $X\neq \emptyset\neq Y$, otherwise the statement is trivial.
 
	We can assume without loss of generality, that $X\neq \emptyset\neq Y$, otherwise the statement is trivial. Also we can assume without loss of generality that $d(X,Y)<\infty$, i.e., $X,Y$ are in the same connected component of $G$, otherwise we can use Lemma~\ref{lemma:splitting} with $S=\emptyset$.
 
	
 
	The proof goes by induction on $d(X,Y)$. The statement is trivial for $d(X,Y)=0$, and is easy to check for $d(X,Y)=1$, by looking at resample sequences that reach the all $1$ state in at most $0$ steps (which is simply the case when everything is sampled to $1$ initially).
 
	
 
	Now we show the inductive step, assuming we know the statement for $d$, and that $d(X,Y)=d+1$.
 
	First we assume, that $\NZ{X}\subseteq\overline{A^X}$, i.e., $A^X\subseteq \Z{X}$.
 
	
 
	For $i\in[d]$ we define $A_i^X:=A^X\cap{\NZ{\overline{\partial}(X,i)}}\cap\bigcap_{j\in[i-1]}\Z{\overline{\partial}(X,j)}$, 
 
	and define $A_{d+1}^X:=A^X\cap\bigcap_{j\in[d]}\Z{\overline{\partial}(X,j)}$,
 
	so that they form a partition $A^X=\dot\bigcup_{i\in [d+1]}A_i^X$. 
 
	It is easy to see that for all $i\in[d+1]$ we have $A_{i}^X\subseteq\Z{X}\cap\bigcap_{j\in[i-1]}\Z{\overline{\partial}(X,j)}$, and therefore 
 
	\begin{equation}\label{eq:AXorder}
 
		\P^G(A_{i}^X)=\bigO{p^{i}}.
 
	\end{equation}
 
	Now we use the Splitting lemma~\ref{lemma:splitting} to show that for all $i\in[d]$
 
	\begin{align}
 
		\P^G(A_{i}^X)
 
		&=\P^{G\cap B(X,i)}_{\overline{\partial}(X,i)}(A_{i}^X)\cdot \P^{G\setminus B(X,i-1)}(\NZ{\overline{\partial}(X,i)}) \tag{by Lemma~\ref{lemma:splitting}}\\
 
		&=\P^{G\cap B(X,i)}_{\overline{\partial}(X,i)}(A_{i}^X)\cdot \left(\P^{G\setminus Y\setminus B(X,i-1)}(\NZ{\overline{\partial}(X,i)})+\bigO{p^{d+1-i}}\right) \tag{by induction}\\
 
		&=\P^{G\cap B(X,i)}_{\overline{\partial}(X,i)}(A_{i}^X)\cdot \P^{G\setminus Y\setminus B(X,i-1)}(\NZ{\overline{\partial}(X,i)})+\bigO{p^{d+1}} \tag{by equation \eqref{eq:AXorder}}\\
 
		&=\P^{G\setminus Y}(A_{i}^X)+\bigO{p^{d+1}} \tag{by Lemma~\ref{lemma:splitting}}\\
 
		&=\P^{G\setminus Y}(A_{i}^X)+\bigO{p^{d(X,Y)}}. \label{eq:indStep}
 
	\end{align}
 
	Therefore 
 
	\[\P^G(A^X)
	\overset{\eqref{eq:AXorder}}{=}\sum_{i\in[d]}\P^G(A_i^X)+\bigO{p^{d(X,Y)}}
	\overset{\eqref{eq:indStep}}{=}\sum_{i\in[d]}\P^{G\setminus Y}(A_i^X)+\bigO{p^{d(X,Y)}}
	\overset{\eqref{eq:AXorder}}{=}\P^{G\setminus Y}(A^X)+\bigO{p^{d(X,Y)}}.
	\]
 
	We finish the proof by observing that if $\NZ{X}\nsubseteq\overline{A^X}$,
 
	then we necessarily have $\NZ{X}\subseteq A^X$, and therefore we can use the above proof with $B^X:=\overline{A^X}$ and use that $\P(A^X)=1-\P(B^X)$.
 
\end{proof}
 
	
 
	\begin{theorem} If $2< 2m\leq n$ and $m\leq M$, then $R^{(n)}=\E^{[-M,M]}(\Res{0})+\bigO{p^{m}}$.
 
	\end{theorem}
 
	\begin{proof} 
 
		\vskip-12mm
 
		\begin{align*}
 
			R^{(n)}
 
			&= \E^{(n)}(\Res{0}) \tag{by translation invariance}\\
 
			&= \sum_{k=1}^{\infty}\P^{(n)}(\Res{0}\!\geq\! k) \\		
 
			&= \sum_{k=1}^{\infty}\P^{[-m+1,m-1]}(\Res{0}\!\geq\! k)+ \bigO{p^{m}} \tag{by Lemma~\ref{lemma:distancePower}}\\
 
			&= \sum_{k=1}^{\infty}\P^{[-M,M]}(\Res{0}\!\geq\! k)+ \bigO{p^{m}} \tag{by Lemma~\ref{lemma:distancePower}}\\	
 
			&=\E^{[-M,M]}(\Res{0})+\bigO{p^{m}}.
 
		\end{align*}  
 
		\vskip-7mm		
 
	\end{proof} 	
 
\begin{comment}
 
		Let $N\geq \max(2n,2m)$, then
0 comments (0 inline, 0 general)