Changeset - be633a788363
András Gilyén - 2017-09-14 00:25:36
gilyen@cwi.nl
General distance-degree lemma
1 file changed with 2 insertions and 2 deletions:
main.tex
 
@@ -309,196 +309,196 @@ The following Lemma says that if a set $S$ splits the graph in two, then those t
 
    Let $b|_{G\setminus X}$ be the restriction of $b$ to $G\setminus X$ and similarly for $b|_{G\setminus Y}$.
 
    To construct $\xi^{G\setminus Y}$ and $\xi^{G\setminus X}$, start with $\xi^{G\setminus Y} = \left( (\text{initialize }b|_{G\setminus Y}) \right)$ and $\xi^{G\setminus X} = \left( (\text{initialize }b|_{G\setminus X}) \right)$. For each step $(z_i,v_i,r_i)$ in $\xi^G$ do the following: if $v_i \in X$ then append $(z^{X}_i,v_i,r_i)$ to $\xi^{G\setminus Y}$ and if $v_i \in Y$ then append $(z^{Y}_i,v_i,r_i)$ to $\xi^{G\setminus X}$.
 
    Here $z^{X}_i$ is the number of zeroes that were in $X$ and $z^{Y}_i$ is the number of zeroes in $Y$. We have $z_i = z^{X}_i + z^{Y}_i$ because $\xi^G\in\NZ{S}$, so there cannot be any zero in $S$; all zeroes have to be in $X$ or $Y$. Furthermore, this also ensures that we always have either $v_i\in X$ or $v_i\in Y$, since only vertices that are zero can be chosen for resampling.
 

	
 
    Now $\xi^{G\setminus Y}$ is a valid path of the Markov chain associated with the graph $G\setminus Y$ (i.e., with vertices $X\cup S$), because in the original path $\xi^G$, all zeroes in $X$ have been resampled by resamplings in $X$. There cannot be a vertex $v\in Y$ such that the resampling of $v$ changed a vertex in $X$, since $X$ and $Y$ are only connected through $S$ and we know $\xi^G\in\NZ{S}$.
 

	
 
    Vice versa, any two paths $\xi^{G\setminus Y}\in\NZ{S}$ and $\xi^{G\setminus X}\in\NZ{S}$ also induce a path $\xi^G\in\NZ{S}$ by simply interleaving the resampling positions. Note that $\xi^{G\setminus Y},\xi^{G\setminus X}$ actually induce $\binom{|\xi^{G\setminus Y}|+|\xi^{G\setminus X}|}{|\xi^{G\setminus Y}|}$ paths $\xi^G$ because of the possible orderings of interleaving the resamplings in $\xi^{G\setminus Y}$ and $\xi^{G\setminus X}$.
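
    For example, if $\xi^{G\setminus Y}$ and $\xi^{G\setminus X}$ each contain $2$ resampling steps, then together they induce $\binom{2+2}{2}=6$ different paths $\xi^G$.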
 
    For a fixed $\xi^{G\setminus Y},\xi^{G\setminus X}$ we will now show the following:
 
    \begin{align*}
 
        \sum_{\substack{\xi^G\in\NZ{S} \text{ s.t.}\\ \xi^G \text{ decomposes into } \xi^{G\setminus Y},\xi^{G\setminus X} }} \P^{G}_S(\xi^G) &=
 
        \sum_{\text{interleavings of }\xi^{G\setminus Y},\xi^{G\setminus X}} \P(\text{interleaving}) \cdot \P^{G\setminus Y}_S(\xi^{G\setminus Y}) \cdot \P^{G\setminus X}_S(\xi^{G\setminus X}) \\
 
        &= \P^{G\setminus Y}_S(\xi^{G\setminus Y}) \cdot \P^{G\setminus X}_S(\xi^{G\setminus X}) 
 
    \end{align*}
 
    where both sums are over $\binom{|\xi^{G\setminus Y}|+|\xi^{G\setminus X}|}{|\xi^{G\setminus Y}|}$ terms.
 
    This is best explained by an example. Let's consider the following fixed $\xi^{G\setminus Y},\xi^{G\setminus X}$ and an example interleaving where we choose vertices from $Y,X,X,Y,\cdots$:
 
    \begin{align*}
 
        \xi^{G\setminus Y} &= \left( (\text{initialize to }b^X\;1^S),
 
        (z^X_1, v^X_1, r^X_1),
 
        (z^X_2, v^X_2, r^X_2),
 
        (z^X_3, v^X_3, r^X_3),
 
        (z^X_4, v^X_4, r^X_4),
 
        \cdots  \right) \\
 
        \xi^{G\setminus X} &= \left( (\text{initialize to }1^S\;b^Y),
 
        (z^Y_1, v^Y_1, r^Y_1),
 
        (z^Y_2, v^Y_2, r^Y_2),
 
        (z^Y_3, v^Y_3, r^Y_3),
 
        (z^Y_4, v^Y_4, r^Y_4),
 
        \cdots  \right) \\
 
        \xi^G             &= \big( (\text{initialize to }b^X \; 1^S \; b^Y),
 
        (z^X_1+z^Y_1, v^Y_1, r^Y_1),
 
        (z^X_1+z^Y_2, v^X_1, r^X_1), \\
 
        &\qquad\qquad\qquad\qquad\qquad\qquad\qquad\qquad
 
        (z^X_2+z^Y_2, v^X_2, r^X_2),
 
        (z^X_3+z^Y_2, v^Y_2, r^Y_2),
 
        \cdots \big)
 
    \end{align*}
 
    Here $b^X\in \{0,1\}^{X}$ and $b^Y\in\{0,1\}^Y$. Since we condition on the event that $S$ is initialized to ones, we know the initial state is of the form $b^X\;1^S$ in $\xi^{G\setminus Y}$. Similarly, since these paths satisfy the $\NZ{S}$ event, we know all the vertices $v_i$ resampled in $\xi^{G\setminus Y}$ are vertices in $X$, and the resampled bits $r_i$ are bits corresponding to vertices in $X$.
 
    In the newly constructed path $\xi^G$ the number of zeroes is the number of zeroes in $X$ and $Y$ together, so this starts as $z^X_1 + z^Y_1$. Then in this example, after the first step the number of zeroes is $z^X_1+z^Y_2$ since a step of $\xi^{G\setminus X}$ was done (so a vertex in $Y$ was resampled).
 
    The probability of $\xi^{G\setminus Y}$ is given by
 
    \begin{align*}
 
        \P^{G\setminus Y}_S(\xi^{G\setminus Y}) &=
 
        \P(\text{initialize }b^X\;1^S \mid \text{initialize $S$ to }1)
 
        \P(\text{pick }v^X_1 \mid z^X_1) \P(r^X_1)
 
        \P(\text{pick }v^X_2 \mid z^X_2) \P(r^X_2) \cdots \\ 
 
        &= (1-p)^{|b^X|} p^{|X|-|b^X|} \cdot
 
        \frac{1}{z^X_1} \P(r^X_1) \cdot
 
        \frac{1}{z^X_2} \P(r^X_2) \cdots
 
        \frac{1}{z^X_{|\xi^{G\setminus Y}|}} \P(r^X_{|\xi^{G\setminus Y}|}) .
 
    \end{align*}
 
    and similarly for $\xi^{G\setminus X}$.
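
    For completeness, the analogous expression spelled out (the same computation as above, with the roles of $X$ and $Y$ exchanged) is

    \begin{align*}
        \P^{G\setminus X}_S(\xi^{G\setminus X}) &=
        \P(\text{initialize }1^S\;b^Y \mid \text{initialize $S$ to }1)
        \P(\text{pick }v^Y_1 \mid z^Y_1) \P(r^Y_1)
        \P(\text{pick }v^Y_2 \mid z^Y_2) \P(r^Y_2) \cdots \\
        &= (1-p)^{|b^Y|} p^{|Y|-|b^Y|} \cdot
        \frac{1}{z^Y_1} \P(r^Y_1) \cdot
        \frac{1}{z^Y_2} \P(r^Y_2) \cdots
        \frac{1}{z^Y_{|\xi^{G\setminus X}|}} \P(r^Y_{|\xi^{G\setminus X}|}) .
    \end{align*}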
 
    Instead of choosing steps in the order $Y,X,X,Y,\cdots$ we could have chosen other orderings. The following diagram illustrates all possible interleavings; the red line corresponds to the particular interleaving $Y,X,X,Y$ in the example above.
 
    \begin{center}
 
        \includegraphics{diagram_paths3.pdf}
 
    \end{center}
 
    For the labels shown within the grid, define $p_{ij} = \frac{z^X_i}{z^X_i + z^Y_j}$.
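
    In other words (this is how the grid labels are used below): when there are $z^X_i$ zeroes in $X$ and $z^Y_j$ zeroes in $Y$, the vertex to resample is chosen uniformly among the zeroes, so the next resampling happens in $X$ with probability
    $$\frac{z^X_i}{z^X_i+z^Y_j}=p_{i,j}$$
    and in $Y$ with probability $1-p_{i,j}$.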
 
    The probability of this particular interleaving $\xi^G$ is given by
 
    \begin{align*}
 
        \P^{G}_S(\xi^{G})
 
        &= (1-p)^{|b^X\; b^Y|} p^{|X\cup Y|-|b^X\;b^Y|} \quad
 
        \frac{1}{z^X_1+z^Y_1} \P(r^Y_1) \cdot
 
        \frac{1}{z^X_1+z^Y_2} \P(r^X_1) \cdots \\
 
        &= (1-p)^{|b^X|} p^{|X|-|b^X|} \cdot (1-p)^{|b^Y|} p^{|Y|-|b^Y|} \\
 
        &\qquad \cdot
 
        \frac{z^Y_1}{z^X_1+z^Y_1} \frac{1}{z^Y_1} \P(r^Y_1) \;
 
        \frac{z^X_1}{z^X_1+z^Y_2} \frac{1}{z^X_1} \P(r^X_1) \;
 
        \frac{z^X_2}{z^X_2+z^Y_2} \frac{1}{z^X_2} \P(r^X_2)
 
        \cdots \tag{rewrite fractions}\\
 
        &=
 
        \frac{z^Y_1}{z^X_1+z^Y_1} 
 
        \frac{z^X_1}{z^X_1+z^Y_2} 
 
        \frac{z^X_2}{z^X_2+z^Y_2} 
 
        \cdots
 
        \P^{G\setminus Y}_S(\xi^{G\setminus Y}) \; \P^{G\setminus X}_S(\xi^{G\setminus X})
 
        \tag{definition} \\
 
        &= (1-p_{1,1}) \; p_{1,2} \; p_{2,2} \; (1-p_{3,2}) \cdots \P^{G\setminus Y}_S(\xi^{G\setminus Y}) \; \P^{G\setminus X}_S(\xi^{G\setminus X})
 
        \tag{definition of $p_{i,j}$} \\
 
        &= \P(\text{path in grid}) \; \P^{G\setminus Y}_S(\xi^{G\setminus Y}) \; \P^{G\setminus X}_S(\xi^{G\setminus X})
 
    \end{align*}
 
    In the grid we see that at every point the probabilities of the outgoing steps sum to $1$, and every path eventually reaches the end, so the probabilities of all paths in the grid sum to $1$. This proves the required equality.
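
    As a minimal sanity check (a worked instance, under the assumption, implicit above, that each of the two paths only terminates once no zeroes remain on its side): suppose $\xi^{G\setminus Y}$ and $\xi^{G\setminus X}$ each contain a single resampling step. Then $z^X_2=z^Y_2=0$, hence $p_{2,1}=0$ and $p_{1,2}=1$, and the two interleavings $XY$ and $YX$ of the grid indeed satisfy
    $$\underbrace{p_{1,1}\,(1-p_{2,1})}_{XY}+\underbrace{(1-p_{1,1})\,p_{1,2}}_{YX}=p_{1,1}+(1-p_{1,1})=1.$$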
 
    We obtain
 
    \begin{align*}
 
        \P^{G}_S(\NZ{S} \cap A^X \cap A^Y)
 
        &= \sum_{\xi^G \in \NZ{S}\cap A^X \cap A^Y} \P^{G}_S(\xi^G) \\
 
        &= \sum_{\xi^{G\setminus Y} \in \NZ{S}\cap A^X}
 
           \sum_{\xi^{G\setminus X} \in \NZ{S}\cap A^Y}
 
            \P^{G\setminus Y}_S(\xi^{G\setminus Y}) \cdot
 
            \P^{G\setminus X}_S(\xi^{G\setminus X}) \\
 
        &= \P^{G\setminus Y}_S(\NZ{S} \cap A^X) \; \cdot \; \P^{G\setminus X}_S(\NZ{S} \cap A^Y)
 
    \end{align*}
 
\end{proof}
 

	
 
The intuition behind the following lemma is that if two sites are at distance $d$ in the graph, then the only way they can affect each other is if an interaction chain forms between them, meaning that every vertex in between them should get resampled to $0$ at least once.
 

	
 
\begin{lemma}\label{lemma:distancePower}
 
	Suppose $G=(V,E)$ is a graph, $X,Y\subseteq V$ and $A^X$ is a local event on $X$. Then
 
	$$\P^{G}(A^X)-\P^{G\setminus Y}(A^X)=\bigO{p^{d(X,Y)}}.$$
 
	(Should be true with $+1$ in the degree, when $d(X,Y)>0$!)
 
\end{lemma}
 
\begin{proof}
 
	We can assume without loss of generality that $X\neq \emptyset\neq Y$, otherwise the statement is trivial. Also we can assume without loss of generality that $d(X,Y)<\infty$, i.e., $X,Y$ are in the same connected component of $G$; otherwise we can use Lemma~\ref{lemma:splitting} with $S=\emptyset$.
 
	
 
	The proof goes by induction on $d(X,Y)$. The statement is trivial for $d(X,Y)=0$, and is easy to check for $d(X,Y)=1$ by looking at resample sequences that reach the all-$1$ state in at most $0$ steps (which is simply the case when everything is sampled to $1$ initially).
 
	
 
	Now we show the inductive step, assuming we know the statement for $d$, and that $d(X,Y)=d+1$.
 
	First we assume that $\NZ{X}\subseteq\overline{A^X}$, i.e., $A^X\subseteq \Z{X}$.
 
	
 
	For $i\in[d]$ we define $A_i^X:=A^X\cap{\NZ{\overline{\partial}(X,i)}}\cap\bigcap_{j\in[i-1]}\Z{\overline{\partial}(X,j)}$, 
 
	and define $A_{d+1}^X:=A^X\cap\bigcap_{j\in[d]}\Z{\overline{\partial}(X,j)}$,
 
	so that they form a partition $A^X=\dot\bigcup_{i\in [d+1]}A_i^X$ (here $i\in[d]$ records the first boundary $\overline{\partial}(X,i)$ on which no zero ever appears, and $i=d+1$ covers the case when each of the first $d$ boundaries receives a zero at some point). 
 
	It is easy to see that for all $i\in[d+1]$ we have $A_{i}^X\subseteq\Z{X}\cap\bigcap_{j\in[i-1]}\Z{\overline{\partial}(X,j)}$, and therefore 
 
	\begin{equation}\label{eq:AXorder}
 
		\P^G(A_{i}^X)=\bigO{p^{i}}.
 
	\end{equation}
 
	Now we use the Splitting lemma~\ref{lemma:splitting} to show that for all $i\in[d]$
 
	\begin{align}
 
		\P^G(A_{i}^X)
 
		&=\P^{G\cap B(X,i)}_{\overline{\partial}(X,i)}(A_{i}^X)\cdot \P^{G\setminus B(X,i-1)}(\NZ{\overline{\partial}(X,i)}) \tag{by Lemma~\ref{lemma:splitting}}\\

		&=\P^{G\cap B(X,i)}_{\overline{\partial}(X,i)}(A_{i}^X)\cdot \left(\P^{G\setminus Y\setminus B(X,i-1)}(\NZ{\overline{\partial}(X,i)})+\bigO{p^{d+1-i}}\right) \tag{by induction}\\

		&=\P^{G\cap B(X,i)}_{\overline{\partial}(X,i)}(A_{i}^X)\cdot \P^{G\setminus Y\setminus B(X,i-1)}(\NZ{\overline{\partial}(X,i)})+\bigO{p^{d+1}} \tag{by equation \eqref{eq:AXorder}}\\
 
		&=\P^{G\setminus Y}(A_{i}^X)+\bigO{p^{d+1}} \tag{by Lemma~\ref{lemma:splitting}}\\
 
		&=\P^{G\setminus Y}(A_{i}^X)+\bigO{p^{d(X,Y)}}. \label{eq:indStep}
 
	\end{align}
 
	Therefore 
 
	$$\P^G(A^X)
 
	\overset{\eqref{eq:AXorder}}{=}\sum_{i\in[d]}\P^G(A_i^X)+\bigO{p^{d(X,Y)}}

	\overset{\eqref{eq:indStep}}{=}\sum_{i\in[d]}\P^{G\setminus Y}(A_i^X)+\bigO{p^{d(X,Y)}}

	\overset{\eqref{eq:AXorder}}{=}\P^{G\setminus Y}(A^X)+\bigO{p^{d(X,Y)}}.
 
	$$
 
	We finish the proof by observing that if $\NZ{X}\nsubseteq\overline{A^X}$,

	then we necessarily have $\NZ{X}\subseteq A^X$ (on $\NZ{X}$ the restriction of the process to $X$ is constantly all ones, so a local event on $X$ either contains $\NZ{X}$ or is disjoint from it), and therefore we can use the above proof with $B^X:=\overline{A^X}$ and use that $\P(A^X)=1-\P(B^X)$.
 
\end{proof}
 
	
 
	\begin{theorem} If $2< 2m\leq n$ and $m\leq M$, then $R^{(n)}=\E^{[-M,M]}(\Res{0})+\bigO{p^{m}}$.
 
	\end{theorem}
 
	\begin{proof} 
 
		\vskip-12mm
 
		\begin{align*}
 
			R^{(n)}
 
			&= \E^{(n)}(\Res{0}) \tag{by translation invariance}\\
 
			&= \sum_{k=1}^{\infty}\P^{(n)}(\Res{0}\!\geq\! k) \\		
 
			&= \sum_{k=1}^{\infty}\P^{[-m+1,m-1]}(\Res{0}\!\geq\! k)+ \bigO{p^{m}} \tag{by Lemma~\ref{lemma:distancePower}}\\
 
			&= \sum_{k=1}^{\infty}\P^{[-M,M]}(\Res{0}\!\geq\! k)+ \bigO{p^{m}} \tag{by Lemma~\ref{lemma:distancePower}}\\	
 
			&=\E^{[-M,M]}(\Res{0})+\bigO{p^{m}}.
 
		\end{align*}  
 
		\vskip-7mm		
 
	\end{proof} 	
 
\begin{comment}
 
		Let $N\geq \max(2n,2m)$, then
 
		\begin{align*}
 
		R^{(n)}
 
		&= \E^{(n)}(\Res{1}) \tag{by translation invariance}\\
 
		&= \sum_{k=1}^{\infty}\P^{(n)}(\Res{1}\geq k) \\
 
		%&= \sum_{k=1}^{\infty}\sum_{\underset{\ell\geq r-1}{\ell,r\in[n]}}\P^{(n)}(\Res{1}\geq k\,\&\, [\ell+1,r-1]\in\mathcal{P}) \tag{partition}\\
 
		%&= \sum_{k=1}^{\infty}\sum_{\underset{\ell\geq r}{\ell,r\in[n]}}\P^{(n)}(\Res{1}\geq k\,\&\, [\ell+1,r-1]\in\mathcal{P})  +\bigO{p^{n}} \\	
 
		%&= \sum_{k=1}^{\infty}\sum_{\underset{\ell\geq r}{\ell,r\in[n]}}\P^{[l,r]}_{b_{\ell}=b_{r}=1}(\Res{1}\geq k\,\&\, [\ell+1,r-1]\in\mathcal{P}) \P^{[r,\ell]}(\NZ{\ell,r}) +\bigO{p^{n}} \tag{by Lemma~\ref{lemma:eventindependenceNewGen}}\\				
 
		&= \sum_{k=1}^{\infty}\sum_{P\text{ patch}:1\in P}\P^{(n)}(\Res{1}\geq k\,\&\, P\in\mathcal{P}) \tag{partition}\\
 
		&= \sum_{k=1}^{\infty}\sum_{P\text{ patch}:1\in P}^{|P|<n}\P^{(n)}(\Res{1}\geq k\,\&\, P\in\mathcal{P}) +\bigO{p^{n}}\\
 
		&= \sum_{k=1}^{\infty}\sum_{P\text{ patch}:1\in P}^{|P|<n}\P^{[P\cup \partial P]}_{b_{\partial P}=1}(\Res{1}\geq k\,\&\, P\in\mathcal{P}) \P^{[\overline{P}]}(\NZ{\partial P}) +\bigO{p^{n}} \tag{by Lemma~\ref{lemma:eventindependenceNewGen}}\\
 
		&= \sum_{k=1}^{\infty}\sum_{P\text{ patch}:1\in P}^{|P|<n}\P^{[P\cup \partial P]}_{b_{\partial P}=1}(\Res{1}\geq k\,\&\, P\in\mathcal{P}) \left(\left(\P^{[|\overline{P}|]}(\NZ{1})\right)^2+\bigO{p^{|\overline{P}|}}\right) +\bigO{p^{n}} \tag{by Lemma~\ref{lemma:independenetSidesNewGen}}\\
 
		&= \sum_{k=1}^{\infty}\sum_{P\text{ patch}:1\in P}^{|P|<n}\P^{[P\cup \partial P]}_{b_{\partial P}=1}(\Res{1}\geq k\,\&\, P\in\mathcal{P}) \left(\left(\P^{[N]}(\NZ{1})\right)^2+\bigO{p^{|\overline{P}|}}\right) +\bigO{p^{n}} \tag{by Corollary~\ref{cor:probIndepNewGen}}\\
 
		&= \sum_{k=1}^{\infty}\sum_{P\text{ patch}:1\in P}^{|P|<n}\P^{[-N,N]}(\Res{1}\geq k\,\&\, P\in\mathcal{P}) +\bigO{p^{n}} \tag{by Lemma~\ref{lemma:eventindependenceNewGen}}\\
 
		&= \sum_{k=1}^{\infty}\sum_{P\text{ patch}:1\in P}\P^{[-N,N]}(\Res{1}\geq k\,\&\, P\in\mathcal{P}) +\bigO{p^{n}} \tag{by Lemma~\ref{lemma:eventindependenceNewGen}}\\
 
		&= \E^{[-N,N]}(\Res{1})+\bigO{p^{n}}.
 
		\end{align*}	
 
\end{comment}			
 

	
 
~
 

	
 
Questions:
 
\begin{itemize}
 
	\item Can we prove some upper bound on the coefficients in the difference, other than that they are zero for small powers?
 
	\item In view of this proof, can we better characterise $a_k^{(k+1)}$?
 
\end{itemize} 
 

	
 
\newpage
 
\section{Proving that $a_k^{(k+1)}=a_k^{(n)}$ for all $n>k$}
 
	Let $$P_{C}:=\NZ{\overline{\partial}(C,1)}\cap\bigcap_{v\in C}\Z{\{v\}}$$ be the event that every point of $C$ gets to $0$ at some time, but no point of its boundary does. If $P_{C}$ holds, we say $C$ is a patch of the $0$-s.
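
	For instance (an illustrative instantiation of the definition on the segment $[5]$): for $C=\{2,3\}$ we have $\overline{\partial}(C,1)=\{1,4\}$, so
	$$P_{\{2,3\}}=\NZ{\{1,4\}}\cap\Z{\{2\}}\cap\Z{\{3\}},$$
	i.e., both $2$ and $3$ get to $0$ at some time, while neither $1$ nor $4$ ever does.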
 
 	\begin{lemma}\label{lemma:independenetSidesNew}	
 
 		$$\P^{[k]}(\Z{1}\cap \Z{k})=\P^{[k]}(\Z{1})\P^{[k]}(\Z{k})+\bigO{p^{k}}=\left(\P^{[k]}(\Z{1})\right)^2+\bigO{p^{k}}.$$
 
 	\end{lemma}   
 
 	Note that using De Morgan's law and the inclusion-exclusion formula we can see that this is equivalent to saying:
 
 	$$\P^{[k]}(\NZ{1}\cap \NZ{k})=\P^{[k]}(\NZ{1})\P^{[k]}(\NZ{k})+\bigO{p^{k}}.$$
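
 	In more detail, the following short check spells out the inclusion-exclusion step behind this equivalence:
 	\begin{align*}
 		\P^{[k]}(\NZ{1}\cap \NZ{k}) &= 1-\P^{[k]}(\Z{1})-\P^{[k]}(\Z{k})+\P^{[k]}(\Z{1}\cap \Z{k}),\\
 		\P^{[k]}(\NZ{1})\,\P^{[k]}(\NZ{k}) &= 1-\P^{[k]}(\Z{1})-\P^{[k]}(\Z{k})+\P^{[k]}(\Z{1})\,\P^{[k]}(\Z{k}),
 	\end{align*}
 	so the two differences $\P^{[k]}(\NZ{1}\cap\NZ{k})-\P^{[k]}(\NZ{1})\P^{[k]}(\NZ{k})$ and $\P^{[k]}(\Z{1}\cap\Z{k})-\P^{[k]}(\Z{1})\P^{[k]}(\Z{k})$ coincide; in particular, one is $\bigO{p^{k}}$ exactly when the other is.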
 
 	\begin{proof}
 
 		We proceed by induction on $k$. For $k=1,2$ the statement is trivial.
 
 		
 
 		Now observe that (partitioning $\Z{1}$, respectively $\Z{k}$, according to the patch of $0$-s containing the given vertex):
 
 		$$\P^{[k]}(\Z{1})=\sum_{C\text{ connected}\,:\,1\in C}\P^{[k]}(P_{C})$$
 
 		$$\P^{[k]}(\Z{k})=\sum_{C\text{ connected}\,:\,k\in C}\P^{[k]}(P_{C})$$
 
 		
 
 		Suppose we have proved the statement up to $k-1$; then we proceed by induction similarly to the above
 
 		\begin{align*}
 
 		&\P^{[k]}(\Z{1}\cap \Z{k})=\\
 
 		&=\!\!\!\sum_{\ell, r\in [k]: \ell<r-1}\!\!\!\P^{[k]}(P_{[\ell]}\cap P_{[r,k]})
 
 		+\P^{[k]}([k]\in\mathcal{P})\\
 
 		&=\!\!\!\sum_{\ell, r\in [k]: \ell<r-1}\!\!\!\P^{[k]}(P_{[\ell]}\cap P_{[r,k]})
 
 		+\bigO{p^{k}} \tag*{$\left(\P^{[k]}([k]\in\mathcal{P})=\bigO{p^{k}}\right)$}\\	