Changeset - bf301c907039
Tom Bannink - 2017-09-08 14:25:41
tom.bannink@cwi.nl
Start writing new definitions
1 file changed with 18 insertions and 10 deletions:
main.tex

main.tex
 
@@ -587,19 +587,13 @@ Questions:
 

	
 
\newpage
 
\section{Proof for general graphs}
 
We consider $R^{(n)}(p)$ as a power series in $p$, and our main aim in this section is to show that $R^{(n)}(p)$ and $R^{(n+k)}(p)$ agree up to order $n-1$.
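Concretely, writing $R^{(n)}(p) = \sum_{m \ge 0} r^{(n)}_m p^m$ (the coefficient notation $r^{(n)}_m$ is introduced here only to state the claim), the aim is:
%% Illustrative restatement of the aim; the coefficient names are not used elsewhere.
\begin{align*}
    r^{(n)}_m = r^{(n+k)}_m \quad \text{for all } 0 \le m \le n-1,
    \qquad\text{i.e.}\qquad
    R^{(n)}(p) - R^{(n+k)}(p) = O\big(p^{n}\big).
\end{align*}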
 

	
 
The proof will consider the following variations of the Markov Chain:
 
\begin{itemize}
 
    \item $\P^{(n)}$ refers to the original process on the length-$n$ cycle.
 
    \item $\P^{[a,b]}$ or $\P^{[n]}$ refers to the analogous Markov Chain on the finite chain $[a,b]$ or $[1,n]$, respectively.
 
\end{itemize}
 
The process on the finite chain is modified at the boundary: if a boundary site is resampled, it resamples only itself and its single neighbour, so only two new bits are drawn (e.g.\ on $[1,n]$, resampling site $1$ redraws only the bits at sites $1$ and $2$).
 
We use the notation $\E^{(n)}$, $\E^{[a,b]}$ and $\E^{[n]}$ analogously for the corresponding expectations.
 

	
 
We also consider the following generalization of the Markov Chain to arbitrary graphs.
 
Let $G=(V,E)$ be a graph with vertex set $V$ and edge set $E$. We define a Markov Chain $\mathcal{M}_G$ as the following process: initialize every vertex of $G$ independently to $0$ with probability $p$ and to $1$ with probability $1-p$. Then at each step, select a uniformly random vertex that has value $0$ and resample it together with its neighbourhood, each of these bits independently being set to $0$ with probability $p$ and to $1$ with probability $1-p$. The Markov Chain terminates when all vertices have value $1$. We use $\P^{G}$ to denote probabilities associated to this Markov Chain.
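To make the dynamics concrete, the following is a minimal simulation sketch (illustrative only, not part of the proof; the adjacency-list representation \texttt{adj} and the name \texttt{run\_chain} are our own):
\begin{verbatim}
import random

# Sketch of the chain M_G defined above: vertices are 0..n-1,
# adj[v] lists the neighbours of v, and every (re)sampled bit is
# 0 with probability p and 1 with probability 1 - p.
def run_chain(adj, p, rng=None):
    rng = rng or random.Random()
    n = len(adj)
    state = [0 if rng.random() < p else 1 for _ in range(n)]
    steps = 0
    while True:
        zeros = [v for v in range(n) if state[v] == 0]
        if not zeros:          # all ones: the chain terminates
            return steps       # number of resampling steps taken
        v = rng.choice(zeros)  # uniformly random vertex with value 0
        for w in [v] + adj[v]: # resample v and its whole neighbourhood
            state[w] = 0 if rng.random() < p else 1
        steps += 1
\end{verbatim}
Taking \texttt{adj[v]} to be the two cyclic neighbours of $v$ recovers the process behind $\P^{(n)}$ on the length-$n$ cycle.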
 

	
 
%Note that an \emph{event} is a subset of all possible paths of the Markov Chain.
 
\begin{definition}[Events conditioned on starting state] \label{def:conditionedevents}
 
    For any state $b\in\{0,1\}^n$, define $\start{b}$ as the event that the starting state of the chain is the state $b$. For any event $A$ and any $v\in[n]$, define
 
    \begin{align*}
 
        \P^{(n)}_b(A) &= \P^{(n)}(A \;|\; \start{b}) \\
 
@@ -608,6 +602,20 @@ We use the notation $\E^{(n)}$,$\E^{[a,b]}$ and $\E^{[n]}$ similarly for denotin
 
    \end{align*}
 
    The last two probabilities are not conditioned on any other bits of the starting state.
 
\end{definition}
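As a simple illustration of this notation (a standard decomposition, not taken from the draft), the law of total probability over starting states, together with $\P^{(n)}(\start{b}) = (1-p)^{|b|}p^{n-|b|}$, gives for any event $A$:
%% Illustrative use of the conditioned probabilities defined above.
\begin{align*}
    \P^{(n)}(A)
    = \sum_{b \in \{0,1\}^n} \P^{(n)}(\start{b}) \, \P^{(n)}_b(A)
    = \sum_{b \in \{0,1\}^n} (1-p)^{|b|} p^{n-|b|} \, \P^{(n)}_b(A).
\end{align*}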
 

	
 

	
 
% Definitions still to be written, for a subset $S \subseteq V$ of vertices:
% the events $\NZ{S}$ and $\Z{S}$, patches, and $B(S;d)$.
 

	
 

	
 

	
 
 

	
 

	
 
%Note that we have $\P^{(n)}(\start{b}) = (1-p)^{|b|}p^{n-|b|}$ by definition of our Markov Chain.
 
\begin{definition}[Vertex visiting event] \label{def:visitingResamplings}
 
    Denote by $\mathrm{Z}^{(v)}$ the event that site $v$ becomes zero at any point in time before the Markov Chain terminates. Denote the complement by $\mathrm{NZ}^{(v)}$, i.e. the event that site $v$ does \emph{not} become zero before the chain terminates. Furthermore define $\mathrm{NZ}^{(v,w)} := \mathrm{NZ}^{(v)} \cap \mathrm{NZ}^{(w)}$, i.e. the event that \emph{both} $v$ and $w$ do not become zero before termination.