Changeset - 09b0b0e67df9
[Not reviewed]
0 1 0
Shravas K. Rao - 9 years ago 2016-10-06 18:36:10
shravas@gup-130-157.cwi.nl
:(
1 file changed with 4 insertions and 2 deletions:
0 comments (0 inline, 0 general)
disctospec.tex
Show inline comments
 
@@ -102,12 +102,13 @@ where $R = \{z \in \mathbb{C} : \mathcal{R}(z) \geq 0\}$ is the set of all numbe
 
\end{align}
 
By convexity,~\eqref{eq:reduce} is maximized when $x'[j] \in \{0, 1\}^n$ for all $j$, or in other words, is an indicator vector.  Letting $T_j = \{i : x'[j]_{i} = 1\}$ proves the lemma.
 
\end{proof}
 

	
 
By a straightforward generalization of the Expander Mixing Lemma, it follows that if a Cayley hypergraph has spectral expansion $\lambda$, then it also has discrepancy $\lambda$.  We attempt to prove the converse, stated below.
 

	
 
\snote{This is misleading!}
 
\begin{lem}\label{lem:disctospec}
 
Let $\Gamma = \mathbb{F}_p^n$. For $t \leq p$ and every $\lambda$ there exists an $\epsilon$ such that if a Cayley $t$-uniform hypergraph $H = \cay^{(t)}(\Gamma, q, S')$ has discrepancy $\epsilon$ with respect to $K = \cay^{(t)}(\Gamma, q, S)$, then it also has spectral expansion $\lambda$ with respect to $K$.
 
\end{lem}
 

	
 
To prove this, we use the generalized von Neumann inequality and the inverse Gowers theorem, which we state below.  We first recall the definition of the Gowers norm.
 

	
 
@@ -200,18 +201,19 @@ This is a system of linear equations whose coefficients come from the first $i+1
 

	
 
\begin{proof}[Proof of Lemma~\ref{lem:disctospec}]
 
We prove the contrapositive.  Assume that $\cay^{(t)}(\Gamma, q, S')$ does not have spectral expansion $\lambda$; that is, there exist vectors $x_1, \ldots, x_t$ such that $\|x_i\|_{\ell_t} = 1$ for all $i$, but $(A_H-A_K)(x_1, \ldots, x_t) \geq \lambda$.  Let $1_{H, K} = \frac{1_{S'}}{|S'|}-\frac{1_{S}}{|S|}$.  Then
 
 \begin{equation}
 
(A_H-A_K)(x_1, \ldots, x_t) = \frac{|\Gamma|^2}{t!} \sum_{\sigma \in S_t} \mathbb{E}_{u, g}[x_1(ug^{\sigma(1)-1})x_2(ug^{\sigma(2)-1})\cdots x_t(ug^{\sigma(t)-1})1_{H, K}(g)]. \label{eq:unravel}
 
\end{equation}
 
  By Lemma~\ref{lem:genvn},~\eqref{eq:unravel} is bounded above by $\|1_{H, K}\|_{U^t}$.  By the inverse Gowers theorem, there exists a polynomial $P$ of degree $t-1$ such that $\mathbb{E}_{g}[e(P(g)) 1_{H, K}(g)] \geq \epsilon$ for some $\epsilon$.  By Lemma~\ref{lem:decomp}, there exist $P_0, \ldots, P_{t-1}$ such that \[e(P_0(x))e(P_1(x+y))\cdots e(P_{t-1}(x+(t-1)y)) = e(P(y)),\] and therefore \[\mathbb{E}_{u, g}[e(P_0(u))e(P_1(ug))\cdots e(P_{t-1}(ug^{t-1})) 1_{H, K}(g)] = \mathbb{E}_{g}[e(P(g)) 1_{H, K}(g)] \geq \epsilon,\]
 
  By Lemma~\ref{lem:genvn},~\eqref{eq:unravel} is bounded above by $|\Gamma|^2\|1_{H, K}\|_{U^t}$.  By the inverse Gowers theorem, there exists a polynomial $P$ of degree $t-1$ such that $\mathbb{E}_{g}[e(P(g)) 1_{H, K}(g)] \geq \epsilon$ for some $\epsilon$ that depends on $\lambda/|\Gamma|^2$. \snote{This is really bad}
 
  By Lemma~\ref{lem:decomp}, there exist $P_0, \ldots, P_{t-1}$ such that \[e(P_0(x))e(P_1(x+y))\cdots e(P_{t-1}(x+(t-1)y)) = e(P(y)),\] and therefore \[\mathbb{E}_{u, g}[e(P_0(u))e(P_1(ug))\cdots e(P_{t-1}(ug^{t-1})) 1_{H, K}(g)] = \mathbb{E}_{g}[e(P(g)) 1_{H, K}(g)] \geq \epsilon,\]
 
and
 
\[
 
(A_H-A_K)(e(P_0), \ldots, e(P_{t-1})) \geq \frac{\epsilon|\Gamma|^2}{t!}.
 
\]
 
Because the coordinates of $e(P_i)$ are bounded above in absolute value by $1$ for all $i$, by Lemma~\ref{lem:disctoinfty}, this implies that $H$ has discrepancy at least $\epsilon|\Gamma|^2/(t!\pi^t)$ with respect to $K$.
 
Because the coordinates of $e(P_i)$ are bounded above in absolute value by $1$ for all $i$, by Lemma~\ref{lem:disctoinfty}, this implies that $H$ has discrepancy at least $\epsilon|\Gamma|/(t!\pi^t)$ with respect to $K$.
 
\end{proof}
 

	
 

	
 
\bibliographystyle{alphaabbrv}
 
\bibliography{disctospec.bib}
 

	
0 comments (0 inline, 0 general)