Changeset - 09b0b0e67df9
[Not reviewed]
0 1 0
Shravas K. Rao - 9 years ago 2016-10-06 18:36:10
shravas@gup-130-157.cwi.nl
:(
1 file changed with 4 insertions and 2 deletions:
0 comments (0 inline, 0 general)
disctospec.tex
Show inline comments
 
@@ -96,24 +96,25 @@ z = \pi \mathbb{E}_{w \in \mathbb{D}} [1_R(z \overline{w}) |z| w],
 
\]
 
where $R = \{z \in \mathbb{C} : \Re(z) \geq 0\}$ is the set of all numbers with non-negative real part, and $1_R$ is the corresponding indicator function.  For a vector $x \in \mathbb{C}^{n}$, let $x'$ be the random vector defined by $x'_i = 1_R(x_i \overline{w})|x_i|$, where $w$ is again a uniformly random unit complex number.  Then we have
 
\begin{align}
 
\|A\|_{\ell_{\infty}, \ldots, \ell_{\infty}} &= \sup_{x[1], \ldots, x[t] \in \mathbb{D}^n} \sum_{(i_1, \ldots, i_t) \in [n]^t} A_{i_1, \ldots, i_t} x[1]_{i_1}\ldots x[t]_{i_t} \nonumber \\

&= \sup_{x[1], \ldots, x[t] \in \mathbb{D}^n} \left| \pi^t \mathbb{E}_{w_1, \ldots, w_t \in \mathbb{D}} \sum_{(i_1, \ldots, i_t) \in [n]^t} A_{i_1, \ldots, i_t} x'[1]_{i_1}\ldots x'[t]_{i_t} w_1 \ldots w_t \right| \nonumber \\

&\leq \pi^t \mathbb{E}_{w_1, \ldots, w_t \in \mathbb{D}} \sup_{x[1], \ldots, x[t] \in \mathbb{D}^n} \left| \sum_{(i_1, \ldots, i_t) \in [n]^t} A_{i_1, \ldots, i_t} x'[1]_{i_1}\ldots x'[t]_{i_t} \right|. \label{eq:reduce}
 
\end{align}
 
By convexity,~\eqref{eq:reduce} is maximized when $x'[j] \in \{0, 1\}^n$ for all $j$, or in other words, when each $x'[j]$ is an indicator vector.  Letting $T_j = \{i : x'[j]_{i} = 1\}$ proves the lemma.
 
\end{proof}
 

	
 
By a straightforward generalization of the Expander Mixing Lemma, it follows that if a Cayley hypergraph has spectral expansion $\lambda$, then it also has discrepancy $\lambda$.  We attempt to prove the converse, stated below.
 

	
 
\snote{This is misleading!}
 
\begin{lem}\label{lem:disctospec}
 
Let $\Gamma = \mathbb{F}_p^n$. For $t \leq p$ and every $\lambda$ there exists an $\epsilon$ such that if a Cayley $t$-uniform hypergraph $H = \cay^{(t)}(\Gamma, q, S')$ has discrepancy $\epsilon$ with respect to $K = \cay^{(t)}(\Gamma, q, S)$, then it also has spectral expansion $\lambda$ with respect to $K$.
 
\end{lem}
 

	
 
To prove this we use the generalized von Neumann inequality and the inverse Gowers theorem, which we state below.  We first state the definition of the Gowers norm.
 

	
 
\begin{define}
 
Given a function $f: \Gamma \rightarrow \mathbb{C}$, its Gowers uniformity norm is defined by
 
\[
 
\|f\|_{U^d} = [\mathbb{E}_{h_1, \ldots, h_d, x \in \Gamma} \Delta_{h_1}\cdots\Delta_{h_d} f(x)]^{1/2^d}
 
\]
 
where $\Delta_{h}$ is the multiplicative derivative
 
@@ -194,25 +195,26 @@ where $|s|$ denotes the Hamming weight of $s$.  Then~\eqref{eq:need} holds if fo
 
and
 
\[
 
\sum_{j=0}^d \alpha^i_j j^{i} = 1.
 
\]
 
This is a system of linear equations whose coefficients come from the first $i+1$ rows of a $(d+1) \times (d+1)$ Vandermonde matrix.  Because $d < p$, the determinant of such a matrix is non-zero and thus the matrix is invertible and $\alpha^i_j$ exist as desired.
 
\end{proof}
 

	
 
\begin{proof}[Proof of Lemma~\ref{lem:disctospec}]
 
We prove the contrapositive.  Assume that $\cay^{(t)}(\Gamma, q, S')$ does not have spectral expansion $\lambda$, that is, there exist vectors $x_1, \ldots, x_t$ such that $\|x_i\|_{\ell_t} = 1$ for all $i$, but $(A_H-A_K)(x_1, \ldots, x_t) \geq \lambda$.  Let $1_{H, K} = \frac{1_{S'}}{|S'|}-\frac{1_{S}}{|S|}$.  Then
 
 \begin{equation}
 
(A_H-A_K)(x_1, \ldots, x_t) = \frac{|\Gamma|^2}{t!} \sum_{\sigma \in S_t} \mathbb{E}_{u, g}[x_1(ug^{\sigma(1)-1})x_2(ug^{\sigma(2)-1})\cdots x_t(ug^{\sigma(t)-1})1_{H, K}(g)]. \label{eq:unravel}
 
\end{equation}
 
  By Lemma~\ref{lem:genvn},~\eqref{eq:unravel} is bounded above by $|\Gamma|^2\|1_{H, K}\|_{U^t}$.  By the inverse Gowers theorem, there exists a polynomial $P$ of degree $t-1$ such that $\mathbb{E}_{g}[e(P(g)) 1_{H, K}] \geq \epsilon$ for some $\epsilon$ that depends on $\lambda/|\Gamma|^2$. \snote{This is really bad}

  By Lemma~\ref{lem:decomp}, there exist $P_0, \ldots, P_{t-1}$ such that \[e(P_0(x))e(P_1(x+y))\cdots e(P_{t-1}(x+(t-1)y)) = e(P(y)),\] and therefore \[\mathbb{E}_{u, g}[e(P_0(u))e(P_1(ug))\cdots e(P_{t-1}(ug^{t-1})) 1_{H, K}] = \mathbb{E}_{g}[e(P(g)) 1_{H, K}] \geq \epsilon,\]
 
and
 
\[
 
(A_H-A_K)(e(P_0), \ldots, e(P_{t-1})) \geq \frac{\epsilon|\Gamma|^2}{t!}.
 
\]
 
Because the coordinates of $e(P_i)$ are bounded above in absolute value by $1$ for all $i$, by Lemma~\ref{lem:disctoinfty}, this implies that $H$ has discrepancy at least $\epsilon|\Gamma|/(t!\pi^t)$ with respect to $K$.
 
\end{proof}
 

	
 

	
 
\bibliographystyle{alphaabbrv}
 
\bibliography{disctospec.bib}
 

	
 
\end{document}
 
\ No newline at end of file
0 comments (0 inline, 0 general)