\section{Easy deductions, and \texorpdfstring{$ij$}{ij}-insensitive sets}

\begin{theorem}  \label{thm:dhj-dv} Let $m = n_{\ref{thm:dhj-d-ens}}(k,d,\delta/2,\ens{k})$.\noteryan{I.e., $m$ large enough that every set with $\ens{k}^m$-density at least $\delta/2$ has a nondegenerate $d$-dimensional combinatorial subspace.}  Then for all $n \geq n_{\ref{thm:dhj-dv}}(k,d,\delta) := m$ and $A \subseteq [k]^n$,
\[
\ens{k}^n(A) \geq \delta \quad\Rightarrow\quad \Pr_{\sigma \sim \ens{k+d}^n}[\sigma \subseteq A \text{ as a $d$-dimensional subspace}] \geq \eta_{\ref{thm:dhj-dv}}(k,d,\delta) := \frac{\delta}{2(k+d)^{2m}}.
\]
\end{theorem}
\begin{proof}
Let $x \sim \ens{m}^n$ and $y \sim \ens{k}^m$.\noteryan{Surely $m \geq k$.}  By Lemma~\ref{lem:ens-compose}, $y \circ x \sim \ens{k}^n$.  Hence if $A \subseteq [k]^n$ satisfies $\ens{k}^n(A) \geq \delta$, we get that $\ens{k}^m(A_x) \geq \delta/2$ with probability at least $\delta/2$ over $x$, where $A_x$ denotes $\{z \in [k]^m : z \circ x \in A\}$.  Call such $x$'s ``good''.  By our choice of $m$, for all good~$x$ we have that $A_x$ contains a nondegenerate $d$-dimensional subspace.  Thus by Proposition~\ref{prop:min-eqs}, a randomly drawn subspace $w \sim \ens{k+d}^m$ is in $A_x$ with probability at least $(k+d)^{-2m}$.  We conclude that with probability at least $(\delta/2)(k+d)^{-2m}$ over the choice of $x$ and $w$, the nondegenerate $d$-dimensional subspace $w \circ x$ is in $A$.  This completes the proof, as $w \circ x$ is distributed as $\ens{k+d}^n$ by Lemma~\ref{lem:ens-compose}.
\end{proof}
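
For completeness, here is the averaging step of the preceding proof spelled out.  Since $y \circ x \sim \ens{k}^n$ by Lemma~\ref{lem:ens-compose},
\[
\delta \;\leq\; \Pr_{x,y}[y \circ x \in A] \;\leq\; \Pr_x[\ens{k}^m(A_x) \geq \delta/2] + \frac{\delta}{2},
\]
because conditioned on any $x$ with $\ens{k}^m(A_x) < \delta/2$, the event $y \circ x \in A$ has probability less than $\delta/2$.  Rearranging gives $\Pr_x[x \text{ is good}] \geq \delta/2$.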
 
In the particular case of $k = 2$, $d = 1$, this theorem can be proved directly.\noteryan{Something like the following should appear somewhere in Section~\ref{sec:sperner}.}
\begin{theorem} \label{thm:sperner-v} For $n \geq 2$ and $A \subseteq [2]^n$,
\[
\ens{2}^n(A) \geq \delta \quad\Rightarrow\quad \Pr_{\sigma \sim \ens{3}^n}[\sigma \subseteq A \text{ as a line}] \geq \delta^2 - \frac{10}{n}.
\]
\noteryan{Not sure the $10$ is exactly what follows from Proposition~\ref{prop:degen}, but whatever.}  In particular, we can take $n_{\ref{thm:dhj-dv}}(2,1,\delta) = 20/\delta^2$ and $\eta_{\ref{thm:dhj-dv}}(2,1,\delta) = \delta^2/2$.
\end{theorem}
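
The stated parameters are just arithmetic: if $n \geq 20/\delta^2$ then $10/n \leq \delta^2/2$, so
\[
\delta^2 - \frac{10}{n} \;\geq\; \delta^2 - \frac{\delta^2}{2} \;=\; \frac{\delta^2}{2},
\]
which justifies taking $n_{\ref{thm:dhj-dv}}(2,1,\delta) = 20/\delta^2$ and $\eta_{\ref{thm:dhj-dv}}(2,1,\delta) = \delta^2/2$.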
 
 
 
\noteryan{The following \emph{could} be done under equal-slices, using Lemma~\ref{lem:distributions}.\ref{eqn:distrs-eqs-eqs}, if necessary.}
\noteryan{The following \emph{could} be done under equal-slices, using Lemma~\ref{lem:distributions}.\ref{eqn:distrs-eqs-eqs}, if necessary.}
\begin{theorem}  \label{thm:dhj-d-prod} Let $d \in \N$, $0 < \delta < 1$, and let $\pi$ be a distribution on $[k]$.  Let $m_1 = n_{\ref{thm:dhj-d-prod}}(k,1,\delta/2,\pi)$\noteryan{Really, just a bound for DHJ(k) under $\pi^{\otimes n}$.}, $\eta = \frac{\delta}{2(k+1)^{m_1}}$, and $m_d = n_{\ref{thm:dhj-d-prod}}(k,d,\eta,\pi)$.  Then for all $n \geq n_{\ref{thm:dhj-d-prod}}(k,d+1,\delta,\pi) := m_1 + m_d$ and $A \subseteq [k]^n$,
\[
\pi^{\otimes n}(A) \geq \delta \quad\Rightarrow\quad A \text{ contains a nondegenerate $(d+1)$-dimensional subspace.}
\]
\end{theorem}
\begin{proof}
Choose $J \subseteq [n]$ with $|J| = m_1$ arbitrarily, and let $\barJ = [n] \setminus J$, so $|\barJ| \geq m_d$ by assumption.  Since $\pi^{\otimes n}(A) \geq \delta$, with probability at least $\delta/2$ over $y \sim \pi^{\otimes \barJ}$ we have $\pi^{\otimes J}(A_y) \geq \delta/2$, where $A_y$ denotes $\{z \in [k]^J : (z,y) \in A\}$ (identifying $[k]^n$ with $[k]^J \times [k]^{\barJ}$).  Call such $y$'s ``good''.  Since $|J| \geq n_{\ref{thm:dhj-d-prod}}(k,1,\delta/2,\pi)$, we conclude that whenever $y$ is good, $A_y \subseteq [k]^J$ contains a nondegenerate line.  Since there are at most $(k+1)^{m_1}$ such lines in $[k]^J$, we conclude there must be a particular line $\lambda$ over $[k]^J$ such that $\lambda \subseteq A_y$ for at least an $\eta$ fraction of $y \sim \pi^{\otimes \barJ}$.  Write $L$ for the set of such $y$'s.  Since $|\barJ| \geq n_{\ref{thm:dhj-d-prod}}(k,d,\eta,\pi)$, there must exist a nondegenerate $d$-dimensional subspace $\sigma$ over $[k]^{\barJ}$ such that $\sigma \subseteq L$.  Now $\lambda \times \sigma$ is a nondegenerate $(d+1)$-dimensional subspace contained in $A$.
\end{proof}
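
The existence of $\lambda$ in the proof above is a pigeonhole argument: each good $y$ contributes at least one nondegenerate line of $[k]^J$ contained in $A_y$, and there are at most $(k+1)^{m_1}$ candidate lines, so
\[
\frac{\delta}{2} \;\leq\; \Pr_y[y \text{ is good}] \;\leq\; \sum_{\lambda'} \Pr_y[\lambda' \subseteq A_y],
\]
where the sum ranges over all nondegenerate lines $\lambda'$ of $[k]^J$.  Hence some particular line has $\Pr_y[\lambda' \subseteq A_y] \geq (\delta/2)/(k+1)^{m_1} = \eta$; this is the $\lambda$ used in the proof.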


In the particular case of $k = 2$ and $\pi$ uniform, we can use a significantly better bound due to Gunderson, R{\"o}dl, and Sidorenko~\cite{GRS99}:\noteryan{And I think it \emph{will} be important to use it to get a good bound for DHJ(k).}
\begin{theorem}  Let $A \subseteq [2]^n$ have (uniform) density at least $\delta$, and assume $n \geq (10d)^{d2^d} \cdot (1/\delta)^{2^d}$.  Then $A$ contains a nondegenerate $d$-dimensional subspace.
\end{theorem}
\begin{proof}
Theorem 4.2 and equation~(8) of~\cite{GRS99} taken together state that the maximum density of a $d$-dimensional subspace-free subset of $[2]^n$ is at most
\[
c(d) \cdot n^{-1/2^d}, \quad \text{where } c(d) = 10^d 2^{-1/2^{d-1}}d^{d - 1/2^d}.
\]
Note that $c(d) < (10d)^d$.  Hence it suffices to check that our lower bound on $n$ implies $\delta \geq (10d)^d n^{-1/2^d}$.
\end{proof}
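
Concretely, the final check goes as follows: $n \geq (10d)^{d2^d} \cdot (1/\delta)^{2^d}$ is equivalent to $n^{1/2^d} \geq (10d)^d/\delta$, whence
\[
(10d)^d \, n^{-1/2^d} \;\leq\; (10d)^d \cdot \frac{\delta}{(10d)^d} \;=\; \delta,
\]
so $\delta \geq (10d)^d n^{-1/2^d} > c(d)\, n^{-1/2^d}$, and $A$ cannot be $d$-dimensional subspace-free.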








\subsection{\texorpdfstring{$ij$}{ij}-insensitive sets}


\begin{definition}  Let $i, j \in [k]$ be distinct, and $I \subseteq [n]$.  We say that $A \subseteq [k]^n$ is \emph{$ij$-insensitive on~$I$} if the following condition holds:  $x \in A$ iff $\chg{x}{i}{j} \in A$.
\end{definition}
\begin{remark}  This definition is symmetric in $i$ and $j$.  It is perhaps easier to understand the condition as follows: ``altering some $i$'s to $j$'s and some $j$'s to $i$'s does not affect presence/absence in $A$''.
\end{remark}


\begin{remark}  If $A$ and $B$ are $ij$-insensitive on $I$, then so too are $\overline{A}$\noteryan{Okay to use this as $A$-complement?}, $A \cap B$, $A \cup B$.\noteryan{Redundant, I know}
\end{remark}
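
For example, with $k = 3$ and $(i,j) = (1,2)$, the set $A = \{x \in [3]^n : x_t = 3 \text{ for some } t \in I\}$ is $12$-insensitive on $I$: switching $1$'s and $2$'s never creates or destroys a $3$, so membership in $A$ is unaffected.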


\noteryan{Just putting the following statement of subspace-DHJ(k) under product distributions here for the future}
\begin{theorem} \label{thm:subsp} Let $d \in \N$, $0 < \eta < 1$, and let $\pi$ be a distribution on $[k]$.  Then assuming
\[
n \geq n_{\ref{thm:subsp}}(k,d,\eta,\pi) := [[to be determined]],
\]
every set $A \subseteq [k]^n$ with $\pi^{\otimes n}(A) \geq \eta$ contains a nondegenerate $d$-dimensional subspace.
\end{theorem}

\begin{theorem} \label{thm:dhj-d-prod-insens}
\textbf{[[\noteryan{} Here and in the rest of this subsection, we should now show that all of the statements in the previous subsection about $[k]^n$ can be easily pulled into the analogous statements for $[k+1]^n$ when the set is $ij$-insensitive.  Just condition on the location of the $(k+1)$'s.  (Minor annoyance: make sure this leaves you with enough coordinates still free.)  It's nicest under equal-slices / uniform, because the resulting distribution is again equal-slices / uniform.  General product measures unfortunately change; however, their $\min$ can only go up, which should probably mean the bounds need not change.]]}
\end{theorem}
