% vim: tw=50
% 25/10/2022 10AM

\begin{flashcard}
\begin{theorem*}[Strong Markov Property]
Let $X$ be $\Markov(\lambda, P)$ and $T$ be a stopping time. Conditional \cloze{on $T < \infty$ and $X_T = i$, $(X_{T + n})_{n \ge 0}$ is $\Markov(\delta_i, P)$ and it is independent of $X_0, \dots, X_T$.}
\end{theorem*}
\end{flashcard}

\begin{proof}
Let $x_0, \dots, x_n \in I$, $\omega \in \bigcup_k I^k$. Need to show
\begin{align*}
&\phantom{{}={}} \PP(X_T = x_0, \dots, X_{T + n} = x_n, (X_0, \dots, X_T) = \omega \mid T < \infty, X_T = i) \\
&= \delta_{ix_0} P(x_0, x_1) \cdots P(x_{n - 1}, x_n) \PP((X_0, \dots, X_T) = \omega \mid T < \infty, X_T = i)
\end{align*}
Let $\omega$ have length $k + 1$, so that the event $\{(X_0, \dots, X_T) = \omega\}$ forces $T = k$. Then the left hand side equals
\begin{align*}
&\phantom{{}={}} \frac{\PP(X_k = x_0, \dots, X_{k + n} = x_n, (X_0, \dots, X_k) = \omega, T = k, X_k = i)}{\PP(T < \infty, X_T = i)} \\
&= \PP(X_k = x_0, \dots, X_{k + n} = x_n \mid (X_0, \dots, X_k) = \omega, T = k, X_k = i) \\
&~~~\times \frac{\PP((X_0, \dots, X_k) = \omega, T = k, X_k = i)}{\PP(T < \infty, X_T = i)} \tag{$*$}
\end{align*}
The event $\{T = k\}$ only depends on $X_0, \dots, X_k$ (since $T$ is a stopping time). So
\begin{align*}
&\phantom{{}={}} \PP(X_k = x_0, \dots, X_{k + n} = x_n \mid (X_0, \dots, X_k) = \omega, T = k, X_k = i) \\
&= \PP(X_k = x_0, \dots, X_{k + n} = x_n \mid X_k = i)
\end{align*}
by the Markov property. This is also equal to
\[ \delta_{ix_0} P(x_0, x_1) \cdots P(x_{n - 1}, x_n) \]
The remaining fraction in ($*$) is equal to
\[ \PP((X_0, \dots, X_k) = \omega, T = k \mid T < \infty, X_T = i) = \PP((X_0, \dots, X_T) = \omega \mid T < \infty, X_T = i) \]
(as $\omega$ has length $k + 1$), which gives the required identity.
\end{proof}

\subsubsection*{Example}
Consider a Markov chain with $P(0, 1) = 1$ and $P(i, i + 1) = P(i, i - 1) = \half$ for $i \ge 1$ (like a simple random walk, but restricted to $X \ge 0$). Let
\[ T_0 = \inf\{n \ge 0 : X_n = 0\} \]
and let $h_i = \PP_i(T_0 < \infty)$, so $h_0 = 1$. What is $h_1$ equal to? Conditioning on the first step,
\[ h_1 = \half + \half h_2 \]
Since the chain must pass through $1$ on its way from $2$ to $0$, and $\PP_2(T_1 < \infty) = h_1$ (the walk is translation invariant away from $0$),
\begin{align*}
h_2 &= \PP_2(T_0 < \infty) \\
&= \PP_2(T_1 < \infty, T_0 < \infty) \\
&= \PP_2(T_0 < \infty \mid T_1 < \infty) \cdot \PP_2(T_1 < \infty) \\
&= \PP_2(T_0 < \infty \mid T_1 < \infty) h_1
\end{align*}
Conditional on $T_1 < \infty$ (and hence $X_{T_1} = 1$), by the strong Markov property $(X_{T_1 + n})_{n \ge 0}$ is $\Markov(\delta_1, P)$. So (under the conditioning) we can express $T_0 = T_1 + \tilde{T}_0$, where $\tilde{T}_0$ is \emph{independent} of $T_1$ and has the same law as $T_0$ under $\PP_1$. Hence
\begin{align*}
\PP_2(T_0 < \infty \mid T_1 < \infty) &= \PP_2(T_1 + \tilde{T}_0 < \infty \mid T_1 < \infty) \\
&= \PP_1(T_0 < \infty) \\
&= h_1
\end{align*}
so $h_2 = h_1^2$. So
\[ h_1 = \half + \half h_1^2 \iff (h_1 - 1)^2 = 0 \implies h_1 = 1 \]
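% Aside (not from the lecture): the asymmetric variant, sketched under
% the assumption that the characterisation of hitting probabilities as
% the minimal non-negative solution was proved earlier in the course.
For comparison, here is a sketch of the asymmetric variant (a standard example, not from this lecture): take $P(0, 1) = 1$ and $P(i, i + 1) = p$, $P(i, i - 1) = 1 - p$ for $i \ge 1$. The strong Markov argument gives $h_2 = h_1^2$ exactly as above, so conditioning on the first step,
\[ h_1 = (1 - p) + p h_1^2 \iff (p h_1 - (1 - p))(h_1 - 1) = 0 \]
with roots $1$ and $\frac{1 - p}{p}$. Taking the minimal non-negative root (by the characterisation of hitting probabilities from earlier in the course), $h_1 = 1$ for $p \le \half$, while $h_1 = \frac{1 - p}{p} < 1$ for $p > \half$; the symmetric case above sits exactly at the boundary.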
\subsubsection*{Transience and recurrence}
\begin{definition*}
A state $i$ is called \emph{recurrent} if
\[ \PP_i(X_n = i \text{ for infinitely many $n$}) = 1 \]
A state $i$ is called \emph{transient} if
\[ \PP_i(X_n = i \text{ for infinitely many $n$}) = 0 \]
\end{definition*}

\begin{hiddenflashcard}
\begin{definition*}[Recurrent state]
A state $i$ is called \emph{recurrent} if
\[ \cloze{\PP_i(X_n = i \text{ for infinitely many $n$}) = 1} \]
\end{definition*}
\end{hiddenflashcard}

\begin{hiddenflashcard}
\begin{definition*}[Transient state]
A state $i$ is called \emph{transient} if
\[ \cloze{\PP_i(X_n = i \text{ for infinitely many $n$}) = 0} \]
\end{definition*}
\end{hiddenflashcard}

Let
\[ V_i = \sum_{l = 0}^\infty \mathbbm{1}(X_l = i) = \text{total number of visits to $i$} \]
We will calculate $\PP_i(V_i > r)$ for some values of $r$. Let $T_i^{(k)}$ denote the $k$-th return time. Informally,
\[ \PP_i(V_i > 0) = 1, \qquad \PP_i(V_i > 1) = \PP_i(T_i^{(1)} < \infty), \qquad \PP_i(V_i > 2) = \PP_i(T_i^{(1)} < \infty)^2 \]
More formally: define $T_i^{(0)} = 0$ and for $k \ge 1$:
\[ T_i^{(k)} = \inf\{n > T_i^{(k - 1)} : X_n = i\} \]
(the $k$-th return time to $i$). In particular,
\[ T_i^{(1)} = \inf\{n > 0 : X_n = i\} \]
Let $f_i = \PP_i(T_i^{(1)} < \infty)$.

\begin{lemma*}
For all $r \in \NN$, $\PP_i(V_i > r) = f_i^r$. So under $\PP_i$, $V_i$ has a geometric distribution.
\end{lemma*}

\begin{proof}
True for $r = 0$. Suppose it is true for $r \le k$. We will prove it for $k + 1$.
\begin{align*}
\PP_i(V_i > k + 1) &= \PP_i(T_i^{(k + 1)} < \infty) \\
&= \PP_i(T_i^{(k + 1)} < \infty, T_i^{(k)} < \infty) \\
&= \PP_i(T_i^{(k + 1)} < \infty \mid T_i^{(k)} < \infty) \PP_i(T_i^{(k)} < \infty)
\end{align*}
The successive return times to $i$ are stopping times, so conditional on $T_i^{(k)} < \infty$ (and hence $X_{T_i^{(k)}} = i$), by the strong Markov property $(X_{T_i^{(k)} + n})_{n \ge 0}$ is $\Markov(\delta_i, P)$ and is independent of $X_0, \dots, X_{T_i^{(k)}}$. So
\[ \PP_i(T_i^{(k + 1)} < \infty \mid T_i^{(k)} < \infty) = \PP_i(T_i^{(1)} < \infty) = f_i \]
and hence, by the induction hypothesis, $\PP_i(V_i > k + 1) = f_i \cdot f_i^k = f_i^{k + 1}$.
\end{proof}

\begin{theorem*}
\begin{enumerate}[(a)]
\item If $f_i = 1$, then $i$ is recurrent and
\[ \sum_{n \ge 0} p_{ii}(n) = \infty \]
\item If $f_i < 1$, then $i$ is transient and
\[ \sum_{n \ge 0} p_{ii}(n) < \infty \]
\end{enumerate}
\end{theorem*}

\begin{proof}
First note
\[ \EE_i [V_i] = \EE_i \left[ \sum_{l = 0}^\infty \mathbbm{1} (X_l = i) \right] = \sum_{l = 0}^\infty p_{ii}(l) \]
\begin{enumerate}[(a)]
\item If $f_i = 1$ then by the lemma, $\PP_i(V_i = \infty) = 1$, so $i$ is recurrent, so $\EE_i [V_i] = \infty$, so $\sum_n p_{ii}(n) = \infty$.
\item If $f_i < 1$ then by the lemma, $\EE_i [V_i] = \sum_{r \ge 0} \PP_i(V_i > r) = \sum_{r \ge 0} f_i^r = \frac{1}{1 - f_i} < \infty$, so $\sum_n p_{ii}(n) < \infty$, so $\PP_i(V_i < \infty) = 1$, so $i$ is transient.
\end{enumerate}
\end{proof}

\begin{hiddenflashcard}
\begin{theorem*}
Every state is either \emph{recurrent} or \emph{transient}.
\end{theorem*}
\begin{proof}
\cloze{
\begin{itemize}
\item Let $i$ be a state, and define
\[ f_i = \text{probability that we return to $i$ if we start at $i$} \]
\item If $f_i = 1$, then for every $n$
\[ \text{probability that we return to $i$ at least $n$ times} = 1 \]
so recurrent.
\item If $f_i < 1$, then
\[ \text{probability that we return to $i$ at least $n$ times} = f_i^n \]
summing over all $n$, the expected number of returns is finite, so transient.
\end{itemize}
}
\end{proof}
\end{hiddenflashcard}

\begin{theorem*}
Let $x$ and $y$ communicate. Then they are either both recurrent or both transient.
\end{theorem*}

\begin{proof}
If $x$ is recurrent, we will show $y$ is also recurrent. $x \comm y$ implies that there exist $m, r \ge 0$ such that $p_{xy}(m) > 0$ and $p_{yx}(r) > 0$. Then
\[ p_{yy}(n + m + r) \ge p_{yx}(r) p_{xx}(n) p_{xy}(m) \]
so
\[ \sum_{n \ge 0} p_{yy}(n + m + r) \ge p_{yx}(r) p_{xy}(m) \sum_{n \ge 0} p_{xx}(n) = \infty \]
(using the previous theorem, since $x$ is recurrent), so $y$ is also recurrent.
\end{proof}
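% Worked application (a standard example, not from this lecture):
% simple random walk on \ZZ, using the criterion for recurrence in
% terms of \sum_n p_{ii}(n) from the theorem above.
As a sketch of how the criterion is used in practice, consider the simple random walk on $\ZZ$ with $P(i, i + 1) = p$ and $P(i, i - 1) = 1 - p$ (a standard example, not from this lecture). A return to $0$ takes an even number of steps, and
\[ p_{00}(2n) = \binom{2n}{n} p^n (1 - p)^n \sim \frac{(4p(1 - p))^n}{\sqrt{\pi n}} \]
by Stirling's formula. If $p = \half$ then $p_{00}(2n) \sim \frac{1}{\sqrt{\pi n}}$, so $\sum_n p_{00}(n) = \infty$ and $0$ is recurrent; if $p \ne \half$ then $4p(1 - p) < 1$, so the sum converges and $0$ is transient. Since all states communicate, the last theorem shows every state is of the same type.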