% vim: tw=50 % 20/10/2022 10AM \subsubsection*{Examples} Simple random walk on $\ZZ_+$. \[ P(0, 1) = 1 \] \[ P(i, i + 1) = p = 1 - P(i, i - 1) \quad i \ge 1 \] Want to find $h_i = \PP_i(T_0 < \infty)$. \[ h_0 = 1 \quad h_i = p h_{i + 1} + q h_{i - 1} \] \begin{itemize} \item $p \neq q$. Then \[ h_i = a + b \left( \frac{q}{p} \right)^i = a + (1 - a) \left( \frac{q}{p} \right)^i \] $i = 0$, $a + b = 1$. Assume $q > p$: to get non-negative and minimal solution need to take $a = 1$. So $h_i = 1$ for all $i \ge 1$. If instead we have $q < p$, then minimality forces $a = 0$, so $h_i = \left( \frac{q}{p} \right)^i$ for $i \ge 1$. \item If $p = q = \half$. General solution $h_i = a + bi$, $h_0 = 1$ implies $a = 1$. For minimality need to take $b = 0$. So $h_i = 1$ for all $i \ge 1$. \end{itemize} \myskip Birth and death chains. \[ P(0, 0) = 1, \quad P(i, i + 1) = p_i, \quad P(i, i - 1) = q_i, \quad p_i + q_i = 1 \] \[ h_i = \PP_i(T_0 < \infty), \quad h_0 = 1 \] \[ h_i = p_i h_{i + 1} + q_i h_{i - 1} \] \[ \implies p_i (h_{i + 1} - h_i) = q_i (h_i - h_{i - 1}) \] Set $u_i = h_i - h_{i - 1}$. \[ u_{i + 1} = \frac{q_i}{p_i} u_i = \cdots = \prod_{k = 1}^i \frac{q_k}{p_k} u_1 \quad u_1 = h_1 - 1 \] \begin{align*} h_i &= \sum_{j = 1}^i (h_j - h_{j - 1}) + 1 \\ &= 1 + \sum_{j = 1}^i u_j \\ &= 1 + u_1 + \sum_{j = 2}^i u_1 \prod_{k = 1}^{j - 1} \frac{q_k}{p_k} \\ \implies h_i &= 1 + (h_1 - 1) + (h_1 - 1) \sum_{j = 2}^i \prod_{k = 1}^{j - 1} \frac{q_k}{p_k} \end{align*} Set $\gamma_j = \prod_{k = 1}^j \frac{q_k}{p_k}$, $\gamma_0 = 1$ (the empty product).
Then \[ h_i = 1 - (1 - h_1) \sum_{j = 0}^{i - 1} \gamma_j \] We want $(h_i)$ to be the minimal non-negative solution; non-negativity of all the $h_i$ implies: \[ (1 - h_1) \le \frac{1}{\sum_{j = 0}^\infty \gamma_j} \] Minimality implies \[ h_1 = 1 - \frac{1}{\sum_{j = 0}^\infty \gamma_j} \] \begin{itemize} \item $\sum_{j = 0}^\infty \gamma_j < \infty$, then \[ h_i = \frac{\sum_{j = i}^\infty \gamma_j}{\sum_{j = 0}^\infty \gamma_j} \] \item $\sum_{j = 0}^\infty \gamma_j = \infty$, then $h_i = 1$ for all $i \ge 1$. \end{itemize} \subsubsection*{Mean hitting times} $A \subseteq I$, $\tau_A = \inf\{n \ge 0 : X_n \in A\}$. $k_i^A = \EE_i[\tau_A]$. \begin{flashcard} \begin{theorem*} The vector $(k_i^A : i \in I)$ is \cloze{the minimal non-negative solution to the system \[ k_i^A = \begin{cases} 0 & \text{if $i \in A$} \\ 1 + \sum_{j \not\in A} P(i, j) k_j^A & \text{if $i \not\in A$} \end{cases} \]} \end{theorem*} \end{flashcard} \begin{hiddenflashcard} What does $k_i^A$ represent? \[ k_i^A = \cloze{\EE_i[\tau_A]} \] \end{hiddenflashcard} \begin{hiddenflashcard} What does $h_i^A$ represent? \[ h_i^A = \cloze{\PP_i(\tau_A < \infty)} \] \end{hiddenflashcard} \begin{hiddenflashcard} What does $\tau_A$ represent? \[ \tau_A = \cloze{\inf\{n \ge 0 : X_n \in A\}} \] \end{hiddenflashcard} \begin{proof} If $i \in A$, then $k_i^A = 0$. Assume $i \not\in A$.
Then \begin{align*} k_i^A &= \EE_i[\tau_A] \\ &= \sum_{n = 0}^\infty \PP_i(\tau_A > n) \\ &= \sum_{n = 0}^\infty \PP_i(X_0 \not\in A, \dots, X_n \not\in A) \\ &= 1 + \sum_{n = 1}^\infty \PP_i(X_1 \not\in A, \dots, X_n \not\in A) \\ &= 1 + \sum_{n = 1}^\infty \sum_j \PP_i(X_1 = j, X_2 \not\in A, \dots, X_n \not\in A) \\ &= 1 + \sum_{n = 1}^\infty \sum_j P(i, j) \PP(X_1 \not\in A,\dots, X_n \not\in A \mid \cancel{X_0 = i}, X_1 = j) \\ &= 1 + \sum_{n = 1}^\infty \sum_j P(i, j) \PP_j(X_0 \not\in A, \dots, X_{n - 1} \not\in A) \\ &= 1 + \sum_j P(i, j) \sum_{n = 0}^\infty \PP_j(X_0 \not\in A, \dots, X_n \not\in A) \\ &= 1 + \sum_j P(i, j) \EE_j[\tau_A] \\ &= 1 + \sum_j P(i, j) k_j^A \\ &= 1 + \sum_{j \not\in A} P(i, j) k_j^A \end{align*} Minimality: Let $(x_i)$ be another non-negative solution. Then $x_i = 0$, $i \in A$. If $i \not\in A$, then \begin{align*} x_i &= 1 + \sum_{j \not\in A} P(i, j) x_j \\ &= 1 + \sum_{j \not\in A} P(i, j) + \sum_{j \not\in A} \sum_{k \not\in A} P(i, j) P(j, k) x_k \\ x_i &= 1 + \sum_{j_1 \not\in A} P(i, j_1) + \cdots + \sum_{j_1, \dots, j_n \not\in A} P(i, j_1) \cdots P(j_{n - 1}, j_n) + \sum \text{non-negative terms} \\ x_i &\ge 1 + \PP_i(\tau_A > 1) + \PP_i(\tau_A > 2) + \cdots + \PP_i(\tau_A > n) \end{align*} So $x_i \ge \sum_{k = 0}^n \PP_i(\tau_A > k)$ for all $n$. So \[ x_i \ge \sum_{k = 0}^\infty \PP_i(\tau_A > k) = \EE_i[\tau_A] = k_i^A \qedhere \] \end{proof} \subsubsection*{Simple Markov Property} Recall that the Simple Markov property states that if $m \in \NN$, $i \in I$, $X \sim \Markov(\lambda, P)$ then conditional on $X_m = i$, $(X_{n + m})_{n \ge 0}$ is $\Markov(\delta_i, P)$ and is independent of $X_0, \dots, X_m$. \\ We would like to generalise this to a value of $m$ that is randomly picked.
\begin{flashcard}[stopping-time] \begin{definition*} A random variable $T : \Omega \to \{0, 1, \dots\} \cup \{\infty\}$ is called a \emph{stopping time} if \cloze{the event $\{T = n\}$ depends on $X_0, \dots, X_n$ for all $n \in \NN$.} \end{definition*} \end{flashcard} \begin{example*} $A \subseteq I$, $\tau_A = \inf\{n \ge 0 : X_n \in A\}$. Then $\{\tau_A = n\} = \{X_0 \not\in A, \dots, X_{n - 1} \not\in A, X_n \in A\}$ so first hitting times are always stopping times. What about last hitting time: \[ L_A = \sup\{n \le 10 : X_n \in A\} \] Then $L_A$ is \emph{not} a stopping time, because for example $\{L_A = 5\}$ does not depend on $X_0, \dots, X_5$ only. \end{example*}