\documentclass[a4paper,12pt]{article}
\newcommand{\ds}{\displaystyle}
\newcommand{\pl}{\partial}
\parindent=0pt
\begin{document}


{\bf Question}

Explain what is meant by a population with size varying according
to a generalized birth-death process having birth and death rates
given by the functions $\Gamma_1(n)$ and $\Gamma_2(n)$
respectively. A population of this kind  has size varying between
$N$ and $M$ and $$\Gamma_1(n)  = \lambda(N-n)n, \qquad \Gamma_2(n)=
\mu(n-M)n ,$$

where $N > M > 0$ and $\lambda$ and $\mu$ are positive constants.
Show that the probability $p_n(t)$ that the population size is $n$
at time $t$  satisfies the following differential-difference
equation for $n = M+1, M+2, \ldots, N-1$.
\begin{eqnarray*}p_n'(t) & = & \lambda(N-n+1)(n-1)p_{n-1}(t) +
\mu(n+1-M)(n+1)p_{n+1}(t) \\ & &- [\lambda(N-n)n +
\mu(n-M)n]p_n(t). \end{eqnarray*}

Obtain corresponding equations for $p_N'(t)$ and $p_M'(t)$.

If $X(t)$ denotes the population size at time $t$ show that
$$\frac{d}{dt}E\{X(t)\} = (M\mu + N\lambda)E\{X(t)\} - (\mu +
\lambda)E\{[X(t)]^2\}$$ where $E\{X(t)\}$ denotes the expected
value of $X(t).$


\vspace{.25in}

{\bf Answer}

Suppose we have a population of individuals reproducing or dying
independently of one another.  Suppose the size of the population
at time $t$ is $X(t)$.  Then a generalized birth--death process
with rates $\Gamma_1(n)$ and $\Gamma_2(n)$ is defined by the
probabilities:

\begin{eqnarray*} P(X(t + \delta t) = n+1 | X(t) = n) & = &
\Gamma_1(n) \delta t + o(\delta t) \\    P(X(t + \delta t) = n-1 |
X(t) = n) & = & \Gamma_2(n) \delta t + o(\delta t) \\ P(X(t +
\delta t) = n | X(t) = n) & = & 1 - (\Gamma_1(n)+ \Gamma_2(n))
\delta t + o(\delta t) \end{eqnarray*}

For $M < n < N$ we have \begin{eqnarray*} p_n(t + \delta t)  & = &
P(X(t + \delta t) = n| X(t) = n+1)P(X(t) = n+1) \\ & + & P(X(t +
\delta t) = n| X(t) = n-1)P(X(t) = n-1) \\ & + & P(X(t + \delta t)
= n| X(t) = n)P(X(t) = n) \\ & = & (\Gamma_2(n+1) \delta t +
o(\delta t)) p_{n+1}(t) \\ & + & (\Gamma_1(n-1) \delta t +
o(\delta t))p_{n-1}(t) \\ & + & (1 - (\Gamma_1(n) + \Gamma_2(n))
\delta t + o(\delta t))p_n(t) \end{eqnarray*}

Thus \begin{eqnarray*} \frac{p_n(t + \delta t) - p_n(t)}{\delta t}
& = & \Gamma_2(n+1) p_{n+1}(t) + \Gamma_1(n-1)p_{n-1}(t) \\ & - &
(\Gamma_1(n) + \Gamma_2(n))p_n(t) + \frac{o(\delta t)}{\delta t}
\end{eqnarray*}

Thus \begin{eqnarray} p_n'(t) & = & \Gamma_2(n+1)p_{n+1}(t) +
\Gamma_1(n-1)p_{n-1}(t) - (\Gamma_1(n) + \Gamma_2(n))p_n(t)
\nonumber
\\ & = & \mu (n+1 -M)(n+1)p_{n+1}(t) + \lambda (N - n
+1)(n-1)p_{n-1}(t) \nonumber \\ & - & [\lambda (N - n)n + \mu(n -
M)n]p_n(t) \end{eqnarray}

By reasoning similar to that above we also obtain:
\begin{eqnarray} p_N'(t) & = & \lambda (N-1)p_{N-1}(t) -
\mu(N-M)Np_N(t) \\ p_M'(t) & = & \mu (M+1)p_{M+1}(t) -
\lambda(N-M)Mp_M(t)
\end{eqnarray}

Now $\ds \frac{d}{dt} E(X(t)) = \sum_{n=M}^N n p_n'(t)$, so
multiplying (1), (2) and (3) by $n$ and summing gives:

$$\sum_{n=M}^{N-1} \mu(n+1-M)n(n+1)p_{n+1}(t) - \sum_{n=M}^{N}
n(\lambda(N-n)n + \mu(n-M)n)p_n(t) $$ $$+ \sum_{n=M+1}^{N} \lambda
(N-n+1)n(n-1)p_{n-1}(t)$$


Changing the index of summation in the first and last sums gives

$\ds \sum_{n=M+1}^N \mu(n-M)(n-1)np_n(t) \hspace{.2in}\leftarrow
{\rm summand} =0 {\rm\ for\ } n=M$

$\ds +\sum_{n=M}^{N-1} \lambda (N-n)n(n+1)p_n(t) \hspace{.2in}
\leftarrow {\rm ditto\ for\ } n=N$

$\ds - \sum_{n=M}^Nn(\lambda(N-n)n + \mu(n-M)n)p_n(t)$

which together equal $\ds\sum_{n=M}^N p_n(t) \times K$, \, \, where

\begin{eqnarray*} K & = & \mu(n-M)(n^2-n)- \lambda n^2(N-n)-\mu n^2(n-M) \\ & & +
\lambda(N-n)(n^2+n) \\ & = & (\mu M + \lambda N)n - (\mu +
\lambda)n^2 \end{eqnarray*}

Thus $\ds\frac{d}{dt}E\{X(t)\} = (M\mu + N\lambda)E\{X(t)\} - (\mu
+ \lambda)E\{[X(t)]^2\}$





\end{document}
