\documentclass[a4paper,12pt]{article}

\begin{document}

\parindent=0pt

\begin{center}

MA181 INTRODUCTION TO STATISTICAL MODELLING

ELEMENTS OF PROBABILITY

\end{center}

\begin{description}

\item[1.]
An elementary result of an experiment is called an
\textit{outcome}.

\item[2.]
The totality of the outcomes of an experiment constitutes the
\textit{sample space S}.

\item[3.]
A set of outcomes is called an \textit{event}. Thus an event is a
subset of $S$. Events are often denoted by $A,B,C,\ldots$, or by
$A_1,A_2,A_3,\ldots$.

\item[Example - Birth months:]
Suppose an experiment consists of recording the birth months of my
niece's first-born child. Then the sample space $S$ consists of
the twelve outcomes \lq\lq born in January'', \lq\lq born in
February'',\ldots,\lq\lq born in December''. The event $A$,
defined by the set \{\lq\lq born in March'', \lq\lq born in
April'', \lq\lq born in May''\}, may be defined as the event
\lq\lq born in Spring''.

\item[4.]
A function $P(.)$ defined on the sample space $S$ is called a
\textit{probability function} if

\begin{description}

\item[(i)]
$P(S)=1$,

\item[(ii)]
$0\leq P(A)\leq1$ for any event $A$,

\item[(iii)]
$P(A$ or $B)=P(A)+P(B)$ if $A$ and $B$ are mutually exclusive
(i.e. they have no outcomes in common).

\end{description}

Note that the event \lq\lq$A$ or $B$'' is the set of outcomes
contained in $A$ or $B$ or both, and may also be written as $A\cup
B$. Similarly $A\cap B$ or \lq\lq$A$ and $B$'' is the set of
outcomes common to both $A$ and $B$. So, if $A$ and $B$ are
mutually exclusive, then $A\cap B=\emptyset$.

If event $A$ consists of the outcomes $w_1,w_2,\ldots,w_k$, then,
by (iii),

$$P(A)=P(w_1)+P(w_2)+\ldots+P(w_k).$$

If, moreover, the outcomes of a finite sample space are all
equally probable, then the probability of an event $A$ is given by

$$P(A)=\frac{\textrm{Number of outcomes favourable to
}A}{\textrm{Total number of outcomes}}.$$

An example is provided by a perfectly symmetrical die with faces
numbered 1,2,3,4,5,6. All faces (outcomes) are equally probable
with each having probability $\frac{1}{6}$ of being observed. Such
a die is known as a fair die. If, further, $A$ is the event \lq\lq
odd number'', then $P(A)=\frac{3}{6}=\frac{1}{2}$.

\item[Example - Birth months revisited:]
Suppose the months have the following probabilities of being the
birth month:

\begin{tabular}{lll}
$P$(Jan)$=\frac{1}{12}-0.04,$&$P$(Feb)$=\frac{1}{12}-0.02,$
&$P$(Mar)$=\frac{1}{12}$\\ $P$(Apr)$=\frac{1}{12}+0.01,$&
$P$(May)$=\frac{1}{12}+0.02,$&$P$(Jun)$=\frac{1}{12}+0.02,$\\
$P$(Jul)=$\frac{1}{12}+0.02,$&$P$(Aug)$=\frac{1}{12}+0.03,$&
$P$(Sep)$=\frac{1}{12}+0.01,$\\ $P$(Oct)$=\frac{1}{12},$
&$P$(Nov)$=\frac{1}{12}-0.02,$&$P$(Dec)$=\frac{1}{12}-0.03.$
\end{tabular}

Then $P$(born in Spring)$=\frac{3}{12}+0.01+0.02=0.28$.

\item[Theorem 1]
If $A$ is an event in $S$, then $P(\overline{A})=1-P(A)$.

\item[Proof]
Now $A\cup\overline{A}=S$ while $A\cap\overline{A}=\emptyset$. Hence,
by (iii) above,

$$1=P(S)=P(A\cup\overline{A})=P(A)+P(\overline{A}),$$

so that $P(\overline{A})=1-P(A)$.

\item[Theorem 2]
If $A$ and $B$ are two events in $S$, then $P(A)=P(A\cap
B)+P(A\cap\overline{B})$.

\item[Proof]
In the diagram, $A\cap B$ is hatched horizontally, while
$A\cap\overline{B}$ is hatched vertically. Now

$$A=(A\cap B)\cup(A\cap \overline{B})\textrm{ and }(A\cap
B)\cap(A\cap\overline{B})=\emptyset.$$

Hence, by (iii) above,

$$P(A)=P(A\cap B)+P(A\cap\overline{B}).$$

\item[5.]

\begin{description}

\item[Theorem 3 (Addition rule)]

For any two events $A$ and $B$ in $S$,

$$P(A\textrm{ or }B)=P(A)+P(B)-P(A\textrm{ and }B).$$

\item[Proof]

In the diagram, event $A$ is hatched diagonally while
$\overline{A}\cap B$ is hatched horizontally. Now

$$A\cup B=A\cup(\overline{A}\cap B)\textrm{ and
}A\cap(\overline{A}\cap B)=\emptyset.$$

Hence

\begin{eqnarray*}
P(A\cup B)&=&P(A)+P(\overline{A}\cap B),\textrm{ by (iii),}\\
&=&P(A)+P(B)-P(A\cap B),\textrm{ by Theorem 2.} \end{eqnarray*}

\end{description}

\item[Example]
Let $A$ be the event \lq\lq even number'' and $B$ the event \lq\lq
number$>3$'' when a fair die is thrown. Then
$P(A)=P(B)=\frac{3}{6}$ and $P(A\textrm{ and }B)=\frac{2}{6}$.
Hence $P(A\textrm{ or
}B)=\frac{3}{6}+\frac{3}{6}-\frac{2}{6}=\frac{4}{6}$, i.e.
$P(\{2,4,5,6\})=\frac{2}{3}$.

\item[6. Multiplication rule]
Let $A$ and $B$ be two events in $S$. Then the \textit{conditional
probability} of $A$ given $B$ is defined as $P(A|B)=P(A\textrm{
and }B)/P(B)$ so long as $P(B)>0$.

Similarly $P(B|A)=P(A\textrm{ and }B)/P(A)$, so long as $P(A)>0$.

Note that $P(A|B)$ represents the probability of $A$ in the
reduced sample space defined by $B$.

When these definitions are written in the form

$$P(A\textrm{ and }B)=P(A)P(B|A)=P(B)P(A|B),$$

this is known as the \textit{multiplication rule}.

\item[Example:]
A bag contains three black balls and three white balls. I draw two
balls from the bag without replacement. The probability that I
draw two white balls is

$$P(\textrm{first ball is white})P(\textrm{second ball is
white|first is white})=\frac{3}{6}\times\frac{2}{5}=\frac{1}{5}.$$

\item[Example:]
I shake a fair die. Then

\begin{eqnarray*}P(\textrm{number$>$3|even number})&=&\frac{P(\textrm{number$>$3
and even number})}{P(\textrm{even number})}\\
&=&\frac{P(\{4,6\})}{P(\{2,4,6\})}=\frac{2}{3}.
\end{eqnarray*}

\item[Example - Birth months revisited:]
Now

\begin{eqnarray*}
P(\textrm{born in May|born in Spring})&=&\frac{P(\textrm{born in
May and born in Spring})}{P(\textrm{born in Spring})}\\
&=&\frac{\frac{1}{12}+0.02}{\frac{1}{4}+0.03}\\
&=&\frac{31}{84}=0.369. \end{eqnarray*}

\item[Example:]
Suppose a person is chosen at random from the population, that $F$
is the event that he or she speaks French and $G$ is the event
that he or she speaks German. Suppose, moreover, that
$P(F)=\frac{1}{100},\ P(G)=\frac{1}{500}$ and
$P(F|G)=\frac{2}{3}$. Then

$$P(G|F)=\frac{P(F \textrm{ and
}G)}{P(F)}=\frac{P(F|G)P(G)}{P(F)}
=\frac{\frac{2}{3}\cdot\frac{1}{500}}{\frac{1}{100}}=\frac{2}{15}.$$

\item[Theorem 4]
For any two events $A$ and $B$ in $S$,

$$P(A)=P(A|B)P(B)+P(A|\overline{B})P(\overline{B}).$$

[This is a simple case of the theorem of total probability.]

\item[Proof]
From Theorem 2,

$$P(A)=P(A\cap B)+P(A\cap \overline{B}).$$

However,

$$P(A\cap B)=P(B)P(A|B)\textrm{ and
}P(A\cap\overline{B})=P(\overline{B})P(A|\overline{B}).$$

Consequently, after substitution, we have the result.

\item[7.]
Events $A$ and $B$ are said to be \textit{independent} if

$$P(A\textrm{ and }B)=P(A)P(B).$$

Thus $P(A|B)=P(A)$, so long as $P(B)>0$, and $P(B|A)=P(B)$, as
long as $P(A)>0$.

\item[Example:]
On a fair die, let $A$ be the event \lq\lq number$\leq$2'' and $B$
the event \lq\lq even number''. Then

$$P(A)=\frac{1}{3},\ P(B)=\frac{1}{2},\ P(A\textrm{ and
}B)=\frac{1}{6}.$$

Hence $A$ and $B$ are independent.

\item[Example:]
Two dice are shaken together. If $A$ is an event on one of them
and $B$ an event on the other, then $A$ and $B$ are assumed to be
independent. Consequently the probability of two sixes is
$\frac{1}{6}\times\frac{1}{6}=\frac{1}{36}$.

\item[Example:]
I shake two fair dice. What is the probability that the total
shown is six?

$P$(total is 6)=$P$(first die shows 1 and second 5)+$P$(first die
shows 2 and second 4)+\ldots+$P$(first die shows 5 and second 1)

=$P$(first die shows 1).$P$(second shows 5)+\ldots+$P$(first die
shows 5).$P$(second shows 1)

$=\left(\frac{1}{6}\cdot\frac{1}{6}\right)+\left(\frac{1}{6}\cdot
\frac{1}{6}\right)+\ldots+\left(\frac{1}{6}\cdot\frac{1}{6}\right)=\frac{5}{36}.$

\item[Example:]
A machine produces metal rods for cars, of which $\frac{1}{100}$
are too long while $\frac{1}{60}$ are bent, these two faults
occurring independently. What is the probability of a faulty rod?
A rod is faulty if it is too long or bent (or both). Hence

$$P(\textrm{faulty
rod})=\frac{1}{100}+\frac{1}{60}-\left(\frac{1}{100}\times\frac{1}{60}\right)=\frac{53}{2000}=0.0265.$$

\item[8.]
The above results can be extended to more than two events. So, for
three events $A,B$ and $C$ in $S$,

\begin{description}

\item[(i)]
$P(A\textrm{ or $B$ or }C)=P(A)+P(B)+P(C)-P(A\textrm{ and
}B)-P(A\textrm{ and }C)-P(B\textrm{ and }C)+P(A\textrm{ and $B$
and }C)$,

\item[(ii)]
$P(A\textrm{ and $B$ and }C)=P(A)P(B|A)P(C|A\textrm{ and }B)$ etc.

\item[(iii)]
$A,B$ and $C$ are independent if

$$P(A\textrm{ and }B)=P(A)P(B),\ P(A\textrm{ and }C)=P(A)P(C),$$
$$P(B\textrm{ and }C)=P(B)P(C)\textrm{ and }P(A\textrm{ and
}B\textrm{ and }C)=P(A)P(B)P(C).$$

\end{description}

\item[9.]

\begin{description}

\item[ Bayes' formula:]
Let $A$ be an event in $S$, and let $B_1,B_2,\ldots,B_k$ be a set
of mutually exclusive and exhaustive events in $S$. Then

$$P(B_i|A)=\frac{P(A|B_i)P(B_i)}{\sum_{j=1}^kP(A|B_j)P(B_j)}.$$

This is known as \textit{Bayes' formula} after the Reverend Thomas
Bayes (1702-1761).

\item[Derivation]
From the multiplication rule, we have

$$P(B_i|A)=P(A|B_i)P(B_i)/P(A).$$

Since the $B_j$'s are mutually exclusive and exhaustive,

$$\sum_{j=1}^kP(B_j|A)=1$$

so that

$$P(A)=\sum_{j=1}^kP(A|B_j)P(B_j)$$

and the result follows.

\end{description}

\item[Example:]
One bag contains two white balls, a second contains one white and
one black and a third contains two black balls. A bag is chosen at
random and a ball drawn from it, which is found to be white. What
is the probability that the remaining ball in the bag is also
white?

Let the choices of the bags be denoted by $B_1,B_2$ and $B_3$, and
let $A$ be the event \lq\lq a white ball is drawn''. Then

$$P(B_1)=P(B_2)=P(B_3)=\frac{1}{3}$$

and

$$P(A|B_1)=1,\ P(A|B_2)=\frac{1}{2},\ P(A|B_3)=0.$$

Consequently, from Bayes' formula we have

$$P(B_1|A)=\frac{1\times\frac{1}{3}}{\left(1\times\frac{1}{3}\right)+
\left(\frac{1}{2}\times\frac{1}{3}\right)+\left(0\times\frac{1}{3}\right)}=\frac{2}{3},\
P(B_2|A)=\frac{1}{3}\textrm{ and }P(B_3|A)=0.$$

\item[Example]
Mum, John and Sue share the weekly washing-up duties. Mum washes
up 14 times a week, Sue 5 times and John twice, the specific
occasions being decided by drawing lots. When washing up, Mum
breaks a crock with probability $\frac{1}{200}$, Sue with
probability $\frac{1}{150}$ and John with probability
$\frac{1}{40}$. Dad is reading the newspaper when he hears a crock
break. What is the probability that John is the culprit?

Let $A,B_1,B_2,B_3$ be the events

\begin{tabular}{cl}
$A$:&\lq\lq crock breaks'',\\ $B_1$:&\lq\lq Mum washing up'',\\
$B_2$:&\lq\lq John washing up'',\\ $B_3$:&\lq\lq Sue washing up''.
\end{tabular}

Now $P(B_1)=\frac{14}{21},\ P(B_2)=\frac{2}{21},\
P(B_3)=\frac{5}{21}$ while $P(A|B_1)=\frac{1}{200},\
P(A|B_2)=\frac{1}{40}$ and $P(A|B_3)=\frac{1}{150}$. From Bayes'
formula, we therefore find

$$P(B_2|A)=\frac{\frac{1}{40}\times\frac{2}{21}}{
\left(\frac{1}{200}\times\frac{14}{21}\right)+
\left(\frac{1}{40}\times\frac{2}{21}\right)+
\left(\frac{1}{150}\times\frac{5}{21}\right)}=\frac{15}{46}.$$

\end{description}

\end{document}
