%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% TPs Annee 2000-2001
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\documentclass[12pt]{article}
\usepackage[french]{babel}
\voffset -2.8 cm
\textwidth 17 cm
\textheight 24.7 cm
\oddsidemargin -.54 cm
\evensidemargin -1.54 cm
\usepackage{graphicx}
\usepackage{amsmath}
\usepackage{amsfonts}
\usepackage{amssymb}
\usepackage[latin1]{inputenc}
\pagestyle{empty}
\begin{document}
\centerline {\Large \sc Information and coding theory}
\bigskip
\bigskip
\hrule
\bigskip
\bigskip
\centerline {\underline{\Large Exercise sheet n$^\circ$ 3}:}
\bigskip
\bigskip
\noindent {\bf 3-1.} {\em Data compression}.
Determine whether the following codes are nonsingular, uniquely decodable, instantaneous:
%Are the following codes are they nonsingular, uniquely decodable, instantaneous?
\par
\medskip
\noindent (a) The code $\{0,0\}$.
\par
\medskip
\noindent (b) The code $\{0,010,01,10\}$.
\par
\medskip
\noindent (c) The code $\{10,00,11,110\}$.
\par
\medskip
\noindent (d) The code $\{0,10,110,111\}$.
\bigskip
\noindent {\bf 3-2.}
We consider a source $\{ x_i, p_i \}$ with $i=1,\dots,m$. The symbols $x_i$
(emitted with probabilities~$p_i$) are encoded in sequences using an alphabet of
cardinality $D$, such that the decoding is instantaneous. For $m=6$ and lengths
of the codewords $\{l_i\}=\{1,1,1,2,2,3\}$, find a lower bound for $D$. Is this
code optimal?
\bigskip
\noindent {\bf 3-3.} {\em Huffman code}. A source emits a random variable $X$ which can take four values with probabilities $(\frac{1}{10},\frac{2}{10},\frac{3}{10},\frac{4}{10})$.
\par
\medskip
\noindent (a) Construct a binary Huffman code for $X$.
\par
\medskip
\noindent (b) Construct a ternary Huffman code for $X$.
\par
\medskip
\noindent (c) Construct a binary Shannon code for $X$ and compare its expected length with the code of~(a).
\par
\bigskip
\noindent {\bf 3-4.} {\em Huffman code}. A source emits a random variable $X$ which can take four values with probabilities $(\frac{1}{3},\frac{1}{3},\frac{1}{4},\frac{1}{12})$.
\par
\medskip
\noindent (a) Construct a binary Huffman code for $X$.
\par
\medskip
\noindent (b) Construct a binary Shannon code for $X$ and compare it with the code of (a).
\par
\bigskip
\noindent {\bf 3-5.} We have a biased coin with probability $p$ of obtaining ``head'' (``1'')
and probability $1-p$ of obtaining ``tail'' (``0''). Alice flips this coin as many times as needed to obtain ``head'' for the first time, and would like to communicate to Bob the number of flips $k$ that were needed. A naive method is to send Bob the sequence of the outcomes of the coin flips encoded in a chain of bits of length $k$, such as $000\cdots01$ (where 0 stands for ``tail'' and 1 for ``head'').
\par
\medskip
\noindent (a) What is the expected length of this naive code? Compare it with
the entropy of the random variable $k$. In which case is the naive code optimal?
Use
\[
\sum_{i=0}^\infty a^i=\frac{1}{1-a} \qquad \qquad
\sum_{i=0}^\infty i\, a^i = \frac{a}{(1-a)^2}
\]
\par
\medskip
\noindent (b) Alice decides now to encode her random variable $k$ with a Shannon
code, with the aim of approaching $H(k)$. Compare the
expected lengths of the naive code and the Shannon code in the limit $p\to 0$.
\bigskip
\centerline {\underline{\Large Web site}:}
\bigskip
\texttt{http://quic.ulb.ac.be/teaching/}
\end{document}