\documentclass[a4paper,11pt]{article}
\usepackage{amsmath}
\usepackage{amstext}
\usepackage{amsthm}
\usepackage{mathtools}
\usepackage{braket}
%\usepackage{graphicx,subcaption}
\usepackage[percent]{overpic}
\usepackage{bbm}
\usepackage{bm}
\usepackage{threeparttable}
\usepackage{tabularx}
\usepackage{multirow}
\usepackage{booktabs}
\usepackage[nohyperlinks]{acronym}
\usepackage{microtype}
\usepackage{pifont}
%\usepackage{pdfpages,diagbox}
\usepackage[utf8]{inputenc}
\usepackage[labelformat=empty,labelsep=none]{caption}
\usepackage{enumerate}
\usepackage{hyperref}
%\definecolor{ulbblue}{cmyk}{1,0.75,0.2,0.18}
%\hypersetup{colorlinks,
%citecolor=ulbblue,
%linkcolor=ulbblue,
% urlcolor=ulbblue
%}
\makeatletter
\renewcommand\@pnumwidth{1.8em}
\makeatother
\usepackage[T1]{fontenc}
\usepackage{textcomp}
\usepackage{fix-cm}
\usepackage[charter,cal=cmcal,sfscaled=false]{mathdesign}
\usepackage[paper=a4paper,innermargin=4cm,outermargin=3cm,top=3.5cm,bottom=3.5cm]{geometry}
\setlength{\headheight}{13.6pt}
\usepackage{setspace}
\usepackage[PetersLenny]{fncychap}
\usepackage{fancyhdr}
\theoremstyle{definition}
\newtheorem{exercise}{Exercise}
\newtheorem*{definition}{Definition}
\setlength{\textheight}{26 cm}
\setlength{\textwidth}{16.5 cm}
\setlength{\topmargin}{-2 cm}
\setlength{\oddsidemargin}{-0.4 cm}

\begin{document}

\noindent
\fbox{\parbox{\textwidth}{
INFOH-422\hfill 2015-2016\\
\centering{
\textsc{\Large Information, Coding, Computing and Complexity Theory}\\
Exercise Sheet 1}
}}

\section*{}

\begin{exercise}~A discrete random variable $X$ has a uniform probability distribution $p(x)$ over the set ${\mathcal{X}}$ of cardinality $m=|{\mathcal{X}}|$.
\begin{enumerate}[(a)]
\item Give an example for ${\mathcal{X}}$ and calculate $p(x)$ for $x \in {\mathcal{X}}$.
\item Determine the entropy $H(X)$ of $X$. If $m=64$, what is $H(X)$?
\item How many bits are needed to enumerate the alphabet ${\mathcal{X}}$ without encoding, when $m=64$?
\item How many symbols taken from a quaternary alphabet are necessary to enumerate ${\mathcal{X}}$ without encoding, when $m=64$?
\item Show that $H(X)$ is greater than or equal to the entropy of any other random variable $Y$ that takes values in the same set ${\mathcal{X}}$. \emph{(Read the reminder on Lagrange multipliers.)}
\end{enumerate}
\end{exercise}

\begin{exercise}~A random variable $X$ has an alphabet ${\mathcal{X}}=\{A,B,C,D\}$.
\begin{enumerate}[(a)]
\item Calculate $H(X_p)$ with associated probability distribution $p(X)=\{\frac{1}{2},\frac{1}{4},\frac{1}{8},\frac{1}{8}\}$.
\item Calculate $H(X_q)$ with associated probability distribution $q(X)=\{\frac{1}{4},\frac{1}{4},\frac{1}{4},\frac{1}{4}\}$.
\item Consider the binary code defined by $\{C(X{=}A)=0,\; C(X{=}B)=10,\; C(X{=}C)=110,\; C(X{=}D)=111\}$.\\
Calculate the expected length (in number of bits) of the codewords of $X$ when $X$ obeys the distributions $p(X)$ and $q(X)$, respectively.
\item Compare the four results.
\end{enumerate}
\end{exercise}

\begin{exercise}~A fair coin is flipped repeatedly until ``heads'' is obtained. Let the random variable $X$ be the number of flips needed to obtain ``heads'' for the first time.
\begin{enumerate}[(a)]
\item Calculate $H(X)$.
\item Find a sequence of ``yes/no'' questions to determine the value of $X$. Compare the entropy of $X$ with the expected length of the sequence of questions necessary to fully determine $X$.
\end{enumerate}
\noindent {\it Hint:}
\begin{align*}
\sum_{n=1}^\infty r^n&={\frac{r}{1-r}},\\
\sum_{n=1}^\infty n\cdot r^n&={\frac{r}{(1-r)^2}},\qquad |r|<1.
\end{align*}
\end{exercise}

\begin{exercise}~Let $X$ and $Y$ be two random variables. What is the inequality relating $H(X)$ and $H(Y)$ if
\begin{enumerate}[(a)]
\item $Y=2^X$, with ${\mathcal{X}}=\{0,1,2,3,4\}$ and associated probability distribution $\{\frac{1}{2},\frac{1}{4},\frac{1}{8},\frac{1}{16},\frac{1}{16}\}$?
\item $Y=\cos(X)$, with ${\mathcal{X}}=\{0,\pi/2,\pi,3 \pi/2,2 \pi \}$ and associated probability distribution $\{\frac{1}{3},\frac{1}{3},\frac{1}{12},\frac{1}{12},\frac{1}{6}\}$?
\item Show that the entropy of any function $f(X)$ of the random variable $X$ is less than or equal to the entropy of $X$. In order to prove this, it is useful to calculate the joint entropy $H(X,f(X))$. In which case is the inequality $H(f(X)) \le H(X)$ saturated?
\end{enumerate}
\end{exercise}

\begin{exercise}~Let $X$ and $Y$ be two random variables, which take the values $x_1,x_2,\cdots, x_r$ and $y_1,y_2,\cdots, y_s$, respectively. Furthermore, define the random variable $Z=X+Y$.
\begin{enumerate}[(a)]
\item Show that $H(Z|Y)=H(X|Y)$ and $H(Z|X)=H(Y|X)$. Deduce that if $X$ and $Y$ are independently distributed, then $H(X)\le H(Z)$ and $H(Y)\le H(Z)$. Thus, summing two independent random variables cannot decrease the uncertainty.
\item Give an example of $X$ and $Y$ (which must be correlated) such that $H(X)>H(Z)$ and $H(Y)>H(Z)$.
\item In which case is the equality $H(Z)=H(X)+H(Y)$ satisfied?
\end{enumerate}
\end{exercise}

\begin{exercise}\textbf{(Optional)}~Give an example of a joint distribution of two random variables $X$ and $Y$ whose correlation coefficient $r$ (as defined below) is zero, even though they are not independent. Show that in this case $H(X{\rm:}Y) \ne 0$, which shows that the mutual entropy is a better measure of the dependence between $X$ and $Y$.
\begin{definition}
Correlation coefficient
\begin{align*}
r=\frac{\langle x\cdot y\rangle -\langle x\rangle \cdot \langle y\rangle }{\sqrt{\langle x^{2}\rangle -\langle x\rangle ^{2}}\sqrt{\langle y^{2}\rangle -\langle y\rangle ^{2}}}
\end{align*}
\end{definition}
\end{exercise}

\section*{\centering Reminders}
\begin{enumerate}[(a)]
\item Change of base for logarithms:
\begin{align*}
\log_{a}b=\log_{x}b/\log_{x}a.
\end{align*}
\item Lagrange multipliers:
\begin{itemize}
\item To maximize $u(x_{1},x_{2},\dots,x_{n})$ subject to $k$ constraints $g_{j}(x_{1},x_{2},\dots,x_{n})=a_{j}$, $j=1,\cdots,k$,
\item Introduce the Lagrangian:\\
$ L(x_{1},\dots,x_{n},\lambda_{1},\dots,\lambda_{k})=u(x_{1},x_{2},\dots,x_{n})+\sum_{j=1}^{k}\lambda_{j}[a_{j}-g_{j}(x_{1},x_{2},\dots,x_{n})]. $
\item The condition for an extremum is\\
$\forall i,\ \frac{\partial L}{\partial x_{i}}=0$ (the equations $\frac{\partial L}{\partial \lambda_{j}}=0$ simply reproduce the constraints).
\item If $u$ is concave, the solution is a maximum (see the worked example after this list).
\end{itemize}
\end{enumerate}
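\medskip
\noindent {\it Worked example (for illustration only; the function and the numbers are arbitrary choices, unrelated to the exercises).}
Changing the base of a logarithm:
\begin{align*}
\log_{4}8=\frac{\log_{2}8}{\log_{2}4}=\frac{3}{2}.
\end{align*}
Using a Lagrange multiplier to maximize the concave function $u(x_{1},x_{2})=\log x_{1}+\log x_{2}$ under the single constraint $x_{1}+x_{2}=1$:
\begin{align*}
L(x_{1},x_{2},\lambda)&=\log x_{1}+\log x_{2}+\lambda\,[1-x_{1}-x_{2}],\\
\frac{\partial L}{\partial x_{1}}&=\frac{1}{x_{1}}-\lambda=0,\qquad
\frac{\partial L}{\partial x_{2}}=\frac{1}{x_{2}}-\lambda=0
\quad\Longrightarrow\quad x_{1}=x_{2}=\frac{1}{\lambda},
\end{align*}
and the constraint then gives $x_{1}=x_{2}=\frac{1}{2}$; since $u$ is concave, this is indeed the maximum.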
\section*{}
\centering
\textbf{{The exercises and solutions are available at} \url{http://quic.ulb.ac.be/teaching}}

\end{document}