author     Philipp Le <philipp@philipple.de>   2022-06-10 22:34:03 +0200
committer  Philipp Le <philipp@philipple.de>   2022-06-10 22:46:52 +0200
commit     9f618c793a76e4199d69eca5ef802fdb7492ffdf (patch)
tree       cc9adca82adb48d5799a3e1721f83040abc23e36
parent     4029d8e8f3dea39aa4f2ade396b30a41a0b383be (diff)
Add Exercise 08
-rw-r--r--  Makefile                     2
-rw-r--r--  exercise08/exercise08.tex  423
-rw-r--r--  main/DCS.tex                 2
-rw-r--r--  main/exercise08.tex         74
4 files changed, 500 insertions(+), 1 deletion(-)
diff --git a/Makefile b/Makefile
index 69edd6b..1daf2f5 100644
--- a/Makefile
+++ b/Makefile
@@ -5,7 +5,7 @@ LATEXMK = latexmk -pdf -silent -synctex=1
LATEXMK_PVC = $(LATEXMK) -pvc
ALL_CHAPTERS = $(BUILD_DIR)/chapter00.pdf $(BUILD_DIR)/chapter01.pdf $(BUILD_DIR)/chapter02.pdf $(BUILD_DIR)/chapter03.pdf $(BUILD_DIR)/chapter04.pdf $(BUILD_DIR)/chapter05.pdf $(BUILD_DIR)/chapter06.pdf $(BUILD_DIR)/chapter07.pdf $(BUILD_DIR)/chapter08.pdf $(BUILD_DIR)/chapter09.pdf
-ALL_EXERCISES = $(BUILD_DIR)/exercise00.pdf $(BUILD_DIR)/exercise01.pdf $(BUILD_DIR)/exercise02.pdf $(BUILD_DIR)/exercise03.pdf $(BUILD_DIR)/exercise04.pdf $(BUILD_DIR)/exercise05.pdf $(BUILD_DIR)/exercise06.pdf $(BUILD_DIR)/exercise07.pdf
+ALL_EXERCISES = $(BUILD_DIR)/exercise00.pdf $(BUILD_DIR)/exercise01.pdf $(BUILD_DIR)/exercise02.pdf $(BUILD_DIR)/exercise03.pdf $(BUILD_DIR)/exercise04.pdf $(BUILD_DIR)/exercise05.pdf $(BUILD_DIR)/exercise06.pdf $(BUILD_DIR)/exercise07.pdf $(BUILD_DIR)/exercise08.pdf
ALL_SVGS = $(BUILD_DIR)/svg/cc-by-sa-4-0.pdf $(BUILD_DIR)/svg/ch01_EM_Spectrum_Properties.pdf $(BUILD_DIR)/svg/ch01_Electromagnetic-Spectrum.pdf $(BUILD_DIR)/svg/ch01_NetworkTopologies.pdf $(BUILD_DIR)/svg/ch03_Conv_Corr_Auto.pdf $(BUILD_DIR)/svg/ch04_win_blackman.pdf $(BUILD_DIR)/svg/ch04_win_hamming.pdf $(BUILD_DIR)/svg/ch04_win_hann.pdf $(BUILD_DIR)/svg/ch04_win_rect.pdf $(BUILD_DIR)/svg/ch04_win_tri.pdf $(BUILD_DIR)/svg/ch04_win_gauss.pdf $(BUILD_DIR)/svg/ch06_FFT_Butterfly.pdf
COMMON_DEPS = common/settings.tex common/titlepage.tex common/acronym.tex common/imprint.tex DCS.bib
diff --git a/exercise08/exercise08.tex b/exercise08/exercise08.tex
new file mode 100644
index 0000000..b8259b2
--- /dev/null
+++ b/exercise08/exercise08.tex
@@ -0,0 +1,423 @@
+% SPDX-License-Identifier: CC-BY-SA-4.0
+%
+% Copyright (c) 2022 Philipp Le
+%
+% Except where otherwise noted, this work is licensed under a
+% Creative Commons Attribution-ShareAlike 4.0 License.
+%
+% Please find the full copy of the licence at:
+% https://creativecommons.org/licenses/by-sa/4.0/legalcode
+
+\phantomsection
+\addcontentsline{toc}{section}{Exercise 8}
+\section*{Exercise 8}
+
+
+% Information Content
+% Shannon Entropy
+% Source Efficiency
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\begin{question}[subtitle={Information}]
+    A discrete memoryless source has a source alphabet with the following symbol probabilities:
+ \begin{table}[H]
+ \centering
+ \begin{tabular}{|l|l|l|l|l|}
+ \hline
+ Symbol & $s_1$ & $s_2$ & $s_3$ & $s_4$ \\
+ \hline
+ Probability & $0.5$ & $0.25$ & $0.125$ & $0.125$ \\
+ \hline
+ \end{tabular}
+ \end{table}
+
+ \begin{tasks}
+ \task
+ Determine the information content of each symbol!
+
+ \task
+ Determine the Shannon entropy of the source!
+
+ \task
+            What is the source efficiency?
+ \end{tasks}
+\end{question}
+
+\begin{solution}
+ \begin{tasks}
+ \task
+ The information content is in general:
+ \begin{equation*}
+ \mathrm{I}(s_i) = \log_2 \left(\frac{1}{p_i}\right)
+ \end{equation*}
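+
+            For example, for $s_3$ with $p_3 = 0.125$:
+            \begin{equation*}
+                \mathrm{I}(s_3) = \log_2 \left(\frac{1}{0.125}\right) = \log_2 \left(8\right) = 3
+            \end{equation*}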
+
+ For the given symbols, their information content is:
+ \begin{table}[H]
+ \centering
+ \begin{tabular}{|l|l|l|l|l|}
+ \hline
+ Symbol & $s_1$ & $s_2$ & $s_3$ & $s_4$ \\
+ \hline
+ Probability & $0.5$ & $0.25$ & $0.125$ & $0.125$ \\
+ \hline
+ Information Content & $1$ & $2$ & $3$ & $3$ \\
+ \hline
+ \end{tabular}
+ \end{table}
+
+ Note: The information content is a property of the symbol!
+
+ \task
+ The Shannon entropy is in general:
+ \begin{equation*}
+ \begin{split}
+ \mathrm{H}(X) &= - \sum\limits_{i=1}^{M} p_i \log_2 \left(p_i\right) \\
+ &= \sum\limits_{i=1}^{M} p_i \cdot \mathrm{I}(s_i)
+ \end{split}
+ \end{equation*}
+
+ For this task:
+ \begin{equation*}
+ \begin{split}
+ \mathrm{H}(X) &= \left(0.5 \cdot 1\right) + \left(0.25 \cdot 2\right) + \left(0.125 \cdot 3\right) + \left(0.125 \cdot 3\right) \\
+ &= 0.5 + 0.5 + 0.375 + 0.375 \\
+ &= 1.75
+ \end{split}
+ \end{equation*}
+
+ Note: The Shannon entropy is a property of the source!
+
+ \task
+ We need the decision quantity:
+ \begin{equation*}
+ \mathrm{D}(X) = \log_2 \left(M\right) = 2
+ \end{equation*}
+ with $M = 4$ which is the number of symbols in the alphabet.
+
+ The source efficiency is:
+ \begin{equation*}
+ \eta_X = \frac{\mathrm{H}(X)}{\mathrm{D}(X)} = \frac{1.75}{2} = 0.875
+ \end{equation*}
+ \end{tasks}
+\end{solution}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\begin{question}[subtitle={Source Coding}]
+    An input source alphabet has the symbols $s_1 = (000)_2$, $s_2 = (001)_2$, $s_3 = (010)_2$, $s_4 = (011)_2$, $s_5 = (100)_2$, $s_6 = (101)_2$, $s_7 = (110)_2$ and $s_8 = (111)_2$ with the following probabilities:
+ \begin{table}[H]
+ \centering
+ \begin{tabular}{|l|l|l|l|l|l|l|l|l|}
+ \hline
+ Symbol & $s_1$ & $s_2$ & $s_3$ & $s_4$ & $s_5$ & $s_6$ & $s_7$ & $s_8$ \\
+ \hline
+ Sequence & $(000)_2$ & $(001)_2$ & $(010)_2$ & $(011)_2$ & $(100)_2$ & $(101)_2$ & $(110)_2$ & $(111)_2$ \\
+ \hline
+ Probability & $0.4$ & $0.1$ & $0.05$ & $0.05$ & $0.15$ & $0.025$ & $0.135$ & $0.09$ \\
+ \hline
+ \end{tabular}
+ \end{table}
+
+ \begin{tasks}
+ \task
+            Define a source code using the Shannon-Fano algorithm!
+
+ \task
+ Encode the message $\left(000001101110\right)_2$!
+
+ \task
+ Decode the message $\left(01100010100\right)_2$!
+ \end{tasks}
+\end{question}
+
+\begin{solution}
+ \begin{tasks}
+ \task
+            We successively divide the symbols into portions of (approximately) equal probability until each portion contains only one symbol:
+ \begin{enumerate}
+ \item
+ Division of all symbols into: $S_{0} = \left\{s_1, s_2\right\}$ and $S_{1} = \left\{s_3, s_4, s_5, s_6, s_7, s_8\right\}$ with $P\left(S_{0}\right) = 0.5$ and $P\left(S_{1}\right) = 0.5$
+ \item
+ Division of $S_{0}$ into: $S_{00} = \left\{s_1\right\}$ and $S_{01} = \left\{s_2\right\}$ with $P\left(S_{00}\right) = 0.4$ and $P\left(S_{01}\right) = 0.1$ (no better balancing possible; all portions contain only one element)
+ \item
+ Division of $S_{1}$ into: $S_{10} = \left\{s_3, s_4, s_5\right\}$ and $S_{11} = \left\{s_6, s_7, s_8\right\}$ with $P\left(S_{10}\right) = 0.25$ and $P\left(S_{11}\right) = 0.25$
+ \item
+ Division of $S_{10}$ into: $S_{100} = \left\{s_3, s_4\right\}$ and $S_{101} = \left\{s_5\right\}$ with $P\left(S_{100}\right) = 0.1$ and $P\left(S_{101}\right) = 0.15$
+ \item
+ Division of $S_{11}$ into: $S_{110} = \left\{s_6, s_8\right\}$ and $S_{111} = \left\{s_7\right\}$ with $P\left(S_{110}\right) = 0.115$ and $P\left(S_{111}\right) = 0.135$
+ \item
+ Division of $S_{100}$ into: $S_{1000} = \left\{s_3\right\}$ and $S_{1001} = \left\{s_4\right\}$ with $P\left(S_{1000}\right) = 0.05$ and $P\left(S_{1001}\right) = 0.05$ (all portions contain only one element)
+ \item
+                Division of $S_{110}$ into: $S_{1100} = \left\{s_6\right\}$ and $S_{1101} = \left\{s_8\right\}$ with $P\left(S_{1100}\right) = 0.025$ and $P\left(S_{1101}\right) = 0.09$ (no better balancing possible; all portions contain only one element)
+ \end{enumerate}
+
+ The encoding is now:
+ \begin{table}[H]
+ \centering
+ \begin{tabular}{|l|l|l|l|}
+ \hline
+ Symbol & Probability & Portion & Codeword \\
+ \hline
+ \hline
+ $s_1 = (000)_2$ & $0.4$ & $S_{00}$ & $(00)_2$ \\
+ \hline
+ $s_2 = (001)_2$ & $0.1$ & $S_{01}$ & $(01)_2$ \\
+ \hline
+ $s_3 = (010)_2$ & $0.05$ & $S_{1000}$ & $(1000)_2$ \\
+ \hline
+ $s_4 = (011)_2$ & $0.05$ & $S_{1001}$ & $(1001)_2$ \\
+ \hline
+ $s_5 = (100)_2$ & $0.15$ & $S_{101}$ & $(101)_2$ \\
+ \hline
+ $s_6 = (101)_2$ & $0.025$ & $S_{1100}$ & $(1100)_2$ \\
+ \hline
+ $s_7 = (110)_2$ & $0.135$ & $S_{111}$ & $(111)_2$ \\
+ \hline
+ $s_8 = (111)_2$ & $0.09$ & $S_{1101}$ & $(1101)_2$ \\
+ \hline
+ \end{tabular}
+ \end{table}
+            Here you can see that a more probable symbol is encoded with fewer bits than a less probable one. Moreover, no codeword is the prefix of another (the code is prefix-free), so an encoded stream can be decoded unambiguously. We are close to an optimal encoding of the symbols.
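+
+            To quantify this (a short added check, not part of the task): the average codeword length is
+            \begin{equation*}
+                \begin{split}
+                    \bar{L} &= \sum\limits_{i=1}^{8} p_i \cdot l_i \\
+                    &= 0.4 \cdot 2 + 0.1 \cdot 2 + 0.05 \cdot 4 + 0.05 \cdot 4 + 0.15 \cdot 3 + 0.025 \cdot 4 + 0.135 \cdot 3 + 0.09 \cdot 4 \\
+                    &= 2.715
+                \end{split}
+            \end{equation*}
+            This is close to the lower bound given by the source entropy, $\mathrm{H}(X) \approx 2.54$, and clearly below the $3$ bits per symbol of the fixed-length input representation.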
+
+ \task
+ Separate the message into symbols:
+ \begin{equation*}
+ \left(000001101110\right)_2 = \left[\left(000\right)_2, \left(001\right)_2, \left(101\right)_2, \left(110\right)_2\right]
+ \end{equation*}
+
+ Encode the symbols using the table:
+ \begin{equation*}
+ \left[\left(00\right)_2, \left(01\right)_2, \left(1100\right)_2, \left(111\right)_2\right] = \left(00011100111\right)_2
+ \end{equation*}
+
+            We have reduced the number of bits from 12 to 11.
+
+            This may seem like a small saving, but it scales with the length of the message: reducing a file from \SI{12}{GiB} to \SI{11}{GiB}, for example, is a considerable gain.
+
+ \task
+            Split the message $\left(01100010100\right)_2$ into the codewords:
+ \begin{equation*}
+ \left(01100010100\right)_2 = \left[\left(01\right)_2, \left(1000\right)_2, \left(101\right)_2, \left(00\right)_2\right]
+ \end{equation*}
+
+            Do a reverse lookup in our encoding table:
+ \begin{equation*}
+ \left[\left(001\right)_2, \left(010\right)_2, \left(100\right)_2, \left(000\right)_2\right] = \left(001010100000\right)_2
+ \end{equation*}
+ \end{tasks}
+\end{solution}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\begin{question}[subtitle={Bit Errors}]
+ We consider the transmission of binary symbols. The transmitter transmits symbols from the alphabet $X = \left\{0, 1\right\}$. The receiver decodes the received signal to the symbol alphabet $Y = \left\{0, 1\right\}$.
+
+    If there is no noise ($\mathrm{SNR} = \infty$), there are no bit errors. The conditional probability matrix of the received symbols given the transmitted symbols is:
+ \begin{equation*}
+ \mat{P}_{Y/X,\SI{0}{\percent}} = \left[\begin{matrix}
+ 1 & 0 \\
+ 0 & 1
+ \end{matrix}\right]
+ \end{equation*}
+
+    Under noisy transmission, there is a probability of bit errors. The bit error rate (BER) is $\SI{10}{\percent}$. The conditional probability matrix of the received symbols is then:
+ \begin{equation*}
+ \mat{P}_{Y/X,\SI{10}{\percent}} = \left[\begin{matrix}
+ 0.9 & 0.1 \\
+ 0.1 & 0.9
+ \end{matrix}\right]
+ \end{equation*}
+
+ \begin{tasks}
+ \task
+            The symbol sequence $\left[1, 0, 1, 0\right]$ is transmitted through the \textbf{noise-free} channel. What will the received symbol sequence look like?
+
+ \task
+            The receiver receives the symbol sequence $\left[1, 0, 1, 0, 1, 1, 1, 0, 0, 1\right]$ from the \textbf{noisy} channel. On average, how many bits of this sequence are correct? What countermeasures can be taken?
+ \end{tasks}
+\end{question}
+
+\begin{solution}
+ \begin{tasks}
+ \task
+ There are no bit errors. So the receiver will always decode $\left[1, 0, 1, 0\right]$.
+
+            Note that this will never be observed in reality, where noise is always present; it is a purely theoretical consideration.
+
+ \task
+            We have a bit error rate of $\SI{10}{\percent}$, meaning that, on average, every tenth received bit is wrong. For the received sequence of 10 symbols, only \textbf{9 symbols} are correct on average (see the short calculation after this list).
+
+ However, we cannot definitively say which bits are correct and which are wrong. We can only determine the probabilities. So we need error detection and error correction methods (channel coding).
+
+            Every transmission channel is noisy, so there will always be a non-zero probability of bit errors. Generally, the better the signal-to-noise ratio (SNR), the lower the bit error rate (BER).
+ \end{tasks}
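+
+    A short calculation for the last task (added as a sanity check, using only the given BER): the expected number of correct bits and the probability of an error-free sequence are
+    \begin{equation*}
+        \mathrm{E}\left[\text{correct bits}\right] = n \left(1 - \mathrm{BER}\right) = 10 \cdot 0.9 = 9
+        \qquad
+        P\left(\text{error-free}\right) = \left(1 - \mathrm{BER}\right)^{n} = 0.9^{10} \approx 0.35
+    \end{equation*}
+    So although 9 of 10 bits are correct on average, the whole sequence arrives without any bit error in only about $\SI{35}{\percent}$ of the cases.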
+\end{solution}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\begin{question}[subtitle={Hamming Distance}]
+ Determine the Hamming distance of the following codeword pairs!
+
+ \begin{tasks}
+ \task
+ $\vec{v}_1 = \left[1, 0, 1\right]$ and $\vec{v}_2 = \left[1, 1, 1\right]$
+
+ \task
+ $\vec{v}_1 = \left[1, 1\right]$ and $\vec{v}_2 = \left[1, 1\right]$
+
+ \task
+ $\vec{v}_1 = \left[1, 0, 0, 0, 0\right]$ and $\vec{v}_2 = \left[0, 1, 1, 0, 0\right]$
+
+ \task
+ $\vec{v}_1 = \left[1, 0\right]$ and $\vec{v}_2 = \left[0, 0, 0, 1\right]$
+ \end{tasks}
+\end{question}
+
+\begin{solution}
+ \begin{tasks}
+ \task
+ $\mathrm{d}\left(\vec{v}_1, \vec{v}_2\right) = 1$
+
+ \task
+ $\mathrm{d}\left(\vec{v}_1, \vec{v}_2\right) = 0$
+
+ The codewords are equal.
+
+ \task
+ $\mathrm{d}\left(\vec{v}_1, \vec{v}_2\right) = 3$
+
+ \task
+            The Hamming distance is undefined: the codewords have different lengths and are not comparable.
+ \end{tasks}
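+
+    In general, the Hamming distance of two equal-length binary words equals the weight (number of ones) of their modulo-2 sum. For task c):
+    \begin{equation*}
+        \vec{v}_1 \oplus \vec{v}_2 = \left[1, 1, 1, 0, 0\right] \quad\Rightarrow\quad \mathrm{d}\left(\vec{v}_1, \vec{v}_2\right) = 3
+    \end{equation*}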
+\end{solution}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\begin{question}[subtitle={Channel Coding}]
+ A linear block code with the generator matrix $\mat{G}$ is given.
+ \begin{equation*}
+ \mat{G} = \left[\begin{matrix}
+ 1 & 0 & 0 & 0 & 0 & 1 & 1 \\
+ 0 & 1 & 0 & 0 & 1 & 0 & 1 \\
+ 0 & 0 & 1 & 0 & 1 & 1 & 0 \\
+ 0 & 0 & 0 & 1 & 1 & 1 & 1
+ \end{matrix}\right]
+ \end{equation*}
+
+ \begin{tasks}
+ \task
+ How many bits are in one information block? \textit{(Hint: Information, not codeword length)}
+ \task
+ How many bit errors can be detected and how many can be corrected in one message block?
+ \task
+ Encode the message $\vec{m} = \left[1, 0, 0, 1\right]$!
+ \task
+            Is the code systematic? Briefly explain the defining property of systematic codes!
+ \task
+ Construct the parity check matrix!
+ \task
+ Is the word $\vec{x} = \left[1, 0, 1, 0, 1, 0, 1\right]$ a valid codeword? What is the message $\vec{m}$?
+ \end{tasks}
+\end{question}
+
+\begin{solution}
+ \begin{tasks}
+ \task
+            The generator matrix $\mat{G}$ has $k = 4$ rows and $n = 7$ columns, so it is a $(7,4)$ code. One information block has $k = 4$ bits.
+
+ \task
+ \begin{itemize}
+                \item It is a $(7,4)$ code with $n = 7$ and $k = 4$.
+                \item For a linear block code, the minimum Hamming distance equals the minimum weight of its non-zero codewords. Here, the first row of $\mat{G}$ is a codeword of weight 3, and no non-zero codeword has a smaller weight (this is the $(7,4)$ Hamming code), so $d_{min} = 3$. Note that $n - k + 1 = 4$ is only an upper bound (the Singleton bound).
+                \item Detectable errors: $d_{min} - 1 = 2$
+                \item Correctable errors: $\left\lfloor \frac{d_{min} - 1}{2} \right\rfloor = 1$
+ \end{itemize}
+
+ \task
+ \begin{equation*}
+ \vec{x} = \left[1, 0, 0, 1\right] \cdot \left[\begin{matrix}
+ 1 & 0 & 0 & 0 & 0 & 1 & 1 \\
+ 0 & 1 & 0 & 0 & 1 & 0 & 1 \\
+ 0 & 0 & 1 & 0 & 1 & 1 & 0 \\
+ 0 & 0 & 0 & 1 & 1 & 1 & 1
+ \end{matrix}\right] = \left[1, 0, 0, 1, 1, 0, 0\right]
+ \end{equation*}
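+
+            Over $\mathrm{GF}(2)$, this product is simply the modulo-2 sum of those rows of $\mat{G}$ that correspond to the ones in $\vec{m}$, here rows 1 and 4:
+            \begin{equation*}
+                \vec{x} = \left[1, 0, 0, 0, 0, 1, 1\right] \oplus \left[0, 0, 0, 1, 1, 1, 1\right] = \left[1, 0, 0, 1, 1, 0, 0\right]
+            \end{equation*}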
+
+ \task
+ \begin{itemize}
+                \item Systematic codes contain the original message unchanged; only parity bits are appended.
+ \item So a receiver can read the data without the need for decoding, provided that no bit errors are present.
+ \item This code is systematic.
+ \end{itemize}
+
+ \task
+            The parity matrix (the last three columns of $\mat{G}$) is
+ \begin{equation*}
+ \mat{P} = \left[\begin{matrix}
+ 0 & 1 & 1 \\
+ 1 & 0 & 1 \\
+ 1 & 1 & 0 \\
+ 1 & 1 & 1
+ \end{matrix}\right]
+ \end{equation*}
+ Its transpose is
+ \begin{equation*}
+ \mat{P}^{\mathrm{T}} = \left[\begin{matrix}
+ 0 & 1 & 1 & 1 \\
+ 1 & 0 & 1 & 1 \\
+ 1 & 1 & 0 & 1
+ \end{matrix}\right]
+ \end{equation*}
+
+ The identity matrix is
+ \begin{equation*}
+ \mat{I} = \left[\begin{matrix}
+ 1 & 0 & 0 \\
+ 0 & 1 & 0 \\
+ 0 & 0 & 1
+ \end{matrix}\right]
+ \end{equation*}
+            The identity matrix must be $3 \times 3$ here; otherwise, it could not be concatenated with the transposed parity matrix.
+
+ The parity check matrix is
+ \begin{equation*}
+                \mat{H} = \left[-\mat{P}^{\mathrm{T}}, \mat{I}\right] = \left[\begin{matrix}
+ 0 & 1 & 1 & 1 & 1 & 0 & 0 \\
+ 1 & 0 & 1 & 1 & 0 & 1 & 0 \\
+ 1 & 1 & 0 & 1 & 0 & 0 & 1
+ \end{matrix}\right]
+ \end{equation*}
+            Every operation is modulo 2. Since $(-1) \bmod 2 \equiv 1 \bmod 2$, the negation $-\mat{P}^{\mathrm{T}}$ is equivalent to $\mat{P}^{\mathrm{T}}$ itself.
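+
+            As a consistency check (an added remark): every valid codeword must satisfy $\mat{H} \cdot \vec{x}^{\mathrm{T}} = \vec{0}$, which holds by construction because
+            \begin{equation*}
+                \mat{H} \cdot \mat{G}^{\mathrm{T}} = -\mat{P}^{\mathrm{T}} + \mat{P}^{\mathrm{T}} = \mat{0} \pmod{2}
+            \end{equation*}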
+
+ \task
+ Calculate the syndrome vector:
+            \begin{equation*}
+                \vec{s} = \mat{H} \cdot \vec{x}^{\mathrm{T}} = \left[\begin{matrix}
+                    0 & 1 & 1 & 1 & 1 & 0 & 0 \\
+                    1 & 0 & 1 & 1 & 0 & 1 & 0 \\
+                    1 & 1 & 0 & 1 & 0 & 0 & 1
+                \end{matrix}\right] \left[1, 0, 1, 0, 1, 0, 1\right]^{\mathrm{T}} = \left[0, 0, 0\right]^{\mathrm{T}} = \vec{0}
+            \end{equation*}
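+            For example, the first syndrome component is (all operations modulo 2):
+            \begin{equation*}
+                s_1 = 0 \cdot 1 + 1 \cdot 0 + 1 \cdot 1 + 1 \cdot 0 + 1 \cdot 1 + 0 \cdot 0 + 0 \cdot 1 = 2 \bmod 2 = 0
+            \end{equation*}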
+ The codeword is valid because the syndrome vector is zero.
+
+ The message can directly be read from the codeword because:
+ \begin{itemize}
+ \item The codeword is valid. No error correction is required.
+ \item The code is systematic. The message is encoded in the first four bits.
+ \end{itemize}
+            The message is $\vec{m} = \left[1, 0, 1, 0\right]$.
+ \end{tasks}
+\end{solution}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%\begin{question}[subtitle={DS-CDMA}]
+%\begin{tasks}
+% \task
+%
+%\end{tasks}
+%\end{question}
+%
+%\begin{solution}
+%\begin{tasks}
+% \task
+%
+%\end{tasks}
+%\end{solution}
+
diff --git a/main/DCS.tex b/main/DCS.tex
index 027f5bf..7a139c8 100644
--- a/main/DCS.tex
+++ b/main/DCS.tex
@@ -112,6 +112,8 @@
\input{../chapter08/content_ch08.tex}
\clearpage
+\input{../exercise08/exercise08.tex}
+\clearpage
\input{../chapter09/content_ch09.tex}
\clearpage
diff --git a/main/exercise08.tex b/main/exercise08.tex
new file mode 100644
index 0000000..6e254c6
--- /dev/null
+++ b/main/exercise08.tex
@@ -0,0 +1,74 @@
+% SPDX-License-Identifier: CC-BY-SA-4.0
+%
+% Copyright (c) 2022 Philipp Le
+%
+% Except where otherwise noted, this work is licensed under a
+% Creative Commons Attribution-ShareAlike 4.0 License.
+%
+% Please find the full copy of the licence at:
+% https://creativecommons.org/licenses/by-sa/4.0/legalcode
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% Configuration
+\def\thekindofdocument{Exercise}
+\def\thesubtitle{Chapter 8: Information and Coding Theory}
+\def\therevision{1}
+\def\therevisiondate{2020-05-26}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% Header
+\input{../common/settings.tex}
+
+\begin{document}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% Title Page
+\pagenumbering{Alph}
+\pagestyle{empty}
+
+% Title Page
+\input{../common/titlepage.tex}
+\newpage
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% Preface
+\pagenumbering{arabic}
+\pagestyle{headings}
+
+% Inhaltsverzeichnis
+%\tableofcontents
+%\newpage
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% Content
+
+\setcounter{chapter}{8}
+
+\input{../exercise08/exercise08.tex}
+\clearpage
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% Appendix
+
+\begin{appendix}
+
+\chapter{Solutions}
+
+\printsolutions
+\clearpage
+
+\end{appendix}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% Nachtrag
+
+% Imprint
+\input{../common/imprint.tex}
+\newpage
+
+% To Do
+\pagenumbering{alph}
+%\listoftodos
+
+\end{document}