entropy sources

This commit is contained in:
eneller
2025-10-29 22:31:42 +01:00
parent d1b45ee1ec
commit 90aa504539
5 changed files with 68 additions and 1 deletions

View File

@@ -10,6 +10,8 @@
\usepackage{amsmath}
\PassOptionsToPackage{hyphens}{url}
\usepackage{hyperref} % allows urls to follow line breaks of text
\usepackage[style=ieee, backend=biber, maxnames=1, minnames=1]{biblatex}
\addbibresource{entropy.bib}
@@ -147,7 +149,10 @@ as a cost function in machine learning
% relation to hamming distance and efficient codes
\subsection{Noisy communication channels}
Given a model of
The noisy channel coding theorem was stated by \textit{Claude Shannon} in 1948, but the first rigorous proof was
provided in 1954 by Amiel Feinstein.
It is foundational to information theory, stating that given a noisy channel with capacity $C$
and information transmitted at rate $R$, reliable (arbitrarily low-error) communication is possible whenever $R < C$~\cite{enwiki:shannon-hartley}.
\begin{figure}[H]
\begin{tikzpicture}
@@ -173,4 +178,31 @@ Given a model of
\label{fig:noisy-channel}
\end{figure}
\begin{figure}[H]
\begin{tikzpicture}
% Layout: four equally spaced columns across \textwidth:
% source box | input symbols | output symbols | destination box.
% Use \newcommand* instead of plain-TeX \def (errors on accidental
% redefinition; definitions stay local to this tikzpicture group).
\newcommand*{\boxw}{2.5cm}% width of the source/destination boxes
\newcommand*{\ncols}{4}% number of layout columns
% Horizontal gap so that \ncols boxes of width \boxw span \textwidth.
\pgfmathsetmacro{\gap}{(\textwidth - \ncols*\boxw)/(\ncols-1)}
\node (S)  at (0,0) [draw, align=center, text width=\boxw] {Information Source};
\node (S0) at (\boxw + \gap,  1) [draw, circle] {0};
\node (S1) at (\boxw + \gap, -1) [draw, circle] {1};
\node (D0) at ({2*(\boxw + \gap)},  1) [draw, circle] {0};
\node (D1) at ({2*(\boxw + \gap)}, -1) [draw, circle] {1};
\node (D)  at ({3*(\boxw + \gap)}, 0) [draw, align=center, text width=\boxw] {Destination};
% Source emits either input symbol; destination collects either output symbol.
\draw[->] (S) -- (S0);
\draw[->] (S) -- (S1);
\draw[->] (D0) -- (D);
\draw[->] (D1) -- (D);
% Channel transitions (dashed): straight edges labelled $p$, crossover
% edges labelled $1-p$; label positions are offset (pos=0.8 / pos=0.2)
% so the two crossing-edge labels do not collide at the intersection.
% NOTE(review): the usual BSC convention puts the crossover probability
% $p$ on the flipping edges (0->1, 1->0) and $1-p$ on the straight
% edges -- confirm these labels match the definition of $p$ in the text.
\draw[->,dashed] (S0) -- (D0) node[midway, above] {$p$};
\draw[->,dashed] (S0) -- (D1) node[pos=0.8, above] {$1-p$};
\draw[->,dashed] (S1) -- (D0) node[pos= 0.2, above] {$1-p$};
\draw[->,dashed] (S1) -- (D1) node[midway, below] {$p$};
\end{tikzpicture}
\caption{Model of a binary symmetric channel}
\label{fig:binary-channel}
\end{figure}
\end{document}