entropy sources

This commit is contained in:
eneller
2025-10-29 22:31:42 +01:00
parent d1b45ee1ec
commit 90aa504539
5 changed files with 68 additions and 1 deletion

compression.tex (Normal file, +0)

correction.tex (Normal file, +0)

crypto.tex (Normal file, +0)

entropy.bib (Normal file, +35)

@@ -0,0 +1,35 @@
@misc{ enwiki:shannon-hartley,
author = "{Wikipedia contributors}",
title = "ShannonHartley theorem --- {Wikipedia}{,} The Free Encyclopedia",
year = "2025",
howpublished = "\url{https://en.wikipedia.org/w/index.php?title=Shannon%E2%80%93Hartley_theorem&oldid=1316080633}",
note = "[Online; accessed 29-October-2025]"
}
@misc{ enwiki:noisy-channel,
author = "{Wikipedia contributors}",
title = "Noisy-channel coding theorem --- {Wikipedia}{,} The Free Encyclopedia",
year = "2025",
howpublished = "\url{https://en.wikipedia.org/w/index.php?title=Noisy-channel_coding_theorem&oldid=1285893870}",
note = "[Online; accessed 29-October-2025]"
}
@misc{ enwiki:source-coding,
author = "{Wikipedia contributors}",
title = "Shannon's source coding theorem --- {Wikipedia}{,} The Free Encyclopedia",
year = "2025",
howpublished = "\url{https://en.wikipedia.org/w/index.php?title=Shannon%27s_source_coding_theorem&oldid=1301398440}",
note = "[Online; accessed 29-October-2025]"
}
@misc{ dewiki:nyquist-shannon,
author = "Wikipedia",
title = "Nyquist-Shannon-Abtasttheorem --- Wikipedia{,} die freie Enzyklopädie",
year = "2025",
howpublished = "\url{https://de.wikipedia.org/w/index.php?title=Nyquist-Shannon-Abtasttheorem&oldid=255540066}",
note = "[Online; accessed 29-October-2025]"
}
@misc{ enwiki:information-content,
author = "{Wikipedia contributors}",
title = "Information content --- {Wikipedia}{,} The Free Encyclopedia",
year = "2025",
howpublished = "\url{https://en.wikipedia.org/w/index.php?title=Information_content&oldid=1313862600}",
note = "[Online; accessed 29-October-2025]"
}


@@ -10,6 +10,8 @@
\usepackage{amsmath}
\PassOptionsToPackage{hyphens}{url}
\usepackage{hyperref} % allows urls to follow line breaks of text
\usepackage[style=ieee, backend=biber, maxnames=1, minnames=1]{biblatex}
\addbibresource{entropy.bib}
@@ -147,7 +149,10 @@ as a cost function in machine learning
% relation to hamming distance and efficient codes
\subsection{Noisy communication channels}
Given a model of a noisy communication channel as shown in Figure~\ref{fig:noisy-channel},
the question is at what rate information can still be transmitted reliably.
The noisy channel coding theorem was stated by \textit{Claude Shannon} in 1948, but the first rigorous proof was
provided by Amiel Feinstein in 1954.
It is foundational to information theory: for a noisy channel with capacity $C$ and information transmitted at a
rate $R$, there exist codes that make the error probability at the receiver arbitrarily small whenever $R < C$,
whereas for $R > C$ this is impossible \cite{enwiki:noisy-channel}.
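A concrete instance of such a capacity is given by the Shannon--Hartley theorem: for a channel of bandwidth $B$
(in hertz) disturbed by additive white Gaussian noise, the capacity depends only on the signal-to-noise ratio
$S/N$ \cite{enwiki:shannon-hartley}:
\begin{equation}
C = B \log_2\left(1 + \frac{S}{N}\right)
\end{equation}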
\begin{figure}[H]
\begin{tikzpicture}
@@ -173,4 +178,31 @@ Given a model of
\label{fig:noisy-channel}
\end{figure}
\begin{figure}[H]
\begin{tikzpicture}
\def\boxw{2.5cm}
\def\n{4}
\pgfmathsetmacro{\gap}{(\textwidth - \n*\boxw)/(\n-1)}
\node (S) at (0,0) [draw, align=center, text width=\boxw] {Information Source};
\node (S0) at (\boxw + \gap,1) [draw, circle] {0};
\node (S1) at (\boxw + \gap,-1) [draw, circle] {1};
\node (D0) at ({2*(\boxw + \gap)},1) [draw, circle] {0};
\node (D1) at ({2*(\boxw + \gap)},-1) [draw, circle] {1};
\node (D) at ({3*(\boxw + \gap)},0) [draw, align=center, text width=\boxw] {Destination};
\draw[->] (S) -- (S0);
\draw[->] (S) -- (S1);
\draw[->,dashed] (S0) -- (D0) node[midway, above] {$p$};
\draw[->,dashed] (S0) -- (D1) node[pos=0.8, above] {$1-p$};
\draw[->,dashed] (S1) -- (D0) node[pos= 0.2, above] {$1-p$};
\draw[->,dashed] (S1) -- (D1) node[midway, below] {$p$};
\draw[->] (D0) -- (D);
\draw[->] (D1) -- (D);
\end{tikzpicture}
\caption{Model of a binary symmetric channel}
\label{fig:binary-channel}
\end{figure}
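For the binary symmetric channel of Figure~\ref{fig:binary-channel}, where a bit is flipped with probability
$1-p$ and transmitted correctly with probability $p$, the capacity has a closed form in terms of the binary
entropy function $H_b(q) = -q \log_2 q - (1-q) \log_2 (1-q)$:
\begin{equation}
C_{\mathrm{BSC}} = 1 - H_b(1-p) = 1 + p \log_2 p + (1-p) \log_2 (1-p)
\end{equation}
For $p = 1$ the channel is noiseless and $C_{\mathrm{BSC}} = 1$ bit per channel use; for $p = \tfrac{1}{2}$ the
output is independent of the input and the capacity drops to zero.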
\end{document}