Merge branch 'master' of raven.am28.uni-tuebingen.de:scientificComputing

Jan Grewe 2015-10-26 19:49:35 +01:00
commit 61a04aa4aa
58 changed files with 2063 additions and 762 deletions

View File

@@ -1,22 +1,29 @@
 BASENAME=bootstrap
 PYFILES=$(wildcard *.py)
 PYPDFFILES=$(PYFILES:.py=.pdf)
-pdf : $(BASENAME)-chapter.pdf $(PYPDFFILES)
-$(BASENAME)-chapter.pdf : $(BASENAME)-chapter.tex $(BASENAME).tex
+all : pdf
+# script:
+pdf : $(BASENAME)-chapter.pdf
+$(BASENAME)-chapter.pdf : $(BASENAME)-chapter.tex $(BASENAME).tex $(PYPDFFILES)
 	pdflatex -interaction=scrollmode $< | tee /dev/stderr | fgrep -q "Rerun to get cross-references right" && pdflatex -interaction=scrollmode $< || true
 $(PYPDFFILES) : %.pdf : %.py
 	python $<
 clean :
-	rm -f *~ $(BASENAME)-chapter.aux $(BASENAME)-chapter.log $(BASENAME)-chapter.out $(BASENAME).aux $(BASENAME).log
+	rm -f *~
+	rm -f $(BASENAME).aux $(BASENAME).log
+	rm -f $(BASENAME)-chapter.aux $(BASENAME)-chapter.log $(BASENAME)-chapter.out
+	rm -f $(PYPDFFILES) $(GPTTEXFILES)
 cleanall : clean
 	rm -f $(BASENAME)-chapter.pdf
-watch :
+watchpdf :
 	while true; do ! make -q pdf && make pdf; sleep 0.5; done

View File

@@ -1,22 +1,29 @@
 BASENAME=likelihood
 PYFILES=$(wildcard *.py)
 PYPDFFILES=$(PYFILES:.py=.pdf)
-pdf : $(BASENAME)-chapter.pdf $(PYPDFFILES)
-$(BASENAME)-chapter.pdf : $(BASENAME)-chapter.tex $(BASENAME).tex
+all : pdf
+# script:
+pdf : $(BASENAME)-chapter.pdf
+$(BASENAME)-chapter.pdf : $(BASENAME)-chapter.tex $(BASENAME).tex $(PYPDFFILES)
 	pdflatex -interaction=scrollmode $< | tee /dev/stderr | fgrep -q "Rerun to get cross-references right" && pdflatex -interaction=scrollmode $< || true
 $(PYPDFFILES) : %.pdf : %.py
 	python $<
 clean :
-	rm -f *~ $(BASENAME)-chapter.aux $(BASENAME)-chapter.log $(BASENAME)-chapter.out $(BASENAME).aux $(BASENAME).log
+	rm -f *~
+	rm -f $(BASENAME).aux $(BASENAME).log
+	rm -f $(BASENAME)-chapter.aux $(BASENAME)-chapter.log $(BASENAME)-chapter.out
+	rm -f $(PYPDFFILES) $(GPTTEXFILES)
 cleanall : clean
 	rm -f $(BASENAME)-chapter.pdf
-watch :
+watchpdf :
 	while true; do ! make -q pdf && make pdf; sleep 0.5; done

View File

@@ -1,11 +1,11 @@
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-\chapter{\tr{Maximum likelihood estimation}{Maximum-Likelihood Methode}}
+\chapter{\tr{Maximum likelihood estimation}{Maximum-Likelihood-Sch\"atzer}}
 In vielen Situationen wollen wir einen oder mehrere Parameter $\theta$
 einer Wahrscheinlichkeitsverteilung sch\"atzen, so dass die Verteilung
-die Daten $x_1, x_2, \ldots x_n$ am besten beschreibt. Bei der
-Maximum-Likelihood-Methode w\"ahlen wir die Parameter so, dass die
+die Daten $x_1, x_2, \ldots x_n$ am besten beschreibt.
+Maximum-Likelihood-Sch\"atzer w\"ahlen die Parameter so, dass die
 Wahrscheinlichkeit, dass die Daten aus der Verteilung stammen, am
 gr\"o{\ss}ten ist.
@@ -16,10 +16,9 @@ $\theta$'') die Wahrscheinlichkeits(dichte)verteilung von $x$ mit dem
 Parameter(n) $\theta$. Das k\"onnte die Normalverteilung
 \begin{equation}
 \label{normpdfmean}
-p(x|\theta) = \frac{1}{\sqrt{2\pi \sigma^2}}e^{-\frac{(x-\theta)^2}{2\sigma^2}}
+p(x|\theta) = \frac{1}{\sqrt{2\pi \sigma^2}}e^{-\frac{(x-\mu)^2}{2\sigma^2}}
 \end{equation}
-sein mit
-fester Standardverteilung $\sigma$ und dem Mittelwert $\mu$ als
+sein mit dem Mittelwert $\mu$ und der Standardabweichung $\sigma$ als
 Parameter $\theta$.
 Wenn nun den $n$ unabh\"angigen Beobachtungen $x_1, x_2, \ldots x_n$
@@ -59,9 +58,10 @@ das Maximum der logarithmierten Likelihood (``Log-Likelihood'') gesucht:
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 \subsection{Beispiel: Das arithmetische Mittel}
-Wenn die Me{\ss}daten $x_1, x_2, \ldots x_n$ der Normalverteilung \eqnref{normpdfmean}
-entstammen, und wir den Mittelwert $\mu$ als einzigen Parameter der Verteilung betrachten,
-welcher Wert von $\theta$ maximiert dessen Likelhood?
+Wenn die Me{\ss}daten $x_1, x_2, \ldots x_n$ der Normalverteilung
+\eqnref{normpdfmean} entstammen, und wir den Mittelwert $\mu=\theta$ als
+einzigen Parameter der Verteilung betrachten, welcher Wert von
+$\theta$ maximiert dessen Likelihood?
 \begin{figure}[t]
 \includegraphics[width=1\textwidth]{mlemean}
@@ -89,7 +89,7 @@ nach dem Parameter $\theta$ und setzen diese gleich Null:
 \Leftrightarrow \quad n \theta & = & \sum_{i=1}^n x_i \\
 \Leftrightarrow \quad \theta & = & \frac{1}{n} \sum_{i=1}^n x_i
 \end{eqnarray*}
-Der Maximum-Likelihood-Estimator ist das arithmetische Mittel der Daten. D.h.
+Der Maximum-Likelihood-Sch\"atzer ist das arithmetische Mittel der Daten. D.h.
 das arithmetische Mittel maximiert die Wahrscheinlichkeit, dass die Daten aus einer
 Normalverteilung mit diesem Mittelwert gezogen worden sind.
@@ -101,12 +101,12 @@ Normalverteilung mit diesem Mittelwert gezogen worden sind.
 die Log-Likelihood (aus der Summe der logarithmierten
 Wahrscheinlichkeiten) f\"ur den Mittelwert als Parameter. Vergleiche
 die Position der Maxima mit den aus den Daten berechneten
-Mittelwerte.
+Mittelwert.
 \end{exercise}
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-\section{Kurvenfit als Maximum Likelihood Estimation}
+\section{Kurvenfit als Maximum-Likelihood Sch\"atzung}
 Beim Kurvenfit soll eine Funktion $f(x;\theta)$ mit den Parametern
 $\theta$ an die Datenpaare $(x_i|y_i)$ durch Anpassung der Parameter
 $\theta$ gefittet werden. Wenn wir annehmen, dass die $y_i$ um die
@@ -125,25 +125,29 @@ gegeben sind.
 Der Parameter $\theta$ soll so gew\"ahlt werden, dass die
 Log-Likelihood maximal wird. Der erste Term der Summe ist
 unabh\"angig von $\theta$ und kann deshalb bei der Suche nach dem
-Maximum weggelassen werden.
+Maximum weggelassen werden:
 \begin{eqnarray*}
 & = & - \frac{1}{2} \sum_{i=1}^n \left( \frac{y_i-f(x_i;\theta)}{\sigma_i} \right)^2
 \end{eqnarray*}
 Anstatt nach dem Maximum zu suchen, k\"onnen wir auch das Vorzeichen der Log-Likelihood
-umdrehen und nach dem Minimum suchen. Dabei k\"onnen wir auch den Faktor $1/2$ vor der Summe vernachl\"assigen --- auch das \"andert nichts an der Position des Minimums.
+umdrehen und nach dem Minimum suchen. Dabei k\"onnen wir auch den Faktor $1/2$ vor der Summe vernachl\"assigen --- auch das \"andert nichts an der Position des Minimums:
 \begin{equation}
+\label{chisqmin}
 \theta_{mle} = \text{argmin}_{\theta} \; \sum_{i=1}^n \left( \frac{y_i-f(x_i;\theta)}{\sigma_i} \right)^2 \;\; = \;\; \text{argmin}_{\theta} \; \chi^2
 \end{equation}
-Die Summer der quadratischen Abst\"ande normiert auf die jeweiligen
+Die Summe der quadratischen Abst\"ande normiert auf die jeweiligen
 Standardabweichungen wird auch mit $\chi^2$ bezeichnet. Der Wert des
-Parameters $\theta$ welcher den quadratischen Abstand minimiert ist
+Parameters $\theta$, welcher den quadratischen Abstand minimiert, ist
 also identisch mit der Maximierung der Wahrscheinlichkeit, dass die
 Daten tats\"achlich aus der Funktion stammen k\"onnen. Minimierung des
-$\chi^2$ ist also ein Maximum-Likelihood Estimate.
+$\chi^2$ ist also eine Maximum-Likelihood Sch\"atzung. Aber nur, wenn
+die Daten normalverteilt um die Funktion streuen! Bei anderen
+Verteilungen m\"usste man die Log-Likelihood entsprechend
+\eqnref{loglikelihood} ausrechnen und maximieren.
 \begin{figure}[t]
 \includegraphics[width=1\textwidth]{mlepropline}
-\caption{\label{mleproplinefig} Maximum Likelihood Estimation der
+\caption{\label{mleproplinefig} Maximum-Likelihood Sch\"atzung der
 Steigung einer Ursprungsgeraden.}
 \end{figure}
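To make the equivalence concrete, here is a minimal Python sketch (not part of the repository; the synthetic data, the scipy.optimize call, and all variable names are illustrative assumptions). It fits the slope of a line through the origin by numerically minimizing the chi-squared and compares the result with the closed-form least-squares slope for equal error bars:

# Minimal sketch (illustrative, not part of the repository): fit the slope of
# a line through the origin, f(x;theta) = theta*x, with known error bars sigma.
import numpy as np
from scipy.optimize import minimize_scalar

rng = np.random.RandomState(42)
x = np.linspace(0.5, 10.0, 50)
sigma = 0.5
y = 2.0*x + sigma*rng.randn(len(x))             # data scattered around slope 2

def chisq(theta):
    # sum of squared residuals, each normalized by its standard deviation
    return np.sum(((y - theta*x)/sigma)**2)

theta_mle = minimize_scalar(chisq, bounds=(0.0, 5.0), method='bounded').x
theta_direct = np.sum(x*y)/np.sum(x*x)          # closed-form slope for equal sigma
print(theta_mle, theta_direct)                  # both close to the true slope 2

Both estimates agree, which is the point made above: for normally distributed residuals, minimizing the chi-squared is the same as maximizing the likelihood.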
@@ -165,33 +169,34 @@ und setzen diese gleich Null:
 \end{eqnarray}
 Damit haben wir nun einen analytischen Ausdruck f\"ur die Bestimmung
 der Steigung $\theta$ der Regressionsgeraden gewonnen. Ein
-Gradientenabstieg ist f\"ur das Fitten der Geradensteigung also gar nicht
-n\"otig. Das gilt allgemein f\"ur das Fitten von Koeffizienten von
-linear kombinierten Basisfunktionen. Parameter die nichtlinear in
-einer Funktion enthalten sind k\"onnen aber nicht analytisch aus den
-Daten berechnet werden. Da bleibt dann nur auf numerische Verfahren
-zur Optimierung der Kostenfunktion, wie z.B. der Gradientenabstieg,
-zur\"uckzugreifen.
+Gradientenabstieg ist f\"ur das Fitten der Geradensteigung also gar
+nicht n\"otig. Das gilt allgemein f\"ur das Fitten von Koeffizienten
+von linear kombinierten Basisfunktionen. Parameter, die nichtlinear in
+einer Funktion enthalten sind, k\"onnen im Gegensatz dazu nicht
+analytisch aus den Daten berechnet werden. F\"ur diesen Fall bleibt
+dann nur auf numerische Verfahren zur Optimierung der Kostenfunktion,
+wie z.B. der Gradientenabstieg, zur\"uckzugreifen.
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 \section{Fits von Wahrscheinlichkeitsverteilungen}
 Zum Abschluss betrachten wir noch den Fall, bei dem wir die Parameter
 einer Wahrscheinlichkeitsdichtefunktion (z.B. Mittelwert und
-Standardabweichung der Normalverteilung) an ein Datenset fitten wolle.
+Standardabweichung der Normalverteilung) an ein Datenset fitten wollen.
 Ein erster Gedanke k\"onnte sein, die
 Wahrscheinlichkeitsdichtefunktion durch Minimierung des quadratischen
-Abstands an ein Histogram der Daten zu fitten. Das ist aber aus
+Abstands an ein Histogramm der Daten zu fitten. Das ist aber aus
 folgenden Gr\"unden nicht die Methode der Wahl: (i)
 Wahrscheinlichkeitsdichten k\"onnen nur positiv sein. Darum k\"onnen
 insbesondere bei kleinen Werten die Daten nicht symmetrisch streuen,
-wie es normalverteilte Daten machen sollten. (ii) Die Datenwerte sind
-nicht unabh\"angig, da das normierte Histogram sich zu Eins
-aufintegriert. Die beiden Annahmen normalverteilte und unabh\"angige Daten
-die die Minimierung des quadratischen Abstands zu einem Maximum
-Likelihood Estimator machen sind also verletzt. (iii) Das Histgramm
-h\"angt von der Wahl der Klassenbreite ab.
+wie es bei normalverteilten Daten der Fall ist. (ii) Die Datenwerte
+sind nicht unabh\"angig, da das normierte Histogramm sich zu Eins
+aufintegriert. Die beiden Annahmen normalverteilte und unabh\"angige
+Daten, die die Minimierung des quadratischen Abstands
+\eqnref{chisqmin} zu einem Maximum-Likelihood Sch\"atzer machen, sind
+also verletzt. (iii) Das Histogramm h\"angt von der Wahl der
+Klassenbreite ab.
 Den direkten Weg, eine Wahrscheinlichkeitsdichtefunktion an ein
 Datenset zu fitten, haben wir oben schon bei dem Beispiel zur
@@ -204,9 +209,10 @@ z.B. dem Gradientenabstieg, gel\"ost wird.
 \begin{figure}[t]
 \includegraphics[width=1\textwidth]{mlepdf}
-\caption{\label{mlepdffig} Maximum Likelihood Estimation einer
-Wahrscheinlichkeitsdichtefunktion. Links: die 100 Datenpunkte, die aus der Gammaverteilung
-2. Ordnung (rot) gezogen worden sind. Der Maximum-Likelihood-Fit ist orange dargestellt.
-Rechts: das normierte Histogramm der Daten zusammen mit der \"uber Minimierung
-des quadratischen Abstands zum Histogramm berechneten Fits ist potentiell schlechter.}
+\caption{\label{mlepdffig} Maximum-Likelihood Sch\"atzung einer
+Wahrscheinlichkeitsdichtefunktion. Links: die 100 Datenpunkte, die
+aus der Gammaverteilung 2. Ordnung (rot) gezogen worden sind. Der
+Maximum-Likelihood-Fit ist orange dargestellt. Rechts: das
+normierte Histogramm der Daten zusammen mit dem \"uber Minimierung
+des quadratischen Abstands zum Histogramm berechneten Fit.}
 \end{figure}
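As a numerical companion to this figure, the following minimal Python sketch (illustrative only, not the repository's plotting script; the fixed scale parameter, parameter bounds, and all names are assumptions) fits the shape parameter of a gamma distribution once by maximizing the log-likelihood and once by least squares on a normalized histogram:

# Minimal sketch (illustrative): direct maximum-likelihood fit of a
# distribution versus a least-squares fit to its histogram.
import numpy as np
from scipy.stats import gamma
from scipy.optimize import minimize_scalar

rng = np.random.RandomState(1)
data = rng.gamma(2.0, 1.0, 100)          # 100 samples from a 2nd-order gamma distribution

# maximum likelihood: maximize the summed log pdf over the shape parameter
def negloglik(a):
    return -np.sum(gamma.logpdf(data, a, scale=1.0))
a_mle = minimize_scalar(negloglik, bounds=(0.1, 10.0), method='bounded').x

# least squares to a normalized histogram (the problematic alternative)
h, edges = np.histogram(data, bins=15, density=True)
centers = 0.5*(edges[:-1] + edges[1:])
def sqerror(a):
    return np.sum((h - gamma.pdf(centers, a, scale=1.0))**2)
a_hist = minimize_scalar(sqerror, bounds=(0.1, 10.0), method='bounded').x

print(a_mle, a_hist)                     # the histogram-based estimate is often worse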

View File

@@ -0,0 +1,35 @@
BASENAME=pointprocesses
TEXFILES=$(wildcard $(BASENAME)??.tex)
EXERCISES=$(TEXFILES:.tex=.pdf)
SOLUTIONS=$(EXERCISES:pointprocesses%=pointprocesses-solutions%)
.PHONY: pdf exercises solutions watch watchexercises watchsolutions clean
pdf : $(SOLUTIONS) $(EXERCISES)
exercises : $(EXERCISES)
solutions : $(SOLUTIONS)
$(SOLUTIONS) : pointprocesses-solutions%.pdf : pointprocesses%.tex instructions.tex
{ echo "\\documentclass[answers,12pt,a4paper,pdftex]{exam}"; sed -e '1d' $<; } > $(patsubst %.pdf,%.tex,$@)
pdflatex -interaction=scrollmode $(patsubst %.pdf,%.tex,$@) | tee /dev/stderr | fgrep -q "Rerun to get cross-references right" && pdflatex -interaction=scrollmode $(patsubst %.pdf,%.tex,$@) || true
rm $(patsubst %.pdf,%,$@).[!p]*
$(EXERCISES) : %.pdf : %.tex instructions.tex
pdflatex -interaction=scrollmode $< | tee /dev/stderr | fgrep -q "Rerun to get cross-references right" && pdflatex -interaction=scrollmode $< || true
watch :
while true; do ! make -q pdf && make pdf; sleep 0.5; done
watchexercises :
while true; do ! make -q exercises && make exercises; sleep 0.5; done
watchsolutions :
while true; do ! make -q solutions && make solutions; sleep 0.5; done
clean :
rm -f *~ *.aux *.log *.out
cleanup : clean
rm -f $(SOLUTIONS) $(EXERCISES)

View File

@@ -0,0 +1,11 @@
\vspace*{-6.5ex}
\begin{center}
\textbf{\Large Einf\"uhrung in die wissenschaftliche Datenverarbeitung}\\[1ex]
{\large Jan Grewe, Jan Benda}\\[-3ex]
Abteilung Neuroethologie \hfill --- \hfill Institut f\"ur Neurobiologie \hfill --- \hfill \includegraphics[width=0.28\textwidth]{UT_WBMW_Black_RGB} \\
\end{center}
\ifprintanswers%
\else
\fi

View File

@@ -0,0 +1,202 @@
\documentclass[12pt,a4paper,pdftex]{exam}
\usepackage[german]{babel}
\usepackage{pslatex}
\usepackage[mediumspace,mediumqspace,Gray]{SIunits} % \ohm, \micro
\usepackage{xcolor}
\usepackage{graphicx}
\usepackage[breaklinks=true,bookmarks=true,bookmarksopen=true,pdfpagemode=UseNone,pdfstartview=FitH,colorlinks=true,citecolor=blue]{hyperref}
%%%%% layout %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage[left=20mm,right=20mm,top=25mm,bottom=25mm]{geometry}
\pagestyle{headandfoot}
\ifprintanswers
\newcommand{\stitle}{: L\"osungen}
\else
\newcommand{\stitle}{}
\fi
\header{{\bfseries\large \"Ubung 6\stitle}}{{\bfseries\large Statistik}}{{\bfseries\large 27. Oktober, 2015}}
\firstpagefooter{Prof. Dr. Jan Benda}{Phone: 29 74573}{Email:
jan.benda@uni-tuebingen.de}
\runningfooter{}{\thepage}{}
\setlength{\baselineskip}{15pt}
\setlength{\parindent}{0.0cm}
\setlength{\parskip}{0.3cm}
\renewcommand{\baselinestretch}{1.15}
%%%%% listings %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage{listings}
\lstset{
language=Matlab,
basicstyle=\ttfamily\footnotesize,
numbers=left,
numberstyle=\tiny,
title=\lstname,
showstringspaces=false,
commentstyle=\itshape\color{darkgray},
breaklines=true,
breakautoindent=true,
columns=flexible,
frame=single,
xleftmargin=1em,
xrightmargin=1em,
aboveskip=10pt
}
%%%%% math stuff: %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage{amsmath}
\usepackage{amssymb}
\usepackage{bm}
\usepackage{dsfont}
\newcommand{\naZ}{\mathds{N}}
\newcommand{\gaZ}{\mathds{Z}}
\newcommand{\raZ}{\mathds{Q}}
\newcommand{\reZ}{\mathds{R}}
\newcommand{\reZp}{\mathds{R^+}}
\newcommand{\reZpN}{\mathds{R^+_0}}
\newcommand{\koZ}{\mathds{C}}
%%%%% page breaks %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\newcommand{\continue}{\ifprintanswers%
\else
\vfill\hspace*{\fill}$\rightarrow$\newpage%
\fi}
\newcommand{\continuepage}{\ifprintanswers%
\newpage
\else
\vfill\hspace*{\fill}$\rightarrow$\newpage%
\fi}
\newcommand{\newsolutionpage}{\ifprintanswers%
\newpage%
\else
\fi}
%%%%% new commands %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\newcommand{\qt}[1]{\textbf{#1}\\}
\newcommand{\pref}[1]{(\ref{#1})}
\newcommand{\extra}{--- Zusatzaufgabe ---\ \mbox{}}
\newcommand{\code}[1]{\texttt{#1}}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{document}
\input{instructions}
\begin{questions}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\question \qt{Homogeneous Poisson process}
We use the Poisson process to generate spike trains on which we can test and improve some
standard analysis functions.
A homogeneous Poisson process of rate $\lambda$ (measured in Hertz) is a point process
where the probability of an event is independent of time $t$ and independent of previous events.
The probability $P$ of an event within a bin of width $\Delta t$ is
\[ P = \lambda \cdot \Delta t \]
for sufficiently small $\Delta t$.
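For orientation, here is a minimal Python sketch of the bin-probability construction described above (the graded solutions referenced below are MATLAB functions from the course repository; the function name, bin width, and parameter values here are illustrative assumptions):

# Minimal Python sketch (illustrative, not the MATLAB solution referenced below):
# homogeneous Poisson spike trains via the bin probability P = rate*dt.
import numpy as np

def hom_poisson_spikes(rate, trials, tmax, dt=1e-4):
    # returns a list of spike-time arrays, one per trial
    nbins = int(tmax/dt)
    spikes = []
    for _ in range(trials):
        hits = np.random.rand(nbins) < rate*dt   # one Bernoulli draw per bin
        spikes.append(np.nonzero(hits)[0]*dt)    # bin indices -> spike times
    return spikes

trains = hom_poisson_spikes(100.0, 10, 0.5)      # 10 trials, 100 Hz, 0.5 s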
\begin{parts}
\part Write a function that generates $n$ homogeneous Poisson spike trains of a given duration $T_{max}$
with rate $\lambda$.
\begin{solution}
\lstinputlisting{hompoissonspikes.m}
\end{solution}
\part Using this function, generate a few trials and display them in a raster plot.
\begin{solution}
\lstinputlisting{../code/spikeraster.m}
\begin{lstlisting}
spikes = hompoissonspikes( 10, 100.0, 0.5 );
spikeraster( spikes )
\end{lstlisting}
\mbox{}\\[-3ex]
\colorbox{white}{\includegraphics[width=0.7\textwidth]{poissonraster100hz}}
\end{solution}
\part Write a function that extracts a single vector of interspike intervals
from the spike times returned by the first function.
\begin{solution}
\lstinputlisting{../code/isis.m}
\end{solution}
\part Write a function that plots the interspike-interval histogram
from a vector of interspike intervals. The function should also
compute the mean, the standard deviation, and the CV of the intervals
and display the values in the plot.
\begin{solution}
\lstinputlisting{../code/isihist.m}
\end{solution}
\part Compute histograms for Poisson spike trains with rate
$\lambda=100$\,Hz. Play around with $T_{max}$ and $n$ and the bin width
(start with 1\,ms) of the histogram.
How many
interspike intervals do you approximately need to get a ``nice''
histogram? How long do you need to record from the neuron?
\begin{solution}
About 5000 intervals for 25 bins. This corresponds to a $5000 / 100\,\hertz = 50\,\second$ recording
of a neuron firing with 100\,\hertz.
\end{solution}
\part Compare the histogram with the true distribution of intervals $T$ of the Poisson process
\[ p(T) = \lambda e^{-\lambda T} \]
for various rates $\lambda$.
\begin{solution}
\lstinputlisting{hompoissonisih.m}
\colorbox{white}{\includegraphics[width=0.48\textwidth]{poissonisih100hz}}
\colorbox{white}{\includegraphics[width=0.48\textwidth]{poissonisih20hz}}
\end{solution}
\part What happens if you make the bin width of the histogram smaller than $\Delta t$
used for generating the Poisson spikes?
\begin{solution}
Bins that fall between the discretization steps of $\Delta t$ stay empty. As a
consequence, the occupied bins become higher than they should be.
\end{solution}
\part Plot the mean interspike interval, the corresponding standard deviation, and the CV
as a function of the rate $\lambda$ of the Poisson process.
Compare the simulations with the theoretical expectations for the dependence on $\lambda$.
\begin{solution}
\lstinputlisting{hompoissonisistats.m}
\colorbox{white}{\includegraphics[width=0.98\textwidth]{poissonisistats}}
\end{solution}
\part Write a function that computes serial correlations for the interspike intervals
for a range of lags.
The serial correlations $\rho_k$ at lag $k$ are defined as
\[ \rho_k = \frac{\langle (T_{i+k} - \langle T \rangle)(T_i - \langle T \rangle) \rangle}{\langle (T_i - \langle T \rangle)^2\rangle} = \frac{{\rm cov}(T_{i+k}, T_i)}{{\rm var}(T_i)} \]
Use this function to show that interspike intervals of Poisson spikes are independent.
\begin{solution}
\lstinputlisting{../code/isiserialcorr.m}
\colorbox{white}{\includegraphics[width=0.98\textwidth]{poissonserial100hz}}
\end{solution}
\part Write a function that generates from spike times
a histogram of spike counts in a count window of given duration $W$.
The function should also plot the Poisson distribution
\[ P(k) = \frac{(\lambda W)^k e^{-\lambda W}}{k!} \]
for the rate $\lambda$ determined from the spike trains.
\begin{solution}
\lstinputlisting{../code/counthist.m}
\colorbox{white}{\includegraphics[width=0.48\textwidth]{poissoncounthistdist100hz10ms}}
\colorbox{white}{\includegraphics[width=0.48\textwidth]{poissoncounthistdist100hz100ms}}
\end{solution}
\part Write a function that computes mean count, variance of count and the corresponding Fano factor
for a range of count window durations. The function should generate tow plots: one plotting
the count variance against the mean, the other one the Fano factor as a function of the window duration.
\begin{solution}
\lstinputlisting{../code/fano.m}
\colorbox{white}{\includegraphics[width=0.98\textwidth]{poissonfano100hz}}
\end{solution}
\end{parts}
\end{questions}
\end{document}

Binary file not shown.

View File

@@ -1,160 +0,0 @@
\documentclass[addpoints,10pt]{exam}
\usepackage{url}
\usepackage{color}
\usepackage{hyperref}
\usepackage{graphicx}
\pagestyle{headandfoot}
\runningheadrule
\firstpageheadrule
\firstpageheader{Scientific Computing}{Homogeneous Poisson process}{Oct 27, 2014}
%\runningheader{Homework 01}{Page \thepage\ of \numpages}{23. October 2014}
\firstpagefooter{}{}{}
\runningfooter{}{}{}
\pointsinmargin
\bracketedpoints
%\printanswers
\shadedsolutions
\usepackage[mediumspace,mediumqspace,Gray]{SIunits} % \ohm, \micro
%%%%% listings %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage{listings}
\lstset{
basicstyle=\ttfamily,
numbers=left,
showstringspaces=false,
language=Matlab,
breaklines=true,
breakautoindent=true,
columns=flexible,
frame=single,
captionpos=t,
xleftmargin=2em,
xrightmargin=1em,
aboveskip=10pt,
%title=\lstname,
title={\protect\filename@parse{\lstname}\protect\filename@base.\protect\filename@ext}
}
\begin{document}
\sffamily
%%%%%%%%%%%%%% Questions %%%%%%%%%%%%%%%%%%%%%%%%%
\begin{questions}
\question \textbf{Homogeneous Poisson process}
We use the Poisson process to generate spike trains on which we can test and improve some
standard analysis functions.
A homogeneous Poisson process of rate $\lambda$ (measured in Hertz) is a point process
where the probability of an event is independent of time $t$ and independent of previous events.
The probability $P$ of an event within a bin of width $\Delta t$ is
\[ P = \lambda \cdot \Delta t \]
for sufficiently small $\Delta t$.
\begin{parts}
\part Write a function that generates $n$ homogeneous Poisson spike trains of a given duration $T_{max}$
with rate $\lambda$.
\begin{solution}
\lstinputlisting{hompoissonspikes.m}
\end{solution}
\part Using this function, generate a few trials and display them in a raster plot.
\begin{solution}
\lstinputlisting{simulations/spikeraster.m}
\begin{lstlisting}
spikes = hompoissonspikes( 10, 100.0, 0.5 );
spikeraster( spikes )
\end{lstlisting}
\mbox{}\\[-3ex]
\colorbox{white}{\includegraphics[width=0.7\textwidth]{poissonraster100hz}}
\end{solution}
\part Write a function that extracts a single vector of interspike intervals
from the spike times returned by the first function.
\begin{solution}
\lstinputlisting{simulations/isis.m}
\end{solution}
\part Write a function that plots the interspike-interval histogram
from a vector of interspike intervals. The function should also
compute the mean, the standard deviation, and the CV of the intervals
and display the values in the plot.
\begin{solution}
\lstinputlisting{simulations/isihist.m}
\end{solution}
\part Compute histograms for Poisson spike trains with rate
$\lambda=100$\,Hz. Play around with $T_{max}$ and $n$ and the bin width
(start with 1\,ms) of the histogram.
How many
interspike intervals do you approximately need to get a ``nice''
histogram? How long do you need to record from the neuron?
\begin{solution}
About 5000 intervals for 25 bins. This corresponds to a $5000 / 100\,\hertz = 50\,\second$ recording
of a neuron firing with 100\,\hertz.
\end{solution}
\part Compare the histogram with the true distribution of intervals $T$ of the Poisson process
\[ p(T) = \lambda e^{-\lambda T} \]
for various rates $\lambda$.
\begin{solution}
\lstinputlisting{hompoissonisih.m}
\colorbox{white}{\includegraphics[width=0.48\textwidth]{poissonisih100hz}}
\colorbox{white}{\includegraphics[width=0.48\textwidth]{poissonisih20hz}}
\end{solution}
\part What happens if you make the bin width of the histogram smaller than $\Delta t$
used for generating the Poisson spikes?
\begin{solution}
Bins that fall between the discretization steps of $\Delta t$ stay empty. As a
consequence, the occupied bins become higher than they should be.
\end{solution}
\part Plot the mean interspike interval, the corresponding standard deviation, and the CV
as a function of the rate $\lambda$ of the Poisson process.
Compare the simulations with the theoretical expectations for the dependence on $\lambda$.
\begin{solution}
\lstinputlisting{hompoissonisistats.m}
\colorbox{white}{\includegraphics[width=0.98\textwidth]{poissonisistats}}
\end{solution}
\part Write a function that computes serial correlations for the interspike intervals
for a range of lags.
The serial correlations $\rho_k$ at lag $k$ are defined as
\[ \rho_k = \frac{\langle (T_{i+k} - \langle T \rangle)(T_i - \langle T \rangle) \rangle}{\langle (T_i - \langle T \rangle)^2\rangle} = \frac{{\rm cov}(T_{i+k}, T_i)}{{\rm var}(T_i)} \]
Use this function to show that interspike intervals of Poisson spikes are independent.
\begin{solution}
\lstinputlisting{simulations/isiserialcorr.m}
\colorbox{white}{\includegraphics[width=0.98\textwidth]{poissonserial100hz}}
\end{solution}
\part Write a function that generates from spike times
a histogram of spike counts in a count window of given duration $W$.
The function should also plot the Poisson distribution
\[ P(k) = \frac{(\lambda W)^k e^{-\lambda W}}{k!} \]
for the rate $\lambda$ determined from the spike trains.
\begin{solution}
\lstinputlisting{simulations/counthist.m}
\colorbox{white}{\includegraphics[width=0.48\textwidth]{poissoncounthistdist100hz10ms}}
\colorbox{white}{\includegraphics[width=0.48\textwidth]{poissoncounthistdist100hz100ms}}
\end{solution}
\part Write a function that computes mean count, variance of count and the corresponding Fano factor
for a range of count window durations. The function should generate two plots: one plotting
the count variance against the mean, the other one the Fano factor as a function of the window duration.
\begin{solution}
\lstinputlisting{simulations/fano.m}
\colorbox{white}{\includegraphics[width=0.98\textwidth]{poissonfano100hz}}
\end{solution}
\end{parts}
\end{questions}
\end{document}

View File

@@ -1,142 +1,70 @@
 BASENAME=pointprocesses
-TEXFILE=$(BASENAME).tex
-DVIFILE=$(BASENAME).dvi
-PSFILE=$(BASENAME).ps
-PDFFILE=$(BASENAME).pdf
-FOILSFILE=foils.pdf
-THUMBNAILSFILE=thumbnails.pdf
-HTMLBASENAME=$(BASENAME)h
-HTMLTEXFILE=$(BASENAME)h.tex
-HTMLDIR=$(BASENAME)h
+PYFILES=$(wildcard *.py)
+PYPDFFILES=$(PYFILES:.py=.pdf)
 GPTFILES=$(wildcard *.gpt)
 GPTTEXFILES=$(GPTFILES:.gpt=.tex)
-all: ps pdf talk again watchps watchpdf foils thumbs html html1 epsfigs clean cleanup cleanplots help
-.PHONY: epsfigs
+all: pdf slides thumbs
+# script:
+pdf : $(BASENAME)-chapter.pdf
+$(BASENAME)-chapter.pdf : $(BASENAME)-chapter.tex $(BASENAME).tex $(GPTTEXFILES) $(PYPDFFILES)
+	pdflatex -interaction=scrollmode $< | tee /dev/stderr | fgrep -q "Rerun to get cross-references right" && pdflatex -interaction=scrollmode $< || true
+# slides:
+slides: $(BASENAME)-slides.pdf
+$(BASENAME)-slides.pdf : $(BASENAME)-slides.tex $(GPTTEXFILES) $(PYPDFFILES)
+	pdflatex -interaction=scrollmode $< | tee /dev/stderr | fgrep -q "Rerun to get cross-references right" && pdflatex -interaction=scrollmode $< || true
 # thumbnails:
-thumbs: $(THUMBNAILSFILE)
-$(THUMBNAILSFILE): $(TEXFILE) $(GPTTEXFILES)
+thumbs: $(BASENAME)-handout.pdf
+$(BASENAME)-handout.pdf: $(BASENAME)-slides.tex $(GPTTEXFILES)
 	sed -e 's/setboolean{presentation}{true}/setboolean{presentation}{false}/; s/usepackage{crop}/usepackage[frame]{crop}/' $< > thumbsfoils.tex
 	pdflatex thumbsfoils | tee /dev/stderr | fgrep -q "Rerun to get cross-references right" && pdflatex thumbsfoils || true
-	pdfnup --nup 2x4 --no-landscape --paper a4paper --trim "-1cm -1cm -1cm -1cm" --outfile $@ thumbsfoils.pdf '1-19'
+	pdfnup --nup 2x4 --no-landscape --paper a4paper --trim "-1cm -1cm -1cm -1cm" --outfile $@ thumbsfoils.pdf # 1-19
 	rm thumbsfoils.*
-# transparencies:
-foils: $(FOILSFILE)
-$(FOILSFILE): $(TEXFILE) $(GPTTEXFILES)
-	sed -e 's/setboolean{presentation}{true}/setboolean{presentation}{false}/' $< > tfoils.tex
-	pdflatex tfoils | tee /dev/stderr | fgrep -q "Rerun to get cross-references right" && pdflatex tfoils || true
-	pdfnup --nup 1x2 --orient portrait --trim "-1mm -1mm -1mm -1mm" --frame true --delta "1cm 1cm" --paper a4paper --outfile tfoils2.pdf tfoils.pdf
-	pdfnup --nup 1x1 --orient portrait --trim "-2cm -2cm -2cm -2cm" --paper a4paper --outfile $@ tfoils2.pdf
-	rm tfoils.* tfoils2.pdf
-# talk:
-talk: $(PDFFILE)
-pdf: $(PDFFILE)
-$(PDFFILE): $(TEXFILE) $(GPTTEXFILES)
-	pdflatex -interaction=scrollmode $< | tee /dev/stderr | fgrep -q "Rerun to get cross-references right" && pdflatex -interaction=scrollmode $< || true
-# batchmode (no output, no stop on error)
-# nonstopmode / scrollmode (no stop on error)
-# errorstopmode (stop on error)
-again :
-	pdflatex $(TEXFILE)
 watchpdf :
 	while true; do ! make -q pdf && make pdf; sleep 0.5; done
-# html
-html : $(HTMLTEXFILE) $(GPTTEXFILES)
-	rm -f $(HTMLDIR)/*
-	htlatex $<
-	mkdir -p $(HTMLDIR)
-	mv $(HTMLBASENAME).html $(HTMLDIR)
-	mv $(HTMLBASENAME)*.* $(HTMLDIR)
-	mv z*.gif $(HTMLDIR)
-	cd $(HTMLDIR); for i in *.gif; do convert -page +0+0 $$i tmp.gif; mv tmp.gif $$i; done; rmtex $(HTMLBASENAME)
-#$(HTMLTEXFILE) : $(TEXFILE) Makefile
-#	sed 's/setboolean{html}{false}/setboolean{html}{true}/; s/\\colorbox{white}{\(.*\)}/\1/g' $< > $@
-html1 : $(HTMLTEXFILE) $(GPTTEXFILES)
-	latex2html -dir $(HTMLDIR) -mkdir -subdir -nonavigation -noinfo -image_type png -notransparent -white -split 0 $<
-	sed 's-<I>Date:</I>--' $(HTMLDIR)/$(HTMLDIR).html > tmp.html
-	cp tmp.html $(HTMLDIR)/index.html
-	mv tmp.html $(HTMLDIR)/$(HTMLDIR).html
-$(HTMLTEXFILE) : $(TEXFILE)
-	sed '/^%nohtml/,/^%endnohtml/d; s/\\colorbox{white}{\(.*\)}/\1/g' $< > $@
-# eps of all figures:
-epsfigs:
-	mkdir -p epsfigs; \
-	for i in $(GPTFILES); do \
-	{ sed -n -e '1,/\\begin{document}/p' $(TEXFILE); echo "\texpicture{$${i%%.*}}"; echo "\end{document}"; } > tmp.tex; \
-	latex tmp.tex; \
-	dvips tmp.dvi; \
-	ps2eps tmp.ps; \
-	mv tmp.eps epsfigs/$${i%%.*}.eps; \
-	rm tmp.*; \
-	done
-# plots:
-%.tex: %.gpt whitestyles.gp
+watchslides :
+	while true; do ! make -q slides && make slides; sleep 0.5; done
+# python plots:
+$(PYPDFFILES) : %.pdf: %.py
+	python $<
+# gnuplot plots:
+$(GPTTEXFILES) : %.tex: %.gpt whitestyles.gp
 	gnuplot whitestyles.gp $<
 	epstopdf $*.eps
 clean :
 	rm -f *~
-	rmtex $(BASENAME)
-	rm -f $(GPTTEXFILES)
-cleanup :
-	rm -f *~
-	rmtex $(BASENAME)
-	rm -f $(PSFILE) $(PDFFILE) $(FOILSFILE) $(THUMBNAILSFILE)
-	rm -f $(GPTTEXFILES)
-	rm -f -r $(HTMLDIR)
-cleanplots :
-	sed -n -e '/\\begin{document}/,/\\end{document}/p' $(TEXFILE) | fgrep '\input{' | grep -v '^%' | sed 's/.*input{\(.*\).tex}.*/\1.gpt/' > plot.fls
-	mkdir -p unusedplots
-	for i in *.gp*; do \
-	grep -q $$i plot.fls || { grep -q $$i $$(<plot.fls) && echo $$i || mv $$i unusedplots; }; \
-	done >> plot.fls
-	for i in $$(<plot.fls); do \
-	sed "s/\([^'\" ]*\.dat\)/\n\1\n/g;" $$i | fgrep .dat; \
-	done | sort | uniq > dat.fls
-	mkdir -p unuseddata
-	for i in *.dat; do \
-	grep -q $$i dat.fls || mv $$i unuseddata; \
-	done
-	rm dat.fls plot.fls
+	rm -f $(BASENAME).aux $(BASENAME).log
+	rm -f $(BASENAME)-chapter.aux $(BASENAME)-chapter.log $(BASENAME)-chapter.out
+	rm -f $(BASENAME)-slides.aux $(BASENAME)-slides.log $(BASENAME)-slides.out $(BASENAME)-slides.toc $(BASENAME)-slides.nav $(BASENAME)-slides.snm $(BASENAME)-slides.vrb
+	rm -f $(PYPDFFILES) $(GPTTEXFILES)
+cleanall : clean
+	rm -f $(BASENAME)-chapter.pdf $(BASENAME)-slides.pdf $(BASENAME)-handout.pdf
 help :
 	@echo -e \
-	"make pdf: make the pdf file of the talk.\n"\
-	"make foils: make black&white postscript foils of the talk.\n"\
+	"make pdf: make the pdf file of the script.\n"\
+	"make slides: make the pdf file of the slides.\n"\
 	"make thumbs: make color thumbnails of the talk.\n"\
-	"make again: run latex and make the pdf file of the talk,\n"\
-	" no matter whether you changed the .tex file or not.\n\n"\
-	"make watchpdf: make the pdf file of the talk\n"\
+	"make watchpdf: make the pdf file of the script\n"\
+	" whenever the tex file is modified.\n"\
+	"make watchslides: make the pdf file of the slides\n"\
 	" whenever the tex file is modified.\n"\
-	"make html: make a html version of the paper (in $(HTMLDIR)).\n\n"\
 	"make clean: remove all intermediate files,\n"\
-	" just leave the source files and the final .ps and .pdf files.\n"\
+	" just leave the source files and the final .pdf files.\n"\
 	"make cleanup: remove all intermediate files as well as\n"\
-	" the final .ps and .pdf files.\n"\
-	"make cleanplots: move all unused .gpt and .dat files\n"\
-	" into unusedplots/ and unuseddata/, respectively."
+	" the final .pdf files.\n"

View File

@@ -0,0 +1,101 @@
import numpy as np
import matplotlib.pyplot as plt


def hompoisson(rate, trials, duration) :
    # homogeneous Poisson process: add up exponentially distributed intervals
    spikes = []
    for k in range(trials) :
        times = []
        t = 0.0
        while t < duration :
            t += np.random.exponential(1/rate)
            times.append( t )
        spikes.append( times )
    return spikes


def inhompoisson(rate, trials, dt) :
    # inhomogeneous Poisson process: one Bernoulli draw per time bin
    # with probability rate*dt (rate is an array with one value per bin)
    spikes = []
    p = rate*dt
    for k in range(trials) :
        x = np.random.rand(len(rate))
        times = dt*np.nonzero(x<p)[0]
        spikes.append( times )
    return spikes


def pifspikes(input, trials, dt, D=0.1) :
    # perfect integrate-and-fire neuron driven by the input plus white noise
    vreset = 0.0
    vthresh = 1.0
    tau = 1.0
    spikes = []
    for k in range(trials) :
        times = []
        v = vreset
        noise = np.sqrt(2.0*D)*np.random.randn(len(input))/np.sqrt(dt)
        for j in xrange(len(noise)) :
            v += (input[j]+noise[j])*dt/tau
            if v >= vthresh :
                v = vreset
                times.append(j*dt)
        spikes.append( times )
    return spikes


def isis( spikes ) :
    # collect the interspike intervals of all trials in a single list
    isi = []
    for k in xrange(len(spikes)) :
        isi.extend(np.diff(spikes[k]))
    return isi


def plotisih( ax, isis, binwidth=None ) :
    # plot a normalized interspike-interval histogram annotated with rate, mean and CV
    if binwidth is None :
        nperbin = 200.0  # average number of isis per bin
        bins = len(isis)/nperbin  # number of bins
        binwidth = np.max(isis)/bins
        if binwidth < 5e-4 :  # half a millisecond
            binwidth = 5e-4
    h, b = np.histogram(isis, np.arange(0.0, np.max(isis)+binwidth, binwidth), density=True)
    ax.text(0.9, 0.85, 'rate={:.0f}Hz'.format(1.0/np.mean(isis)), ha='right', transform=ax.transAxes)
    ax.text(0.9, 0.75, 'mean={:.0f}ms'.format(1000.0*np.mean(isis)), ha='right', transform=ax.transAxes)
    ax.text(0.9, 0.65, 'CV={:.2f}'.format(np.std(isis)/np.mean(isis)), ha='right', transform=ax.transAxes)
    ax.set_xlabel('ISI [ms]')
    ax.set_ylabel('p(ISI) [1/s]')
    ax.bar( 1000.0*b[:-1], h, 1000.0*np.diff(b) )


# parameter:
rate = 20.0
drate = 50.0
trials = 10
duration = 100.0
dt = 0.001
tau = 0.1

# homogeneous spike trains:
homspikes = hompoisson(rate, trials, duration)

# OU noise:
rng = np.random.RandomState(54637281)
time = np.arange(0.0, duration, dt)
x = np.zeros(time.shape)+rate
n = rng.randn(len(time))*drate*tau/np.sqrt(dt)+rate
for k in xrange(1,len(x)) :
    x[k] = x[k-1] + (n[k]-x[k-1])*dt/tau
x[x<0.0] = 0.0

# pif spike trains:
inhspikes = pifspikes(x, trials, dt, D=0.3)

fig = plt.figure( figsize=(9,4) )
ax = fig.add_subplot(1, 2, 1)
ax.set_title('stationary')
ax.set_xlim(0.0, 200.0)
ax.set_ylim(0.0, 40.0)
plotisih(ax, isis(homspikes))

ax = fig.add_subplot(1, 2, 2)
ax.set_title('non-stationary')
ax.set_xlim(0.0, 200.0)
ax.set_ylim(0.0, 40.0)
plotisih(ax, isis(inhspikes))

plt.tight_layout()
plt.savefig('isihexamples.pdf')
plt.show()

View File

@@ -0,0 +1,271 @@
\documentclass[12pt]{report}
%%%%% title %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\title{\tr{Introduction to Scientific Computing}{Einf\"uhrung in die wissenschaftliche Datenverarbeitung}}
\author{Jan Benda\\Abteilung Neuroethologie\\[2ex]\includegraphics[width=0.3\textwidth]{UT_WBMW_Rot_RGB}}
\date{WS 15/16}
%%%% language %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% \newcommand{\tr}[2]{#1} % en
% \usepackage[english]{babel}
\newcommand{\tr}[2]{#2} % de
\usepackage[german]{babel}
%%%%% packages %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage{pslatex} % nice font for pdf file
\usepackage[breaklinks=true,bookmarks=true,bookmarksopen=true,pdfpagemode=UseNone,pdfstartview=FitH,colorlinks=true,citecolor=blue]{hyperref}
%%%% layout %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage[left=25mm,right=25mm,top=20mm,bottom=30mm]{geometry}
\setcounter{tocdepth}{1}
%%%%% section style %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage[sf,bf,it,big,clearempty]{titlesec}
\setcounter{secnumdepth}{1}
%%%%% units %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage[mediumspace,mediumqspace,Gray]{SIunits} % \ohm, \micro
%%%%% figures %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage{graphicx}
\usepackage{xcolor}
\pagecolor{white}
\newcommand{\ruler}{\par\noindent\setlength{\unitlength}{1mm}\begin{picture}(0,6)%
\put(0,4){\line(1,0){170}}%
\multiput(0,2)(10,0){18}{\line(0,1){4}}%
\multiput(0,3)(1,0){170}{\line(0,1){2}}%
\put(0,0){\makebox(0,0){{\tiny 0}}}%
\put(10,0){\makebox(0,0){{\tiny 1}}}%
\put(20,0){\makebox(0,0){{\tiny 2}}}%
\put(30,0){\makebox(0,0){{\tiny 3}}}%
\put(40,0){\makebox(0,0){{\tiny 4}}}%
\put(50,0){\makebox(0,0){{\tiny 5}}}%
\put(60,0){\makebox(0,0){{\tiny 6}}}%
\put(70,0){\makebox(0,0){{\tiny 7}}}%
\put(80,0){\makebox(0,0){{\tiny 8}}}%
\put(90,0){\makebox(0,0){{\tiny 9}}}%
\put(100,0){\makebox(0,0){{\tiny 10}}}%
\put(110,0){\makebox(0,0){{\tiny 11}}}%
\put(120,0){\makebox(0,0){{\tiny 12}}}%
\put(130,0){\makebox(0,0){{\tiny 13}}}%
\put(140,0){\makebox(0,0){{\tiny 14}}}%
\put(150,0){\makebox(0,0){{\tiny 15}}}%
\put(160,0){\makebox(0,0){{\tiny 16}}}%
\put(170,0){\makebox(0,0){{\tiny 17}}}%
\end{picture}\par}
% figures:
\setlength{\fboxsep}{0pt}
\newcommand{\texpicture}[1]{{\sffamily\footnotesize\input{#1.tex}}}
%\newcommand{\texpicture}[1]{\fbox{\sffamily\footnotesize\input{#1.tex}}}
%\newcommand{\texpicture}[1]{\setlength{\fboxsep}{2mm}\fbox{#1}}
%\newcommand{\texpicture}[1]{}
\newcommand{\figlabel}[1]{\textsf{\textbf{\large \uppercase{#1}}}}
% maximum number of floats:
\setcounter{topnumber}{2}
\setcounter{bottomnumber}{0}
\setcounter{totalnumber}{2}
% float placement fractions:
\renewcommand{\textfraction}{0.2}
\renewcommand{\topfraction}{0.8}
\renewcommand{\bottomfraction}{0.0}
\renewcommand{\floatpagefraction}{0.5}
% spacing for floats:
\setlength{\floatsep}{12pt plus 2pt minus 2pt}
\setlength{\textfloatsep}{20pt plus 4pt minus 2pt}
\setlength{\intextsep}{12pt plus 2pt minus 2pt}
% spacing for a floating page:
\makeatletter
\setlength{\@fptop}{0pt}
\setlength{\@fpsep}{8pt plus 2.0fil}
\setlength{\@fpbot}{0pt plus 1.0fil}
\makeatother
% rules for floats:
\newcommand{\topfigrule}{\vspace*{10pt}{\hrule height0.4pt}\vspace*{-10.4pt}}
\newcommand{\bottomfigrule}{\vspace*{-10.4pt}{\hrule height0.4pt}\vspace*{10pt}}
% captions:
\usepackage[format=plain,singlelinecheck=off,labelfont=bf,font={small,sf}]{caption}
% put caption on separate float:
\newcommand{\breakfloat}{\end{figure}\begin{figure}[t]}
% references to panels of a figure within the caption:
\newcommand{\figitem}[1]{\textsf{\bfseries\uppercase{#1}}}
% references to figures:
\newcommand{\panel}[1]{\textsf{\uppercase{#1}}}
\newcommand{\fref}[1]{\textup{\ref{#1}}}
\newcommand{\subfref}[2]{\textup{\ref{#1}}\,\panel{#2}}
% references to figures in normal text:
\newcommand{\fig}{Fig.}
\newcommand{\Fig}{Figure}
\newcommand{\figs}{Figs.}
\newcommand{\Figs}{Figures}
\newcommand{\figref}[1]{\fig~\fref{#1}}
\newcommand{\Figref}[1]{\Fig~\fref{#1}}
\newcommand{\figsref}[1]{\figs~\fref{#1}}
\newcommand{\Figsref}[1]{\Figs~\fref{#1}}
\newcommand{\subfigref}[2]{\fig~\subfref{#1}{#2}}
\newcommand{\Subfigref}[2]{\Fig~\subfref{#1}{#2}}
\newcommand{\subfigsref}[2]{\figs~\subfref{#1}{#2}}
\newcommand{\Subfigsref}[2]{\Figs~\subfref{#1}{#2}}
% references to figures within bracketed text:
\newcommand{\figb}{Fig.}
\newcommand{\figsb}{Figs.}
\newcommand{\figrefb}[1]{\figb~\fref{#1}}
\newcommand{\figsrefb}[1]{\figsb~\fref{#1}}
\newcommand{\subfigrefb}[2]{\figb~\subfref{#1}{#2}}
\newcommand{\subfigsrefb}[2]{\figsb~\subfref{#1}{#2}}
% references to tables:
\newcommand{\tref}[1]{\textup{\ref{#1}}}
% references to tables in normal text:
\newcommand{\tab}{Tab.}
\newcommand{\Tab}{Table}
\newcommand{\tabs}{Tabs.}
\newcommand{\Tabs}{Tables}
\newcommand{\tabref}[1]{\tab~\tref{#1}}
\newcommand{\Tabref}[1]{\Tab~\tref{#1}}
\newcommand{\tabsref}[1]{\tabs~\tref{#1}}
\newcommand{\Tabsref}[1]{\Tabs~\tref{#1}}
% references to tables within bracketed text:
\newcommand{\tabb}{Tab.}
\newcommand{\tabsb}{Tab.}
\newcommand{\tabrefb}[1]{\tabb~\tref{#1}}
\newcommand{\tabsrefb}[1]{\tabsb~\tref{#1}}
%%%%% equation references %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\newcommand{\eqref}[1]{(\ref{#1})}
\newcommand{\eqn}{\tr{Eq}{Gl}.}
\newcommand{\Eqn}{\tr{Eq}{Gl}.}
\newcommand{\eqns}{\tr{Eqs}{Gln}.}
\newcommand{\Eqns}{\tr{Eqs}{Gln}.}
\newcommand{\eqnref}[1]{\eqn~\eqref{#1}}
\newcommand{\Eqnref}[1]{\Eqn~\eqref{#1}}
\newcommand{\eqnsref}[1]{\eqns~\eqref{#1}}
\newcommand{\Eqnsref}[1]{\Eqns~\eqref{#1}}
%%%%% listings %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage{listings}
\lstset{
inputpath=../code,
basicstyle=\ttfamily\footnotesize,
numbers=left,
showstringspaces=false,
language=Matlab,
commentstyle=\itshape\color{darkgray},
keywordstyle=\color{blue},
stringstyle=\color{green},
backgroundcolor=\color{blue!10},
breaklines=true,
breakautoindent=true,
columns=flexible,
frame=single,
caption={\protect\filename@parse{\lstname}\protect\filename@base},
captionpos=t,
xleftmargin=1em,
xrightmargin=1em,
aboveskip=10pt
}
%%%%% math stuff: %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage{amsmath}
\usepackage{bm}
\usepackage{dsfont}
\newcommand{\naZ}{\mathds{N}}
\newcommand{\gaZ}{\mathds{Z}}
\newcommand{\raZ}{\mathds{Q}}
\newcommand{\reZ}{\mathds{R}}
\newcommand{\reZp}{\mathds{R^+}}
\newcommand{\reZpN}{\mathds{R^+_0}}
\newcommand{\koZ}{\mathds{C}}
%%%%% structure: %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage{ifthen}
\newcommand{\code}[1]{\texttt{#1}}
\newcommand{\source}[1]{
\begin{flushright}
\color{gray}\scriptsize \url{#1}
\end{flushright}
}
\newenvironment{definition}[1][]{\medskip\noindent\textbf{Definition}\ifthenelse{\equal{#1}{}}{}{ #1}:\newline}%
{\medskip}
\newcounter{maxexercise}
\setcounter{maxexercise}{9} % show listings up to exercise maxexercise
\newcounter{theexercise}
\setcounter{theexercise}{1}
\newenvironment{exercise}[1][]{\medskip\noindent\textbf{\tr{Exercise}{\"Ubung}
\arabic{theexercise}:}\newline \newcommand{\exercisesource}{#1}}%
{\ifthenelse{\equal{\exercisesource}{}}{}{\ifthenelse{\value{theexercise}>\value{maxexercise}}{}{\medskip\lstinputlisting{\exercisesource}}}\medskip\stepcounter{theexercise}}
\graphicspath{{figures/}}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{document}
\include{pointprocesses}
\end{document}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{\tr{Homogeneous Poisson process}{Homogener Poisson Prozess}}
\begin{figure}[t]
\includegraphics[width=1\textwidth]{poissonraster100hz}
\caption{\label{hompoissonfig}Rasterplot von Poisson-Spikes.}
\end{figure}
The probability $p(t)\delta t$ of an event occurring at time $t$
is independent of $t$ and independent of any previous event
(independent of event history).
The probability $P$ for an event occurring within a time bin of width $\Delta t$
is
\[ P=\lambda \cdot \Delta t \]
for a Poisson process with rate $\lambda$.
\subsection{Statistics of homogeneous Poisson process}
\begin{figure}[t]
\includegraphics[width=0.45\textwidth]{poissonisihexp20hz}\hfill
\includegraphics[width=0.45\textwidth]{poissonisihexp100hz}
\caption{\label{hompoissonisihfig}Interspike-interval histograms of Poisson spike trains.}
\end{figure}
\begin{itemize}
\item Exponential distribution of intervals $T$: $p(T) = \lambda e^{-\lambda T}$
\item Mean interval $\mu_{ISI} = \frac{1}{\lambda}$
\item Variance of intervals $\sigma_{ISI}^2 = \frac{1}{\lambda^2}$
\item Coefficient of variation $CV_{ISI} = 1$
\item Serial correlation $\rho_k =0$ for $k>0$ (renewal process!)
\item Fano factor $F=1$
\end{itemize}
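These relations are easy to check by simulation; a minimal sketch in Python (illustrative, not part of the repository):

# Minimal sketch (illustrative): check the listed interval statistics of a
# homogeneous Poisson process by drawing exponentially distributed intervals.
import numpy as np

rate = 100.0                                    # lambda in Hertz
isis = np.random.exponential(1.0/rate, 100000)  # exponentially distributed intervals
print(np.mean(isis))                            # ~ 1/lambda = 10 ms
print(np.var(isis))                             # ~ 1/lambda^2
print(np.std(isis)/np.mean(isis))               # CV ~ 1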
\subsection{Count statistics of Poisson process}
\begin{figure}[t]
\includegraphics[width=0.48\textwidth]{poissoncounthistdist100hz10ms}\hfill
\includegraphics[width=0.48\textwidth]{poissoncounthistdist100hz100ms}
\caption{\label{hompoissoncountfig}Count statistics of Poisson spike trains.}
\end{figure}
Poisson distribution:
\[ P(k) = \frac{(\lambda W)^k e^{-\lambda W}}{k!} \]
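A minimal Python sketch comparing simulated spike counts with this distribution (the rate, window size, and recording duration are illustrative assumptions):

# Minimal sketch (illustrative): spike counts in windows of width W follow a
# Poisson distribution with mean lambda*W.
import numpy as np
from scipy.stats import poisson

rate, W, tmax = 100.0, 0.1, 1000.0
spikes = np.cumsum(np.random.exponential(1.0/rate, int(2*rate*tmax)))
spikes = spikes[spikes < tmax]
counts = np.histogram(spikes, np.arange(0.0, tmax+W, W))[0]   # count per window
k = np.arange(counts.max()+1)
print(np.bincount(counts)/float(len(counts)))   # empirical count distribution
print(poisson.pmf(k, rate*W))                   # theoretical Poisson probabilities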

View File

@@ -0,0 +1,412 @@
\documentclass{beamer}
%%%%% title %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\title[]{Scientific Computing --- Point Processes}
\author[]{Jan Benda}
\institute[]{Neuroethology}
\date[]{WS 14/15}
\titlegraphic{\includegraphics[width=0.3\textwidth]{UT_WBMW_Rot_RGB}}
%%%%% beamer %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\mode<presentation>
{
\usetheme{Singapore}
\setbeamercovered{opaque}
\usecolortheme{tuebingen}
\setbeamertemplate{navigation symbols}{}
\usefonttheme{default}
\useoutertheme{infolines}
% \useoutertheme{miniframes}
}
%\AtBeginSection[]
%{
% \begin{frame}<beamer>
% \begin{center}
% \Huge \insertsectionhead
% \end{center}
% \end{frame}
%}
\setbeamertemplate{blocks}[rounded][shadow=true]
\setcounter{tocdepth}{1}
%%%%% packages %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage[english]{babel}
\usepackage{amsmath}
\usepackage{bm}
\usepackage{pslatex} % nice font for pdf file
%\usepackage{multimedia}
\usepackage{dsfont}
\newcommand{\naZ}{\mathds{N}}
\newcommand{\gaZ}{\mathds{Z}}
\newcommand{\raZ}{\mathds{Q}}
\newcommand{\reZ}{\mathds{R}}
\newcommand{\reZp}{\mathds{R^+}}
\newcommand{\reZpN}{\mathds{R^+_0}}
\newcommand{\koZ}{\mathds{C}}
%%%% graphics %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage{graphicx}
\newcommand{\texpicture}[1]{{\sffamily\small\input{#1.tex}}}
%%%%% listings %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage{listings}
\lstset{
basicstyle=\ttfamily,
numbers=left,
showstringspaces=false,
language=Matlab,
commentstyle=\itshape\color{darkgray},
keywordstyle=\color{blue},
stringstyle=\color{green},
backgroundcolor=\color{blue!10},
breaklines=true,
breakautoindent=true,
columns=flexible,
frame=single,
captionpos=b,
xleftmargin=1em,
xrightmargin=1em,
aboveskip=10pt
}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{document}
\begin{frame}[plain]
\frametitle{}
\vspace{-1cm}
\titlepage % erzeugt Titelseite
\end{frame}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Content}
\tableofcontents
\end{frame}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Point processes}
\begin{frame}
\frametitle{Point process}
\vspace{-3ex}
\texpicture{pointprocessscetchA}
A point process is a stochastic (or random) process that generates a sequence of events
at times $\{t_i\}$, $t_i \in \reZ$.
For each point process there is an underlying continuous-valued
process evolving in time. The associated point process occurs when
the underlying continuous process crosses a threshold.
Examples:
\begin{itemize}
\item Spikes/heartbeat: generated by the dynamics of the membrane potential of neurons/heart cells.
\item Earth quakes: generated by the pressure dynamics between the tectonic plates on either side of a geological fault line.
\item Onset of cricket/frogs/birds/... songs: generated by the dynamics of the state of a nervous system.
\end{itemize}
\end{frame}
\begin{frame}
\frametitle{Point process}
\texpicture{pointprocessscetchB}
\end{frame}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Homogeneous Poisson process}
\begin{frame}
\frametitle{Homogeneous Poisson process}
The probability $p(t)\delta t$ of an event occurring at time $t$
is independent of $t$ and independent of any previous event
(independent of event history).
The probability $P$ for an event occurring within a time bin of width $\Delta t$
is
\[ P=\lambda \cdot \Delta t \]
for a Poisson process with rate $\lambda$.
\includegraphics[width=1\textwidth]{poissonraster100hz}
\end{frame}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Interval statistics}
\begin{frame}
\frametitle{Rate}
Rate of events $r$ (``spikes per time'') measured in Hertz.
\begin{itemize}
\item Number of events $N$ per observation time $W$: $r = \frac{N}{W}$
\item Without boundary effects: $r = \frac{N-1}{t_N-t_1}$
\item Inverse interval: $r = \frac{1}{\mu_{ISI}}$
\end{itemize}
\end{frame}
\begin{frame}
\frametitle{(Interspike) interval statistics}
\begin{itemize}
\item Histogram $p(T)$ of intervals $T$. Normalized to $\int_0^{\infty} p(T) \; dT = 1$
\item Mean interval $\mu_{ISI} = \langle T \rangle = \frac{1}{n}\sum\limits_{i=1}^n T_i$
\item Variance of intervals $\sigma_{ISI}^2 = \langle (T - \langle T \rangle)^2 \rangle$\vspace{1ex}
\item Coefficient of variation $CV_{ISI} = \frac{\sigma_{ISI}}{\mu_{ISI}}$
\item Diffusion coefficient $D_{ISI} = \frac{\sigma_{ISI}^2}{2\mu_{ISI}^3}$
\vfill
\end{itemize}
\includegraphics[width=0.45\textwidth]{poissonisih100hz}\hfill
\includegraphics[width=0.45\textwidth]{lifisih16}
\end{frame}
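A minimal Python sketch of these four statistics computed from a vector of interspike intervals (the placeholder data and variable names are assumptions):

# Minimal sketch (illustrative): interval statistics from a vector of
# interspike intervals `isis`.
import numpy as np

isis = np.random.exponential(0.01, 10000)  # placeholder intervals in seconds
mu = np.mean(isis)                         # mean interval
sigma2 = np.var(isis)                      # variance of intervals
cv = np.sqrt(sigma2)/mu                    # coefficient of variation
d = sigma2/(2.0*mu**3)                     # diffusion coefficient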
\begin{frame}
\frametitle{Interval statistics of homogeneous Poisson process}
\begin{itemize}
\item Exponential distribution of intervals $T$: $p(T) = \lambda e^{-\lambda T}$
\item Mean interval $\mu_{ISI} = \frac{1}{\lambda}$
\item Variance of intervals $\sigma_{ISI}^2 = \frac{1}{\lambda^2}$
\item Coefficient of variation $CV_{ISI} = 1$
\end{itemize}
\vfill
\includegraphics[width=0.45\textwidth]{poissonisihexp20hz}\hfill
\includegraphics[width=0.45\textwidth]{poissonisihexp100hz}
\end{frame}
\begin{frame}
\frametitle{Interval return maps}
Scatter plot between succeeding intervals separated by lag $k$.
\vfill
Poisson process $\lambda=100$\,Hz:
\includegraphics[width=1\textwidth]{poissonreturnmap100hz}\hfill
\end{frame}
\begin{frame}
\frametitle{Serial interval correlations}
Correlation coefficients between succeeding intervals separated by lag $k$:
\[ \rho_k = \frac{\langle (T_{i+k} - \langle T \rangle)(T_i - \langle T \rangle) \rangle}{\langle (T_i - \langle T \rangle)^2\rangle} = \frac{{\rm cov}(T_{i+k}, T_i)}{{\rm var}(T_i)} \]
\begin{itemize}
\item $\rho_0=1$ (correlation of each interval with itself).
\item Poisson process: $\rho_k =0$ for $k>0$ (renewal process!)
\end{itemize}
\vfill
\includegraphics[width=0.7\textwidth]{poissonserial100hz}
\end{frame}
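A minimal Python sketch of this estimator (illustrative; np.corrcoef normalizes by the product of standard deviations, which for stationary intervals approximates the definition above):

# Minimal sketch (illustrative): serial correlation coefficients rho_k of
# interspike intervals for lags 0..maxlag.
import numpy as np

def serial_correlations(isis, maxlag=10):
    isis = np.asarray(isis)
    rho = np.zeros(maxlag+1)
    for k in range(maxlag+1):
        a = isis[k:]
        b = isis[:len(isis)-k]
        rho[k] = np.corrcoef(a, b)[0, 1]   # estimate of cov(T_{i+k}, T_i)/var(T_i)
    return rho

print(serial_correlations(np.random.exponential(0.01, 10000)))  # ~ [1, 0, 0, ...]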
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Count statistics}
\begin{frame}
\frametitle{Count statistics}
Histogram of number of events $N$ (counts) within observation window of duration $W$.
\vfill
\includegraphics[width=0.48\textwidth]{poissoncounthist100hz10ms}\hfill
\includegraphics[width=0.48\textwidth]{poissoncounthist100hz100ms}
\end{frame}
\begin{frame}
\frametitle{Count statistics of Poisson process}
Poisson distribution:
\[ P(k) = \frac{(\lambda W)^k e^{-\lambda W}}{k!} \]
\vfill
\includegraphics[width=0.48\textwidth]{poissoncounthistdist100hz10ms}\hfill
\includegraphics[width=0.48\textwidth]{poissoncounthistdist100hz100ms}
\end{frame}
\begin{frame}
\frametitle{Count statistics --- Fano factor}
Statistics of number of events $N$ within observation window of duration $W$.
\begin{itemize}
\item Mean count: $\mu_N = \langle N \rangle$
\item Count variance: $\sigma_N^2 = \langle (N - \langle N \rangle)^2 \rangle$
\item Fano factor (variance divided by mean): $F = \frac{\sigma_N^2}{\mu_N}$
\item Poisson process: $F=1$
\end{itemize}
\vfill
Poisson process $\lambda=100$\,Hz:
\includegraphics[width=1\textwidth]{poissonfano100hz}
\end{frame}
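A minimal Python sketch computing the Fano factor for several counting-window durations (the rate and recording length are illustrative assumptions):

# Minimal sketch (illustrative): Fano factor of spike counts as a function of
# the duration W of the counting window.
import numpy as np

def fano_factor(spiketimes, tmax, W):
    counts = np.histogram(spiketimes, np.arange(0.0, tmax+W, W))[0]
    return np.var(counts)/np.mean(counts)

spikes = np.cumsum(np.random.exponential(0.01, 200000))   # 100 Hz Poisson spikes
spikes = spikes[spikes < 1000.0]
for W in [0.01, 0.1, 1.0]:
    print(W, fano_factor(spikes, 1000.0, W))               # ~ 1 for a Poisson process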
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Integrate-and-fire models}
\begin{frame}
\frametitle{Integrate-and-fire models}
Leaky integrate-and-fire model (LIF):
\[ \tau \frac{dV}{dt} = -V + RI + D\xi \]
Whenever membrane potential $V(t)$ crosses the firing threshold $\theta$, a spike is emitted and
$V(t)$ is reset to $V_{reset}$.
\begin{itemize}
\item $\tau$: membrane time constant (typically 10\,ms)
\item $R$: input resistance (here 1\,mV (!))
\item $D\xi$: additive Gaussian white noise of strength $D$
\item $\theta$: firing threshold (here 10\,mV)
\item $V_{reset}$: reset potential (here 0\,mV)
\end{itemize}
\end{frame}
\begin{frame}
\frametitle{Integrate-and-fire models}
Discretization with time step $\Delta t$: $V(t) \rightarrow V_i,\;t_i = i \Delta t$.\\
Euler integration:
\begin{eqnarray*}
\frac{dV}{dt} & \approx & \frac{V_{i+1} - V_i}{\Delta t} \\
\Rightarrow \quad V_{i+1} & = & V_i + \Delta t \frac{-V_i+RI_i+\sqrt{2D/\Delta t}\,N_i}{\tau}
\end{eqnarray*}
$N_i$ are normally distributed random numbers (Gaussian with zero mean and unit variance)
--- the $\sqrt{\Delta t}$ is for white noise.
\includegraphics[width=0.82\textwidth]{lifraster16}
\end{frame}
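For illustration, a minimal Python sketch of this Euler scheme (analogous to the pifspikes() function used for the figures; the noise strength D is an assumed value, the other parameters are the ones quoted above):

import numpy as np

def lifspikes(I, duration, dt=1e-4, tau=0.01, R=1.0, D=1e-5,
              vthresh=10.0, vreset=0.0):
    # Euler integration of the noisy LIF, returns spike times
    times = []
    v = vreset
    noise = np.sqrt(2.0*D/dt)*np.random.randn(int(duration/dt))
    for k in range(len(noise)):
        v += dt*(-v + R*I + noise[k])/tau
        if v >= vthresh:
            v = vreset
            times.append(k*dt)
    return np.array(times)

spikes = lifspikes(15.7, 10.0)
print(len(spikes)/10.0)   # mean firing rate in Hz (close to 100 Hz for these values)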
\begin{frame}
\frametitle{Interval statistics of LIF}
Interval distribution approaches the inverse Gaussian for large $I$:
\[ p(T) = \frac{1}{\sqrt{4\pi D T^3}}\exp\left[-\frac{(T-\langle T \rangle)^2}{4DT\langle T \rangle^2}\right] \]
where $\langle T \rangle$ is the mean interspike interval and $D$
is the diffusion coefficient.
\vfill
\includegraphics[width=0.45\textwidth]{lifisihdistr08}\hfill
\includegraphics[width=0.45\textwidth]{lifisihdistr16}
\end{frame}
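A minimal sketch that evaluates this density for a given mean interval and diffusion coefficient (both values are arbitrary choices for illustration):

import numpy as np
import matplotlib.pyplot as plt

def inverse_gaussian(T, Tmean, D):
    # p(T) as given above
    return np.exp(-(T - Tmean)**2/(4.0*D*T*Tmean**2))/np.sqrt(4.0*np.pi*D*T**3)

T = np.linspace(1e-4, 0.1, 400)
plt.plot(1000.0*T, inverse_gaussian(T, 0.025, 1.0))
plt.xlabel('ISI [ms]')
plt.ylabel('p(ISI) [1/s]')
plt.show()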
\begin{frame}
\frametitle{Interval statistics of PIF}
For the perfect integrate-and-fire (PIF)
\[ \tau \frac{dV}{dt} = RI + D\xi \]
(the canonical model for supra-threshold firing on a limit cycle)\\
the inverse Gaussian describes the interspike-interval distribution exactly.
\vfill
\includegraphics[width=0.45\textwidth]{pifisihdistr01}\hfill
\includegraphics[width=0.45\textwidth]{pifisihdistr10}
\end{frame}
\begin{frame}
\frametitle{Interval return map of LIF}
LIF $I=15.7$:
\includegraphics[width=1\textwidth]{lifreturnmap16}
\end{frame}
\begin{frame}
\frametitle{Serial correlations of LIF}
LIF $I=15.7$:
\includegraphics[width=1\textwidth]{lifserial16}\\
Integrate-and-fire neurons driven with white noise are still renewal processes!
\end{frame}
\begin{frame}
\frametitle{Count statistics of LIF}
LIF $I=15.7$:
\includegraphics[width=1\textwidth]{liffano16}\\
Fano factor is not one!
\end{frame}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Interval statistics of LIF with OU noise}
\begin{eqnarray*}
\tau \frac{dV}{dt} & = & -V + RI + U \\
\tau_{OU} \frac{dU}{dt} & = & - U + D\xi
\end{eqnarray*}
Ornstein-Uhlenbeck noise is lowpass-filtered white noise.
\includegraphics[width=0.45\textwidth]{lifouisihdistr08-100ms}\hfill
\includegraphics[width=0.45\textwidth]{lifouisihdistr16-100ms}\\
More peaky than the inverse Gaussian!
\end{frame}
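A minimal sketch of how such lowpass-filtered noise can be generated by Euler integration (time step, time constant, and noise strength are arbitrary choices; the $\sqrt{2D/\Delta t}$ discretization of the white-noise term follows the convention of the figure scripts):

import numpy as np

dt, duration = 1e-4, 10.0
tau_ou, D = 0.1, 1.0                       # OU time constant 100 ms, noise strength
u = np.zeros(int(duration/dt))
xi = np.sqrt(2.0*D/dt)*np.random.randn(len(u))
for k in range(1, len(u)):
    u[k] = u[k-1] + dt*(-u[k-1] + xi[k])/tau_ou   # Euler step of the OU equation
print(np.std(u))   # standard deviation of the lowpass-filtered noise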
\begin{frame}
\frametitle{Interval return map of LIF with OU noise}
LIF $I=15.7$, $\tau_{OU}=100$\,ms:
\includegraphics[width=1\textwidth]{lifoureturnmap16-100ms}
\end{frame}
\begin{frame}
\frametitle{Serial correlations of LIF with OU noise}
LIF $I=15.7$, $\tau_{OU}=100$\,ms:
\includegraphics[width=1\textwidth]{lifouserial16-100ms}\\
OU-noise introduces positive interval correlations!
\end{frame}
\begin{frame}
\frametitle{Count statistics of LIF with OU noise}
LIF $I=15.7$, $\tau_{OU}=100$\,ms:
\includegraphics[width=1\textwidth]{lifoufano16-100ms}\\
Fano factor increases with count window duration.
\end{frame}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Interval statistics of LIF with adaptation}
\begin{eqnarray*}
\tau \frac{dV}{dt} & = & -V - A + RI + D\xi \\
\tau_{adapt} \frac{dA}{dt} & = & - A
\end{eqnarray*}
Adaptation $A$ with time constant $\tau_{adapt}$ and increment $\Delta A$ at spike.
\includegraphics[width=0.45\textwidth]{lifadaptisihdistr08-100ms}\hfill
\includegraphics[width=0.45\textwidth]{lifadaptisihdistr65-100ms}\\
Similar to LIF with white noise.
\end{frame}
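A minimal sketch of how the adaptation variable enters the Euler scheme (the noise strength D and the adaptation increment dA are assumed values; the remaining parameters follow the slides):

import numpy as np

def lifadaptspikes(I, duration, dt=1e-4, tau=0.01, tau_adapt=0.1, dA=1.0,
                   R=1.0, D=0.1, vthresh=10.0, vreset=0.0):
    # LIF with spike-triggered adaptation current A
    times = []
    v, a = vreset, 0.0
    noise = np.sqrt(2.0*D/dt)*np.random.randn(int(duration/dt))
    for k in range(len(noise)):
        v += dt*(-v - a + R*I + noise[k])/tau
        a += -dt*a/tau_adapt
        if v >= vthresh:
            v = vreset
            a += dA          # adaptation increment at each spike
            times.append(k*dt)
    return np.array(times)

print(len(lifadaptspikes(10.0, 10.0))/10.0)   # mean firing rate in Hz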
\begin{frame}
\frametitle{Interval return map of LIF with adaptation}
LIF $I=10$, $\tau_{adapt}=100$\,ms:
\includegraphics[width=1\textwidth]{lifadaptreturnmap10-100ms}\\
Negative correlation at lag one.
\end{frame}
\begin{frame}
\frametitle{Serial correlations of LIF with adaptation}
LIF $I=10$, $\tau_{adapt}=100$\,ms:
\includegraphics[width=1\textwidth]{lifadaptserial10-100ms}\\
Adaptation with white noise introduces negative interval correlations!
\end{frame}
\begin{frame}
\frametitle{Count statistics of LIF with adaptation}
LIF $I=10$, $\tau_{adapt}=100$\,ms:
\includegraphics[width=1\textwidth]{lifadaptfano10-100ms}\\
Fano factor decreases with count window duration.
\end{frame}
\end{document}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Non stationary}
\subsection{Inhomogeneous Poisson process}
\subsection{Firing rate}
\subsection{Instantaneous rate}
\subsection{Autocorrelation}
\subsection{Crosscorrelation}
\subsection{Joint PSTH}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Renewal process}
\subsection{Superthreshold firing}
\subsection{Subthreshold firing}
\section{Non-renewal processes}
\subsection{Bursting}
\subsection{Resonator}
\subsection{Standard distributions}
\subsubsection{Gamma}
\subsubsection{How to read ISI histograms}
refractoriness, poisson tail, sub-, supra-threshold, missed spikes
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Correlation with stimulus}
\subsection{Tuning curve}
\subsection{Linear filter}
\subsection{Spatiotemporal receptive field}
\subsection{Generalized linear model}
\begin{frame}
\end{frame}

View File

@ -1,412 +1,107 @@
\documentclass{beamer}
%%%%% title %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\title[]{Scientific Computing --- Point Processes}
\author[]{Jan Benda}
\institute[]{Neuroethology}
\date[]{WS 14/15}
\titlegraphic{\includegraphics[width=0.3\textwidth]{UT_WBMW_Rot_RGB}}
%%%%% beamer %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\mode<presentation>
{
\usetheme{Singapore}
\setbeamercovered{opaque}
\usecolortheme{tuebingen}
\setbeamertemplate{navigation symbols}{}
\usefonttheme{default}
\useoutertheme{infolines}
% \useoutertheme{miniframes}
}
%\AtBeginSection[]
%{
% \begin{frame}<beamer>
% \begin{center}
% \Huge \insertsectionhead
% \end{center}
% \end{frame}
%}
\setbeamertemplate{blocks}[rounded][shadow=true]
\setcounter{tocdepth}{1}
%%%%% packages %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage[english]{babel}
\usepackage{amsmath}
\usepackage{bm}
\usepackage{pslatex} % nice font for pdf file
%\usepackage{multimedia}
\usepackage{dsfont}
\newcommand{\naZ}{\mathds{N}}
\newcommand{\gaZ}{\mathds{Z}}
\newcommand{\raZ}{\mathds{Q}}
\newcommand{\reZ}{\mathds{R}}
\newcommand{\reZp}{\mathds{R^+}}
\newcommand{\reZpN}{\mathds{R^+_0}}
\newcommand{\koZ}{\mathds{C}}
%%%% graphics %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage{graphicx}
\newcommand{\texpicture}[1]{{\sffamily\small\input{#1.tex}}}
%%%%% listings %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage{listings}
\lstset{
basicstyle=\ttfamily,
numbers=left,
showstringspaces=false,
language=Matlab,
commentstyle=\itshape\color{darkgray},
keywordstyle=\color{blue},
stringstyle=\color{green},
backgroundcolor=\color{blue!10},
breaklines=true,
breakautoindent=true,
columns=flexible,
frame=single,
captionpos=b,
xleftmargin=1em,
xrightmargin=1em,
aboveskip=10pt
}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{document} \chapter{\tr{Point processes}{Punktprozesse}}
\begin{frame}[plain]
\frametitle{}
\vspace{-1cm}
\titlepage % erzeugt Titelseite
\end{frame}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Content}
\tableofcontents
\end{frame}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Point processes}
\begin{frame}
\frametitle{Point process}
\vspace{-3ex}
\texpicture{pointprocessscetchA}
A point process is a stochastic (or random) process that generates a sequence of events
at times $\{t_i\}$, $t_i \in \reZ$.
For each point process there is an underlying continuous-valued
process evolving in time. The associated point process occurs when
the underlying continuous process crosses a threshold.
Examples:
\begin{itemize}
\item Spikes/heartbeat: generated by the dynamics of the membrane potential of neurons/heart cells.
\item Earth quakes: generated by the pressure dynamics between the tectonic plates on either side of a geological fault line.
\item Onset of cricket/frogs/birds/... songs: generated by the dynamics of the state of a nervous system.
\end{itemize}
\end{frame}
\begin{frame} \begin{figure}[t]
\frametitle{Point process}
\texpicture{pointprocessscetchB} \texpicture{pointprocessscetchB}
\end{frame} \caption{\label{pointprocessscetchfig}Ein Punktprozess ist eine
Abfolge von Zeitpunkten $t_i$ die auch durch die Intervalle
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% $T_i=t_{i+1}-t_i$ oder die Anzahl der Ereignisse $n_i$ beschrieben
\section{Homogeneous Poisson process} werden kann. }
\end{figure}
\begin{frame}
\frametitle{Homogeneous Poisson process} Ein zeitlicher Punktprozess ist ein stochastischer Prozess, der eine
The probability $p(t)\delta t$ of an event occuring at time $t$ Abfolge von Ereignissen zu den Zeiten $\{t_i\}$, $t_i \in \reZ$,
is independent of $t$ and independent of any previous event generiert.
(independent of event history).
Jeder Punktprozess wird durch einen sich in der Zeit kontinuierlich
The probability $P$ for an event occuring within a time bin of width $\Delta t$ entwickelnden Prozess generiert. Wann immer dieser Prozess eine
is Schwelle \"uberschreitet wird ein Ereigniss des Punktprozesses
\[ P=\lambda \cdot \Delta t \] erzeugt. Zum Beispiel:
for a Poisson process with rate $\lambda$. \begin{itemize}
\includegraphics[width=1\textwidth]{poissonraster100hz} \item Aktionspotentiale/Herzschlag: wird durch die Dynamik des
\end{frame} Membranpotentials eines Neurons/Herzzelle erzeugt.
\item Erdbeben: wird durch die Dynamik des Druckes zwischen
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% tektonischen Platten auf beiden Seiten einer geologischen Verwerfung
\section{Interval statistics} erzeugt.
\item Zeitpunkt eines Grillen/Frosch/Vogelgesangs: wird durch die
\begin{frame} Dynamik des Nervensystems und des Muskelapparates erzeugt.
\frametitle{Rate} \end{itemize}
Rate of events $r$ (``spikes per time'') measured in Hertz.
\begin{itemize} \begin{figure}[t]
\item Number of events $N$ per observation time $W$: $r = \frac{N}{W}$ \includegraphics[width=1\textwidth]{rasterexamples}
\item Without boundary effects: $r = \frac{N-1}{t_N-t_1}$ \caption{\label{rasterexamplesfig}Raster-Plot von jeweils 10
\item Inverse interval: $r = \frac{1}{\mu_{ISI}}$ Realisierungen eines station\"arenen Punktprozesses (homogener
\end{itemize} Poisson Prozess mit Rate $\lambda=20$\;Hz, links) und eines
\end{frame} nicht-station\"aren Punktprozesses (perfect integrate-and-fire
Neuron getrieben mit Ohrnstein-Uhlenbeck Rauschen mit
\begin{frame} Zeitkonstante $\tau=100$\,ms, rechts).}
\frametitle{(Interspike) interval statistics} \end{figure}
\begin{itemize}
\item Histogram $p(T)$ of intervals $T$. Normalized to $\int_0^{\infty} p(T) \; dT = 1$ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\item Mean interval $\mu_{ISI} = \langle T \rangle = \frac{1}{n}\sum\limits_{i=1}^n T_i$ \section{Intervall Statistik}
\item Variance of intervals $\sigma_{ISI}^2 = \langle (T - \langle T \rangle)^2 \rangle$\vspace{1ex}
\item Coefficient of variation $CV_{ISI} = \frac{\sigma_{ISI}}{\mu_{ISI}}$ \begin{figure}[t]
\item Diffusion coefficient $D_{ISI} = \frac{\sigma_{ISI}^2}{2\mu_{ISI}^3}$ \includegraphics[width=1\textwidth]{isihexamples}\hfill
\vfill \caption{\label{isihexamplesfig}Interspike-Intervall Histogramme der in
\end{itemize} \figref{rasterexamplesfig} gezeigten Spikes.}
\includegraphics[width=0.45\textwidth]{poissonisih100hz}\hfill \end{figure}
\includegraphics[width=0.45\textwidth]{lifisih16}
\end{frame} \subsection{(Interspike) Intervall Statistik erster Ordnung}
\begin{itemize}
\begin{frame} \item Histogramm $p(T)$ der Intervalle $T$. Normiert auf $\int_0^{\infty} p(T) \; dT = 1$.
\frametitle{Interval statistics of homogeneous Poisson process} \item Mittleres Intervall $\mu_{ISI} = \langle T \rangle = \frac{1}{n}\sum\limits_{i=1}^n T_i$.
\begin{itemize} \item Varianz der Intervalle $\sigma_{ISI}^2 = \langle (T - \langle T \rangle)^2 \rangle$\vspace{1ex}
\item Exponential distribution of intervals $T$: $p(T) = \lambda e^{-\lambda T}$ \item Variationskoeffizient (``Coefficient of variation'') $CV_{ISI} = \frac{\sigma_{ISI}}{\mu_{ISI}}$.
\item Mean interval $\mu_{ISI} = \frac{1}{\lambda}$ \item Diffusions Koeffizient $D_{ISI} = \frac{\sigma_{ISI}^2}{2\mu_{ISI}^3}$.
\item Variance of intervals $\sigma_{ISI}^2 = \frac{1}{\lambda^2}$ \end{itemize}
\item Coefficient of variation $CV_{ISI} = 1$
\end{itemize} \subsection{Interval return maps}
\vfill Scatter plot von aufeinander folgenden Intervallen $(T_{i+k}, T_i)$ getrennt durch das ``lag'' $k$.
\includegraphics[width=0.45\textwidth]{poissonisihexp20hz}\hfill
\includegraphics[width=0.45\textwidth]{poissonisihexp100hz} \begin{figure}[t]
\end{frame} \includegraphics[width=1\textwidth]{returnmapexamples}
\includegraphics[width=1\textwidth]{serialcorrexamples}
\begin{frame} \caption{\label{returnmapfig}Interspike-Intervall return maps and serial correlations.}
\frametitle{Interval return maps} \end{figure}
Scatter plot between succeeding intervals separated by lag $k$.
\vfill \subsection{Serielle Korrelationen der Intervalle}
Poisson process $\lambda=100$\,Hz: Korrelationskoeffizient zwischen aufeinander folgenden Intervallen getrennt durch ``lag'' $k$:
\includegraphics[width=1\textwidth]{poissonreturnmap100hz}\hfill \[ \rho_k = \frac{\langle (T_{i+k} - \langle T \rangle)(T_i - \langle T \rangle) \rangle}{\langle (T_i - \langle T \rangle)^2\rangle} = \frac{{\rm cov}(T_{i+k}, T_i)}{{\rm var}(T_i)} \]
\end{frame} $\rho_0=1$ (Korrelation jedes Intervalls mit sich selber).
\begin{frame}
\frametitle{Serial interval correlations}
Correlation coefficients between succeeding intervals separated by lag $k$:
\[ \rho_k = \frac{\langle (T_{i+k} - \langle T \rangle)(T_i - \langle T \rangle) \rangle}{\langle (T_i - \langle T \rangle)^2\rangle} = \frac{{\rm cov}(T_{i+k}, T_i)}{{\rm var}(T_i)} \]
\begin{itemize}
\item $\rho_0=1$ (correlation of each interval with itself).
\item Poisson process: $\rho_k =0$ for $k>0$ (renewal process!)
\end{itemize}
\vfill
\includegraphics[width=0.7\textwidth]{poissonserial100hz}
\end{frame}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Count statistics} \section{Z\"ahlstatistik}
\begin{frame}
\frametitle{Count statistics}
Histogram of number of events $N$ (counts) within observation window of duration $W$.
\vfill \begin{figure}[t]
\includegraphics[width=0.48\textwidth]{poissoncounthist100hz10ms}\hfill \includegraphics[width=0.48\textwidth]{poissoncounthist100hz10ms}\hfill
\includegraphics[width=0.48\textwidth]{poissoncounthist100hz100ms} \includegraphics[width=0.48\textwidth]{poissoncounthist100hz100ms}
\end{frame} \caption{\label{countstatsfig}Count Statistik.}
\end{figure}
\begin{frame}
\frametitle{Count statistics of Poisson process} Statistik der Anzahl der Ereignisse $N_i$ innerhalb von Beobachtungsfenstern $i$ der Breite $W$.
Poisson distribution: \begin{itemize}
\[ P(k) = \frac{(\lambda W)^k e^{-\lambda W}}{k!} \] \item Histogramm der counts $N_i$.
\item Mittlere Anzahl von Ereignissen: $\mu_N = \langle N \rangle$.
\vfill \item Varianz der Anzahl: $\sigma_N^2 = \langle (N - \langle N \rangle)^2 \rangle$.
\includegraphics[width=0.48\textwidth]{poissoncounthistdist100hz10ms}\hfill \item Fano Faktor (Varianz geteilt durch Mittelwert): $F = \frac{\sigma_N^2}{\mu_N}$.
\includegraphics[width=0.48\textwidth]{poissoncounthistdist100hz100ms} \end{itemize}
\end{frame}
Insbesondere ist die mittlere Rate der Ereignisse $r$ (``Spikes pro Zeit'', Feuerrate) gemessen in Hertz
\begin{frame} \[ r = \frac{\langle N \rangle}{W} \; . \]
\frametitle{Count statistics --- Fano factor}
Statistics of number of events $N$ within observation window of duration $W$. \begin{figure}[t]
\begin{itemize} \begin{minipage}[t]{0.49\textwidth}
\item Mean count: $\mu_N = \langle N \rangle$ Poisson process $\lambda=100$\,Hz:\\
\item Count variance: $\sigma_N^2 = \langle (N - \langle N \rangle)^2 \rangle$ \includegraphics[width=1\textwidth]{poissonfano100hz}
\item Fano factor (variance divided by mean): $F = \frac{\sigma_N^2}{\mu_N}$ \end{minipage}
\item Poisson process: $F=1$ \hfill
\end{itemize} \begin{minipage}[t]{0.49\textwidth}
\vfill LIF $I=10$, $\tau_{adapt}=100$\,ms:\\
Poisson process $\lambda=100$\,Hz: \includegraphics[width=1\textwidth]{lifadaptfano10-100ms}
\includegraphics[width=1\textwidth]{poissonfano100hz} \end{minipage}
\end{frame} \caption{\label{fanofig}Fano factor.}
\end{figure}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Integrate-and-fire models}
\begin{frame}
\frametitle{Integrate-and-fire models}
Leaky integrate-and-fire model (LIF):
\[ \tau \frac{dV}{dt} = -V + RI + D\xi \]
Whenever membrane potential $V(t)$ crosses the firing threshold $\theta$, a spike is emitted and
$V(t)$ is reset to $V_{reset}$.
\begin{itemize}
\item $\tau$: membrane time constant (typically 10\,ms)
\item $R$: input resistance (here 1\,mV (!))
\item $D\xi$: additive Gaussian white noise of strength $D$
\item $\theta$: firing threshold (here 10\,mV)
\item $V_{reset}$: reset potential (here 0\,mV)
\end{itemize}
\end{frame}
\begin{frame}
\frametitle{Integrate-and-fire models}
Discretization with time step $\Delta t$: $V(t) \rightarrow V_i,\;t_i = i \Delta t$.\\
Euler integration:
\begin{eqnarray*}
\frac{dV}{dt} & \approx & \frac{V_{i+1} - V_i}{\Delta t} \\
\Rightarrow \quad V_{i+1} & = & V_i + \Delta t \frac{-V_i+RI_i+\sqrt{2D/\Delta t}\,N_i}{\tau}
\end{eqnarray*}
$N_i$ are normally distributed random numbers (Gaussian with zero mean and unit variance)
--- dividing the noise by $\sqrt{\Delta t}$ makes its contribution per time step scale with
$\sqrt{\Delta t}$, as required for white noise.
\includegraphics[width=0.82\textwidth]{lifraster16}
\end{frame}
\begin{frame}
\frametitle{Interval statistics of LIF}
Interval distribution approaches Inverse Gaussian for large $I$:
\[ p(T) = \frac{1}{\sqrt{4\pi D T^3}}\exp\left[-\frac{(T-\langle T \rangle)^2}{4DT\langle T \rangle^2}\right] \]
where $\langle T \rangle$ is the mean interspike interval and $D$
is the diffusion coefficient.
\vfill
\includegraphics[width=0.45\textwidth]{lifisihdistr08}\hfill
\includegraphics[width=0.45\textwidth]{lifisihdistr16}
\end{frame}
\begin{frame}
\frametitle{Interval statistics of PIF}
For the perfect integrate-and-fire (PIF)
\[ \tau \frac{dV}{dt} = RI + D\xi \]
(the canonical model for supra-threshold firing on a limit cycle)\\
the inverse Gaussian describes the interspike-interval distribution exactly.
\vfill
\includegraphics[width=0.45\textwidth]{pifisihdistr01}\hfill
\includegraphics[width=0.45\textwidth]{pifisihdistr10}
\end{frame}
\begin{frame}
\frametitle{Interval return map of LIF}
LIF $I=15.7$:
\includegraphics[width=1\textwidth]{lifreturnmap16}
\end{frame}
\begin{frame}
\frametitle{Serial correlations of LIF}
LIF $I=15.7$:
\includegraphics[width=1\textwidth]{lifserial16}\\
Integrate-and-fire neurons driven with white noise are still renewal processes!
\end{frame}
\begin{frame}
\frametitle{Count statistics of LIF}
LIF $I=15.7$:
\includegraphics[width=1\textwidth]{liffano16}\\
Fano factor is not one!
\end{frame}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Interval statistics of LIF with OU noise}
\begin{eqnarray*}
\tau \frac{dV}{dt} & = & -V + RI + U \\
\tau_{OU} \frac{dU}{dt} & = & - U + D\xi
\end{eqnarray*}
Ornstein-Uhlenbeck noise is lowpass-filtered white noise.
\includegraphics[width=0.45\textwidth]{lifouisihdistr08-100ms}\hfill
\includegraphics[width=0.45\textwidth]{lifouisihdistr16-100ms}\\
More peaky than the inverse Gaussian!
\end{frame}
\begin{frame}
\frametitle{Interval return map of LIF with OU noise}
LIF $I=15.7$, $\tau_{OU}=100$\,ms:
\includegraphics[width=1\textwidth]{lifoureturnmap16-100ms}
\end{frame}
\begin{frame}
\frametitle{Serial correlations of LIF with OU noise}
LIF $I=15.7$, $\tau_{OU}=100$\,ms:
\includegraphics[width=1\textwidth]{lifouserial16-100ms}\\
OU-noise introduces positive interval correlations!
\end{frame}
\begin{frame}
\frametitle{Count statistics of LIF with OU noise}
LIF $I=15.7$, $\tau_{OU}=100$\,ms:
\includegraphics[width=1\textwidth]{lifoufano16-100ms}\\
Fano factor increases with count window duration.
\end{frame}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Interval statistics of LIF with adaptation}
\begin{eqnarray*}
\tau \frac{dV}{dt} & = & -V - A + RI + D\xi \\
\tau_{adapt} \frac{dA}{dt} & = & - A
\end{eqnarray*}
Adaptation $A$ with time constant $\tau_{adapt}$ and increment $\Delta A$ at spike.
\includegraphics[width=0.45\textwidth]{lifadaptisihdistr08-100ms}\hfill
\includegraphics[width=0.45\textwidth]{lifadaptisihdistr65-100ms}\\
Similar to LIF with white noise.
\end{frame}
\begin{frame}
\frametitle{Interval return map of LIF with adaptation}
LIF $I=10$, $\tau_{adapt}=100$\,ms:
\includegraphics[width=1\textwidth]{lifadaptreturnmap10-100ms}\\
Negative correlation at lag one.
\end{frame}
\begin{frame}
\frametitle{Serial correlations of LIF with adaptation}
LIF $I=10$, $\tau_{adapt}=100$\,ms:
\includegraphics[width=1\textwidth]{lifadaptserial10-100ms}\\
Adaptation with white noise introduces negative interval correlations!
\end{frame}
\begin{frame}
\frametitle{Count statistics of LIF with adaptation}
LIF $I=10$, $\tau_{adapt}=100$\,ms:
\includegraphics[width=1\textwidth]{lifadaptfano10-100ms}\\
Fano factor decreases with count window duration.
\end{frame}
\end{document}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Non stationary}
\subsection{Inhomogeneous Poisson process}
\subsection{Firing rate}
\subsection{Instantaneous rate}
\subsection{Autocorrelation}
\subsection{Crosscorrelation}
\subsection{Joint PSTH}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Renewal process}
\subsection{Superthreshold firing}
\subsection{Subthreshold firing}
\section{Non-renewal processes}
\subsection{Bursting}
\subsection{Resonator}
\subsection{Standard distributions}
\subsubsection{Gamma}
\subsubsection{How to read ISI histograms}
refractoriness, poisson tail, sub-, supra-threshold, missed spikes
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Correlation with stimulus}
\subsection{Tuning curve}
\subsection{Linear filter}
\subsection{Spatiotemporal receptive field}
\subsection{Generalized linear model}
\begin{frame}
\end{frame}

View File

@ -1,7 +1,7 @@
%!PS-Adobe-2.0 EPSF-2.0 %!PS-Adobe-2.0 EPSF-2.0
%%Title: pointprocessscetchA.tex %%Title: pointprocessscetchA.tex
%%Creator: gnuplot 4.6 patchlevel 4 %%Creator: gnuplot 4.6 patchlevel 4
%%CreationDate: Sun Oct 26 14:09:12 2014 %%CreationDate: Mon Oct 26 09:31:15 2015
%%DocumentFonts: %%DocumentFonts:
%%BoundingBox: 50 50 373 135 %%BoundingBox: 50 50 373 135
%%EndComments %%EndComments
@ -430,10 +430,10 @@ SDict begin [
/Title (pointprocessscetchA.tex) /Title (pointprocessscetchA.tex)
/Subject (gnuplot plot) /Subject (gnuplot plot)
/Creator (gnuplot 4.6 patchlevel 4) /Creator (gnuplot 4.6 patchlevel 4)
/Author (jan) /Author (benda)
% /Producer (gnuplot) % /Producer (gnuplot)
% /Keywords () % /Keywords ()
/CreationDate (Sun Oct 26 14:09:12 2014) /CreationDate (Mon Oct 26 09:31:15 2015)
/DOCINFO pdfmark /DOCINFO pdfmark
end end
} ifelse } ifelse

View File

@ -1,7 +1,7 @@
%!PS-Adobe-2.0 EPSF-2.0 %!PS-Adobe-2.0 EPSF-2.0
%%Title: pointprocessscetchB.tex %%Title: pointprocessscetchB.tex
%%Creator: gnuplot 4.6 patchlevel 4 %%Creator: gnuplot 4.6 patchlevel 4
%%CreationDate: Sun Oct 26 17:34:18 2014 %%CreationDate: Mon Oct 26 09:31:16 2015
%%DocumentFonts: %%DocumentFonts:
%%BoundingBox: 50 50 373 237 %%BoundingBox: 50 50 373 237
%%EndComments %%EndComments
@ -430,10 +430,10 @@ SDict begin [
/Title (pointprocessscetchB.tex) /Title (pointprocessscetchB.tex)
/Subject (gnuplot plot) /Subject (gnuplot plot)
/Creator (gnuplot 4.6 patchlevel 4) /Creator (gnuplot 4.6 patchlevel 4)
/Author (jan) /Author (benda)
% /Producer (gnuplot) % /Producer (gnuplot)
% /Keywords () % /Keywords ()
/CreationDate (Sun Oct 26 17:34:18 2014) /CreationDate (Mon Oct 26 09:31:16 2015)
/DOCINFO pdfmark /DOCINFO pdfmark
end end
} ifelse } ifelse

View File

@ -0,0 +1,86 @@
import numpy as np
import matplotlib.pyplot as plt
def hompoisson(rate, trials, duration) :
spikes = []
for k in range(trials) :
times = []
t = 0.0
while t < duration :
t += np.random.exponential(1/rate)
times.append( t )
spikes.append( times )
return spikes
def inhompoisson(rate, trials, dt) :
spikes = []
p = rate*dt
for k in range(trials) :
x = np.random.rand(len(rate))
times = dt*np.nonzero(x<p)[0]
spikes.append( times )
return spikes
def pifspikes(input, trials, dt, D=0.1) :
vreset = 0.0
vthresh = 1.0
tau = 1.0
spikes = []
for k in range(trials) :
times = []
v = vreset
noise = np.sqrt(2.0*D)*np.random.randn(len(input))/np.sqrt(dt)
for k in xrange(len(noise)) :
v += (input[k]+noise[k])*dt/tau
if v >= vthresh :
v = vreset
times.append(k*dt)
spikes.append( times )
return spikes
# parameter:
rate = 20.0
drate = 50.0
trials = 10
duration = 2.0
dt = 0.001
tau = 0.1;
# homogeneous spike trains:
homspikes = hompoisson(rate, trials, duration)
# OU noise:
rng = np.random.RandomState(54637281)
time = np.arange(0.0, duration, dt)
x = np.zeros(time.shape)+rate
n = rng.randn(len(time))*drate*tau/np.sqrt(dt)+rate
for k in xrange(1,len(x)) :
x[k] = x[k-1] + (n[k]-x[k-1])*dt/tau
x[x<0.0] = 0.0
# inhomogeneous spike trains:
#inhspikes = inhompoisson(x, trials, dt)
# pif spike trains:
inhspikes = pifspikes(x, trials, dt, D=0.3)
fig = plt.figure( figsize=(9,4) )
ax = fig.add_subplot(1, 2, 1)
ax.set_title('stationary')
ax.set_xlim(0.0, duration)
ax.set_ylim(-0.5, trials-0.5)
ax.set_xlabel('Time [s]')
ax.set_ylabel('Trials')
ax.eventplot(homspikes, colors=[[0, 0, 0]], linelength=0.8)
ax = fig.add_subplot(1, 2, 2)
ax.set_title('non-stationary')
ax.set_xlim(0.0, duration)
ax.set_ylim(-0.5, trials-0.5)
ax.set_xlabel('Time [s]')
ax.set_ylabel('Trials')
ax.eventplot(inhspikes, colors=[[0, 0, 0]], linelength=0.8)
plt.tight_layout()
plt.savefig('rasterexamples.pdf')
plt.show()

View File

@ -0,0 +1,105 @@
import numpy as np
import matplotlib.pyplot as plt
def hompoisson(rate, trials, duration) :
spikes = []
for k in range(trials) :
times = []
t = 0.0
while t < duration :
t += np.random.exponential(1/rate)
times.append( t )
spikes.append( times )
return spikes
def inhompoisson(rate, trials, dt) :
spikes = []
p = rate*dt
for k in range(trials) :
x = np.random.rand(len(rate))
times = dt*np.nonzero(x<p)[0]
spikes.append( times )
return spikes
def pifspikes(input, trials, dt, D=0.1) :
vreset = 0.0
vthresh = 1.0
tau = 1.0
spikes = []
for k in range(trials) :
times = []
v = vreset
noise = np.sqrt(2.0*D)*np.random.randn(len(input))/np.sqrt(dt)
for k in xrange(len(noise)) :
v += (input[k]+noise[k])*dt/tau
if v >= vthresh :
v = vreset
times.append(k*dt)
spikes.append( times )
return spikes
def isis( spikes ) :
isi = []
for k in xrange(len(spikes)) :
isi.extend(np.diff(spikes[k]))
return np.array( isi )
def plotisih( ax, isis, binwidth=None ) :
if binwidth == None :
nperbin = 200.0 # average number of isis per bin
bins = len(isis)/nperbin # number of bins
binwidth = np.max(isis)/bins
if binwidth < 5e-4 : # half a millisecond
binwidth = 5e-4
h, b = np.histogram(isis, np.arange(0.0, np.max(isis)+binwidth, binwidth), density=True)
ax.text(0.9, 0.85, 'rate={:.0f}Hz'.format(1.0/np.mean(isis)), ha='right', transform=ax.transAxes)
ax.text(0.9, 0.75, 'mean={:.0f}ms'.format(1000.0*np.mean(isis)), ha='right', transform=ax.transAxes)
ax.text(0.9, 0.65, 'CV={:.2f}'.format(np.std(isis)/np.mean(isis)), ha='right', transform=ax.transAxes)
ax.set_xlabel('ISI [ms]')
ax.set_ylabel('p(ISI) [1/s]')
ax.bar( 1000.0*b[:-1], h, 1000.0*np.diff(b) )
def plotreturnmap(ax, isis, lag=1, max=None) :
ax.set_xlabel(r'ISI$_i$ [ms]')
ax.set_ylabel(r'ISI$_{i+1}$ [ms]')
if max != None :
ax.set_xlim(0.0, 1000.0*max)
ax.set_ylim(0.0, 1000.0*max)
ax.scatter( 1000.0*isis[:-lag], 1000.0*isis[lag:] )
# parameter:
rate = 20.0
drate = 50.0
trials = 10
duration = 10.0
dt = 0.001
tau = 0.1;
# homogeneous spike trains:
homspikes = hompoisson(rate, trials, duration)
# OU noise:
rng = np.random.RandomState(54637281)
time = np.arange(0.0, duration, dt)
x = np.zeros(time.shape)+rate
n = rng.randn(len(time))*drate*tau/np.sqrt(dt)+rate
for k in xrange(1,len(x)) :
x[k] = x[k-1] + (n[k]-x[k-1])*dt/tau
x[x<0.0] = 0.0
# pif spike trains:
inhspikes = pifspikes(x, trials, dt, D=0.3)
fig = plt.figure( figsize=(9,4) )
ax = fig.add_subplot(1, 2, 1)
ax.set_title('stationary')
plotreturnmap(ax, isis(homspikes), 1, 0.3)
ax = fig.add_subplot(1, 2, 2)
ax.set_title('non-stationary')
plotreturnmap(ax, isis(inhspikes), 1, 0.3)
plt.tight_layout()
plt.savefig('returnmapexamples.pdf')
#plt.show()

View File

@ -0,0 +1,117 @@
import numpy as np
import matplotlib.pyplot as plt
def hompoisson(rate, trials, duration) :
spikes = []
for k in range(trials) :
times = []
t = 0.0
while t < duration :
t += np.random.exponential(1/rate)
times.append( t )
spikes.append( times )
return spikes
def inhompoisson(rate, trials, dt) :
spikes = []
p = rate*dt
for k in range(trials) :
x = np.random.rand(len(rate))
times = dt*np.nonzero(x<p)[0]
spikes.append( times )
return spikes
def pifspikes(input, trials, dt, D=0.1) :
vreset = 0.0
vthresh = 1.0
tau = 1.0
spikes = []
for k in range(trials) :
times = []
v = vreset
noise = np.sqrt(2.0*D)*np.random.randn(len(input))/np.sqrt(dt)
for k in xrange(len(noise)) :
v += (input[k]+noise[k])*dt/tau
if v >= vthresh :
v = vreset
times.append(k*dt)
spikes.append( times )
return spikes
def isis( spikes ) :
isi = []
for k in xrange(len(spikes)) :
isi.extend(np.diff(spikes[k]))
return np.array( isi )
def plotisih( ax, isis, binwidth=None ) :
if binwidth == None :
nperbin = 200.0 # average number of isis per bin
bins = len(isis)/nperbin # number of bins
binwidth = np.max(isis)/bins
if binwidth < 5e-4 : # half a millisecond
binwidth = 5e-4
h, b = np.histogram(isis, np.arange(0.0, np.max(isis)+binwidth, binwidth), density=True)
ax.text(0.9, 0.85, 'rate={:.0f}Hz'.format(1.0/np.mean(isis)), ha='right', transform=ax.transAxes)
ax.text(0.9, 0.75, 'mean={:.0f}ms'.format(1000.0*np.mean(isis)), ha='right', transform=ax.transAxes)
ax.text(0.9, 0.65, 'CV={:.2f}'.format(np.std(isis)/np.mean(isis)), ha='right', transform=ax.transAxes)
ax.set_xlabel('ISI [ms]')
ax.set_ylabel('p(ISI) [1/s]')
ax.bar( 1000.0*b[:-1], h, 1000.0*np.diff(b) )
def plotreturnmap(ax, isis, lag=1, max=None) :
ax.set_xlabel(r'ISI$_i$ [ms]')
ax.set_ylabel(r'ISI$_{i+1}$ [ms]')
if max != None :
ax.set_xlim(0.0, 1000.0*max)
ax.set_ylim(0.0, 1000.0*max)
ax.scatter( 1000.0*isis[:-lag], 1000.0*isis[lag:] )
def plotserialcorr(ax, isis, maxlag=10) :
lags = np.arange(maxlag+1)
corr = [1.0]
for lag in lags[1:] :
corr.append(np.corrcoef(isis[:-lag], isis[lag:])[0,1])
ax.set_xlabel(r'lag $k$')
ax.set_ylabel(r'ISI correlation $\rho_k$')
ax.set_xlim(0.0, maxlag)
ax.set_ylim(-1.0, 1.0)
ax.plot(lags, corr, '.-', markersize=20)
# parameter:
rate = 20.0
drate = 50.0
trials = 10
duration = 500.0
dt = 0.001
tau = 0.1;
# homogeneous spike trains:
homspikes = hompoisson(rate, trials, duration)
# OU noise:
rng = np.random.RandomState(54637281)
time = np.arange(0.0, duration, dt)
x = np.zeros(time.shape)+rate
n = rng.randn(len(time))*drate*tau/np.sqrt(dt)+rate
for k in xrange(1,len(x)) :
x[k] = x[k-1] + (n[k]-x[k-1])*dt/tau
x[x<0.0] = 0.0
# pif spike trains:
inhspikes = pifspikes(x, trials, dt, D=0.3)
fig = plt.figure( figsize=(9,3) )
ax = fig.add_subplot(1, 2, 1)
plotserialcorr(ax, isis(homspikes))
ax.set_ylim(-0.2, 1.0)
ax = fig.add_subplot(1, 2, 2)
plotserialcorr(ax, isis(inhspikes))
ax.set_ylim(-0.2, 1.0)
plt.tight_layout()
plt.savefig('serialcorrexamples.pdf')
#plt.show()

Binary files not shown: 15 new image files (1.9, 41, 1.9, 47, 3.2, 3.4, 3.4, 23, 19, 20, 11, 12, 266, 15, and 5.8 KiB) and one changed binary file.

View File

@ -86,6 +86,8 @@
\end{flushright} \end{flushright}
} }
\newcommand{\code}[1]{\texttt{#1}}
\input{../../latex/environments.tex} \input{../../latex/environments.tex}
\makeatother \makeatother
@ -103,11 +105,259 @@
\begin{enumerate} \begin{enumerate}
\item Graphische Darstellung von Daten \item Graphische Darstellung von Daten
\item Spiketrain Analyse \item Spiketrain Analyse
\item \"Ubungen, \"Ubungen, \"Ubungen.
\end{enumerate} \end{enumerate}
\end{frame} \end{frame}
\begin{frame}[plain]
\huge{1. Graphische Darstellung von Daten}\pause
\begin{figure}
\includegraphics[width=0.9\columnwidth]{images/convincing}
\end{figure}
\end{frame}
\begin{frame}
\frametitle{Graphische Darstellung von Daten}
\framesubtitle{Was soll ein Datenplot erreichen?}
\begin{itemize}
\item Ist eine m\"oglichst neutrale Darstellung der Daten.
\item Soll dem Leser die Daten greifbar machen und die Aussagen der
Analyse darstellen.
\item Erlaubt dem Leser, die gezeigten Effekte selbst zu begutachten
und zu validieren.
\item Muss vollst\"andig annotiert sein.
\item Folgt dem Prinzip der \textbf{ink minimization}: Das
Verh\"altnis aus der Tinte, die f\"ur die Darstellung der Daten
gebraucht wird, und der Menge Tinte, die f\"ur die gesamte Graphik
ben\"otigt wird, sollte m\"oglichst gro{\ss} sein.
\end{itemize}
\end{frame}
\begin{frame}
\frametitle{Graphische Darstellung von Daten}
\framesubtitle{Was sollte vermieden werden?}
\begin{itemize}
\item Suggestive oder gar fehlleitende Darstellung.
\item Ablenkung durch unruhige oder \"uberm\"a{\ss}ige Effekte.
\item Comicartige Effekte...
\end{itemize}\pause
\begin{figure}
\includegraphics[width=0.35\columnwidth]{images/one_d_problem_c}
\end{figure}\pause
... au{\ss}er sie werden rein zur Illustration benutzt, ohne einen
Anspruch auf Richtigkeit zu erheben.
\end{frame}
\begin{frame}
\frametitle{Graphische Darstellung von Daten}
\framesubtitle{Suboptimale Beispiele}
\only <1> {
\begin{figure}
\includegraphics[width=0.5\columnwidth]{images/nobelbad}
\end{figure}
\vspace{0.25cm}
Aus Hafting et al., Nature, 2005
}
\only <2> {
\begin{figure}
\includegraphics[width=0.7\columnwidth]{images/misleading_pie}
\end{figure}
\vspace{0.5cm}
\url{https://en.wikipedia.org/wiki/Misleading_graph}
}
\only <3> {
\begin{figure}
\includegraphics[width=0.7\columnwidth]{images/sample_pie}
\end{figure}
\vspace{0.5cm}
\url{https://en.wikipedia.org/wiki/Misleading_graph}
}
\only <4> {
\begin{figure}
\includegraphics[width=0.4\columnwidth]{images/badbarright}
\end{figure}
\vspace{0.5cm}
\url{https://en.wikipedia.org/wiki/Misleading_graph}
}
\only <5> {
\begin{figure}
\includegraphics[width=0.4\columnwidth]{images/badbarleft}
\end{figure}
\vspace{0.5cm}
\url{https://en.wikipedia.org/wiki/Misleading_graph}
}
\only <6> {
\begin{figure}
\includegraphics[width=0.8\columnwidth]{images/badbarplot}
\end{figure}
\vspace{0.5cm}
\url{https://en.wikipedia.org/wiki/Misleading_graph}
}
\only <7> {
Wahl der Zeichenfl\"ache kann den visuellen Eindruck beeinflu{\ss}en.
\begin{columns}
\begin{column}{4.cm}
\begin{figure}
\includegraphics[width=0.7\columnwidth]{images/line_graph1}
\end{figure}
\end{column}
\begin{column}{4.cm}
\begin{figure}
\includegraphics[width=0.7\columnwidth]{images/line_graph1_3}
\end{figure}
\end{column}
\begin{column}{4.cm}
\begin{figure}
\includegraphics[width=0.7\columnwidth]{images/line_graph1_4}
\end{figure}
\end{column}
\end{columns}
\vspace{0.5cm}
\url{https://en.wikipedia.org/wiki/Misleading_graph}
}
\only <8> {
Vorsicht bei der Skalierung von Symbolen!
\begin{columns}
\begin{column}{4.cm}
\begin{figure}
\includegraphics[width=0.7\columnwidth]{images/improperly_scaled_graph}
\end{figure}
\end{column}
\begin{column}{4.cm}
\begin{figure}
\includegraphics[width=0.7\columnwidth]{images/properly_scaled_graph}
\end{figure}
\end{column}
\begin{column}{4.cm}
\begin{figure}
\includegraphics[width=0.7\columnwidth]{images/comparison_properly_improperly_graph}
\end{figure}
\end{column}
\end{columns}
\vspace{0.5cm}
\url{https://en.wikipedia.org/wiki/Misleading_graph}
}
\end{frame}
\begin{frame}[fragile]
\frametitle{Graphische Darstellung von Daten}
\framesubtitle{Plotting Interfaces in Matlab}
Es gibt zwei Wege Graphen zu bearbeiten:
\begin{enumerate}
\item Interaktiv \"uber das \textit{graphische User Interface}\pause
\item Die Kommandozeile bzw. in Skripten und Funktionen.\pause
\end{enumerate}
Beides hat seine Berechtigung und seine eigenen Vor- und Nachteile. Welche?
\end{frame}
\begin{frame}
\frametitle{Graphische Darstellung von Daten}
\framesubtitle{Ver\"anderung des Graphen \"uber die Kommandozeile}
\begin{itemize}
\item Erstellt ein Skript, dass einen Plot erstellt.
\item Dieser soll zwei Sinus unterschiedlicher Frequenz darstellen.
\end{itemize}
Wir werden jetzt die Kommandozeile bzw. das Skript verbessern, um den
Plot ``sch\"oner'' zu machen.
\end{frame}
\begin{frame}
\frametitle{Graphische Darstellung von Daten}
\framesubtitle{Ver\"anderung des Graphen \"uber die Kommandozeile}
\begin{enumerate}
\item Einstellungen der Linienplots:
\begin{itemize}
\item St\"arke und Farbe.
\item Linienstil, Marker.
\end{itemize}\pause
\item Achsbeschriftung:
\begin{itemize}
\item \code{xlabel}, \code{ylabel}.
\item Schriftart und Gr\"o{\ss}e.
\end{itemize}\pause
\item Achsenskalierung und Ticks:
\begin{itemize}
\item Skalierung der Achsen (Minimum und Maximum, logarithmisch oder linear).
\item Manuelles Setzen der Ticks, ihrer Richtung und Beschriftung.
\item Grid or no Grid?
\end{itemize}\pause
\item Setzen von globalen Parametern:
\begin{itemize}
\item Einstellung der Papiergr\"o{\ss}e und Platzieren der
Zeichenfl\"ache.
\item Box oder nicht?
\item Speichern der Abbildung als pdf.
\end{itemize}
\end{enumerate}
\end{frame}
\begin{frame} [fragile]
\frametitle{Graphische Darstellung von Daten}
\framesubtitle{Ver\"andern von Eigenschaften \"uber die Kommandozeile}
\vspace{-0.75em}
\scriptsize
\begin{lstlisting}
fig = figure();
set(gcf, 'PaperUnits', 'centimeters', 'PaperSize', [11.7 9.0]);
set(gcf, 'PaperPosition',[0.0 0.0 11.7 9.0], 'Color', 'white')
hold on
plot(time, neuronal_data, 'color', [ 0.2 0.5 0.7], 'linewidth', 1.)
plot(spike_times, ones(size(spike_times))*threshold, 'ro', 'markersize', 4)
line([time(1) time(end)], [threshold threshold], 'linestyle', '--',
'linewidth', 0.75, 'color', [0.9 0.9 0.9])
ylim([0 35])
xlim([0 2.25])
box('off')
xlabel('time [s]', 'fontname', 'MyriadPro-Regular', 'fontsize', 10)
ylabel('potential [mV]', 'fontname', 'MyriadPro-Regular', 'fontsize', 10)
title('pyramidal cell', 'fontname', 'MyriadPro-Regular', 'fontsize', 12)
set(gca, 'TickDir','out', 'linewidth', 1.5, 'fontname', 'MyriadPro-Regular')
saveas(fig, 'spike_detection.pdf', 'pdf')
\end{lstlisting}
\end{frame}
\begin{frame} [fragile]
\frametitle{Graphische Darstellung von Daten}
\framesubtitle{Ver\"andern von Eigenschaften \"uber die Kommandozeile}
\begin{figure}
\centering
\includegraphics[width=0.75\columnwidth]{./images/spike_detection}
\end{figure}
\end{frame}
\begin{frame} [fragile]
\frametitle{Graphische Darstellung von Daten}
\framesubtitle{Welche Art Plot wof\"ur?}
\url{http://www.mathworks.de/discovery/gallery.html}
\end{frame}
\begin{frame} [fragile]
\frametitle{Graphische Darstellung von Daten}
\framesubtitle{Was macht einen guten Abbildung aus?}
\begin{enumerate}
\item Klarheit.
\item Vollst\"andige Beschriftung.
\item Deutliche Unterscheidbarkeit von Kurven.
\item Vermeidung von suggestiven Darstellungen.
\item Ausgewogenheit von Linienst\"arken, Schrift- und Plotgr\"o{\ss}e.
\item Fehlerbalken, wenn sie angebracht sind.
\end{enumerate}
\end{frame}
\begin{frame}[plain] \begin{frame}[plain]
\huge{2. Spiketrain Analyse I} \huge{2. Spiketrain Analyse I}
\end{frame} \end{frame}

View File

@ -59,7 +59,8 @@
% figures: % figures:
\setlength{\fboxsep}{0pt} \setlength{\fboxsep}{0pt}
\newcommand{\texpicture}[1]{{\sffamily\footnotesize\input{#1.tex}}} \newcommand{\texinputpath}{}
\newcommand{\texpicture}[1]{{\sffamily\footnotesize\input{\texinputpath#1.tex}}}
%\newcommand{\texpicture}[1]{\fbox{\sffamily\footnotesize\input{#1.tex}}} %\newcommand{\texpicture}[1]{\fbox{\sffamily\footnotesize\input{#1.tex}}}
%\newcommand{\texpicture}[1]{\setlength{\fboxsep}{2mm}\fbox{#1}} %\newcommand{\texpicture}[1]{\setlength{\fboxsep}{2mm}\fbox{#1}}
%\newcommand{\texpicture}[1]{} %\newcommand{\texpicture}[1]{}
@ -204,8 +205,9 @@
\newenvironment{definition}[1][]{\medskip\noindent\textbf{Definition}\ifthenelse{\equal{#1}{}}{}{ #1}:\newline}% \newenvironment{definition}[1][]{\medskip\noindent\textbf{Definition}\ifthenelse{\equal{#1}{}}{}{ #1}:\newline}%
{\medskip} {\medskip}
%%%%% exercises: %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\newcounter{maxexercise} \newcounter{maxexercise}
\setcounter{maxexercise}{9} % show listings up to exercise maxexercise \setcounter{maxexercise}{10} % show listings up to exercise maxexercise
\newcounter{theexercise} \newcounter{theexercise}
\setcounter{theexercise}{1} \setcounter{theexercise}{1}
\newcommand{\codepath}{} \newcommand{\codepath}{}
@ -213,7 +215,7 @@
\arabic{theexercise}:}\newline \newcommand{\exercisesource}{#1}}% \arabic{theexercise}:}\newline \newcommand{\exercisesource}{#1}}%
{\ifthenelse{\equal{\exercisesource}{}}{}{\ifthenelse{\value{theexercise}>\value{maxexercise}}{}{\medskip\lstinputlisting{\codepath\exercisesource}}}\medskip\stepcounter{theexercise}} {\ifthenelse{\equal{\exercisesource}{}}{}{\ifthenelse{\value{theexercise}>\value{maxexercise}}{}{\medskip\lstinputlisting{\codepath\exercisesource}}}\medskip\stepcounter{theexercise}}
\graphicspath{{statistics/lecture/}{statistics/lecture/figures/}{bootstrap/lecture/}{bootstrap/lecture/figures/}{likelihood/lecture/}{likelihood/lecture/figures/}} \graphicspath{{statistics/lecture/}{statistics/lecture/figures/}{bootstrap/lecture/}{bootstrap/lecture/figures/}{likelihood/lecture/}{likelihood/lecture/figures/}{pointprocesses/lecture/}{pointprocesses/lecture/figures/}}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@ -233,4 +235,8 @@
\renewcommand{\codepath}{likelihood/code/} \renewcommand{\codepath}{likelihood/code/}
\include{likelihood/lecture/likelihood} \include{likelihood/lecture/likelihood}
\renewcommand{\codepath}{pointprocesses/code/}
\renewcommand{\texinputpath}{pointprocesses/lecture/}
%\include{pointprocesses/lecture/pointprocesses}
\end{document} \end{document}

View File

@ -0,0 +1,7 @@
function y = boltzmann(parameter, x)
% parameter 1: alpha
% parameter 2: k
% parameter 3: x_0
% parameter 4: y_0
y = (parameter(1) ./ (1 + exp(-parameter(2) .* (x - parameter(3))))) + parameter(4);

View File

@ -0,0 +1,7 @@
function y = create_linear_data(x)
m = 2.5;
n = -0.35;
d = 2.5;
y = x .* m + n + randn(size(x)) .* d;

View File

@ -0,0 +1,7 @@
function param = estimate_regression(x,y,p_0)
objective_function = @(p)lsq_error(p, x, y);
param = fminunc(objective_function, p_0);
disp(param)
param1 = fminsearch(objective_function, p_0);
disp(param1)

View File

@ -0,0 +1,5 @@
function y = exponential(parameter, x)
% Function implements an exponential function with two parameters
% controlling the amplitude and the time constant.
y = parameter(1) .* exp(x./parameter(2));

View File

@ -0,0 +1,9 @@
function gradient = lsq_gradient_sigmoid(parameter, x, y)
h = 1e-6;
gradient = zeros(size(parameter));
for i = 1:length(parameter)
parameter_h = parameter;
parameter_h(i) = parameter_h(i) + h;
gradient(i) = (lsq_sigmoid_error(parameter_h, x, y) - lsq_sigmoid_error(parameter, x, y)) / h;
end

View File

@ -0,0 +1,8 @@
function error = lsq_sigmoid_error(parameter, x, y)
% p(1) the amplitude
% p(2) the slope
% p(3) the x-shift
% p(4) the y-shift
y_est = parameter(1)./(1+ exp(-parameter(2) .* (x - parameter(3)))) + parameter(4);
error = mean((y_est - y).^2);

View File

@ -0,0 +1,44 @@
%% fit the sigmoid
clear
close all
load('iv_curve.mat')
figure()
plot(voltage, current, 'o')
xlabel('voltage [mV]')
ylabel('current [pA]')
% amplitude, slope, x-shift, y-shift
%parameter = [10 0.25 -50, 2.5];
parameter = [20 0.5 -50, 2.5];
eps = 0.1;
% do the descent
gradient = [];
steps = 0;
error = [];
while isempty(gradient) || norm(gradient) > 0.01
steps = steps + 1;
gradient = lsq_gradient_sigmoid(parameter, voltage, current);
error(steps) = lsq_sigmoid_error(parameter, voltage, current);
parameter = parameter - eps .* gradient;
end
figure()  % show the error trajectory without overwriting the data plot
plot(1:steps, error)
disp('gradient descent done!')
disp(strcat('final position: ', num2str(parameter)))
disp(strcat('final error: ', num2str(error(end))))
%% use fminsearch
parameter = [10 0.5 -50, 2.5];
objective_function = @(p)lsq_sigmoid_error(p, voltage, current);
param = fminunc(objective_function, parameter);
disp(param)
param1 = fminsearch(objective_function, parameter);
disp(param1)

View File

@ -27,4 +27,4 @@ subplot(1, 2, 2);
plot(psigs, loglm); plot(psigs, loglm);
xlabel('standard deviation') xlabel('standard deviation')
ylabel('log likelihood') ylabel('log likelihood')
savefigpdf(gcf, 'mlestd.pdf', 12, 5); savefigpdf(gcf, 'mlestd.pdf', 15, 5);

Binary file not shown.

View File

@ -113,8 +113,10 @@ Absch\"atzung der Standardabweichung verdeutlichen.
\continue \continue
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\question \qt{Maximum-Likelihood-Sch\"atzer einer Ursprungsgeraden} \question \qt{Maximum-Likelihood-Sch\"atzer einer Ursprungsgeraden}
In der Vorlesung haben wir eine Gleichung f\"ur die Maximum-Likelihood In der Vorlesung haben wir folgende Formel f\"ur die Maximum-Likelihood
Absch\"atzung der Steigung einer Ursprungsgeraden hergeleitet. Absch\"atzung der Steigung $\theta$ einer Ursprungsgeraden durch $n$ Datenpunkte $(x_i|y_i)$ mit Standardabweichung $\sigma_i$ hergeleitet:
\[\theta = \frac{\sum_{i=1}^n \frac{x_iy_i}{\sigma_i^2}}{ \sum_{i=1}^n
\frac{x_i^2}{\sigma_i^2}} \]
\begin{parts} \begin{parts}
\part \label{mleslopefunc} Schreibe eine Funktion, die in einem $x$ und einem \part \label{mleslopefunc} Schreibe eine Funktion, die in einem $x$ und einem
$y$ Vektor die Datenpaare \"uberreicht bekommt und die Steigung der $y$ Vektor die Datenpaare \"uberreicht bekommt und die Steigung der
@ -146,13 +148,12 @@ nicht so einfach wie der Mittelwert und die Standardabweichung einer
Normalverteilung direkt aus den Daten berechnet werden k\"onnen. Solche Parameter Normalverteilung direkt aus den Daten berechnet werden k\"onnen. Solche Parameter
m\"ussen dann aus den Daten mit der Maximum-Likelihood-Methode gefittet werden. m\"ussen dann aus den Daten mit der Maximum-Likelihood-Methode gefittet werden.
Um dies zu veranschaulichen ziehen wir uns diesmal Zufallszahlen, die nicht einer Um dies zu veranschaulichen ziehen wir uns diesmal nicht normalverteilte Zufallszahlen, sondern Zufallszahlen aus der Gamma-Verteilung.
Normalverteilung entstammen, sonder aus der Gamma-Verteilung.
\begin{parts} \begin{parts}
\part \part
Finde heraus welche Funktion die Wahrscheinlichkeitsdichtefunktion Finde heraus welche \code{matlab} Funktion die
(probability density function) der Gamma-Verteilung in \code{matlab} Wahrscheinlichkeitsdichtefunktion (probability density function) der
berechnet. Gamma-Verteilung berechnet.
\part \part
Plotte mit Hilfe dieser Funktion die Wahrscheinlichkeitsdichtefunktion Plotte mit Hilfe dieser Funktion die Wahrscheinlichkeitsdichtefunktion
@ -169,17 +170,17 @@ Normalverteilung entstammen, sonder aus der Gamma-Verteilung.
\part \part
Finde heraus mit welcher \code{matlab}-Funktion eine beliebige Finde heraus mit welcher \code{matlab}-Funktion eine beliebige
Verteilung (``distribution'') und die Gammaverteilung an die Verteilung (``distribution'') an die Zufallszahlen nach der
Zufallszahlen nach der Maximum-Likelihood Methode gefittet werden Maximum-Likelihood Methode gefittet werden kann. Wie wird diese
kann. Funktion benutzt, um die Gammaverteilung an die Daten zu fitten?
\part \part
Bestimme mit dieser Funktion die Parameter der Bestimme mit dieser Funktion die Parameter der Gammaverteilung aus
Gammaverteilung aus den Zufallszahlen. den Zufallszahlen.
\part \part
Plotte anschlie{\ss}end Plotte anschlie{\ss}end die Gammaverteilung mit den gefitteten
die Gammaverteilung mit den gefitteten Parametern. Parametern.
\end{parts} \end{parts}
\begin{solution} \begin{solution}
\lstinputlisting{mlepdffit.m} \lstinputlisting{mlepdffit.m}

View File

@ -1,22 +1,29 @@
BASENAME=statistics BASENAME=statistics
PYFILES=$(wildcard *.py) PYFILES=$(wildcard *.py)
PYPDFFILES=$(PYFILES:.py=.pdf) PYPDFFILES=$(PYFILES:.py=.pdf)
pdf : $(BASENAME)-chapter.pdf $(PYPDFFILES) all : pdf
# script:
pdf : $(BASENAME)-chapter.pdf
$(BASENAME)-chapter.pdf : $(BASENAME)-chapter.tex $(BASENAME).tex $(BASENAME)-chapter.pdf : $(BASENAME)-chapter.tex $(BASENAME).tex $(PYPDFFILES)
pdflatex -interaction=scrollmode $< | tee /dev/stderr | fgrep -q "Rerun to get cross-references right" && pdflatex -interaction=scrollmode $< || true pdflatex -interaction=scrollmode $< | tee /dev/stderr | fgrep -q "Rerun to get cross-references right" && pdflatex -interaction=scrollmode $< || true
$(PYPDFFILES) : %.pdf : %.py $(PYPDFFILES) : %.pdf : %.py
python $< python $<
clean : clean :
rm -f *~ $(BASENAME)-chapter.aux $(BASENAME)-chapter.log $(BASENAME)-chapter.out $(BASENAME).aux $(BASENAME).log rm -f *~
rm -f $(BASENAME).aux $(BASENAME).log
rm -f $(BASENAME)-chapter.aux $(BASENAME)-chapter.log $(BASENAME)-chapter.out
rm -f $(PYPDFFILES) $(GPTTEXFILES)
cleanall : clean cleanall : clean
rm -f $(BASENAME)-chapter.pdf rm -f $(BASENAME)-chapter.pdf
watch : watchpdf :
while true; do ! make -q pdf && make pdf; sleep 0.5; done while true; do ! make -q pdf && make pdf; sleep 0.5; done

View File

@ -43,5 +43,5 @@ ax.annotate('maximum',
ax.boxplot( x, whis=100.0 ) ax.boxplot( x, whis=100.0 )
plt.tight_layout() plt.tight_layout()
plt.savefig('boxwhisker.pdf') plt.savefig('boxwhisker.pdf')
plt.show() #plt.show()

View File

@ -5,7 +5,6 @@ plt.xkcd()
fig = plt.figure( figsize=(6,5) ) fig = plt.figure( figsize=(6,5) )
n = 200 n = 200
for k, r in enumerate( [ 1.0, 0.6, 0.0, -0.9 ] ) : for k, r in enumerate( [ 1.0, 0.6, 0.0, -0.9 ] ) :
print r
x = np.random.randn( n ) x = np.random.randn( n )
y = r*x + np.sqrt(1.0-r*r)*np.random.randn( n ) y = r*x + np.sqrt(1.0-r*r)*np.random.randn( n )
ax = fig.add_subplot( 2, 2, k+1 ) ax = fig.add_subplot( 2, 2, k+1 )
@ -30,5 +29,4 @@ for k, r in enumerate( [ 1.0, 0.6, 0.0, -0.9 ] ) :
plt.tight_layout() plt.tight_layout()
plt.savefig('correlation.pdf') plt.savefig('correlation.pdf')
plt.show() #plt.show()

View File

@ -28,5 +28,4 @@ ax.set_ylabel( 'Probability' )
ax.hist([x2, x1], bins, normed=True, color=['#FFCC00', '#FFFF66' ]) ax.hist([x2, x1], bins, normed=True, color=['#FFCC00', '#FFFF66' ])
plt.tight_layout() plt.tight_layout()
fig.savefig( 'diehistograms.pdf' ) fig.savefig( 'diehistograms.pdf' )
plt.show() #plt.show()

View File

@ -29,5 +29,4 @@ ax.plot(x,g, 'b', lw=4)
ax.plot([0.0, 0.0], [0.0, 0.45], 'k', lw=2 ) ax.plot([0.0, 0.0], [0.0, 0.45], 'k', lw=2 )
plt.tight_layout() plt.tight_layout()
fig.savefig( 'median.pdf' ) fig.savefig( 'median.pdf' )
plt.show() #plt.show()

View File

@ -39,4 +39,4 @@ ax.scatter( x, z )
plt.tight_layout() plt.tight_layout()
plt.savefig('nonlincorrelation.pdf') plt.savefig('nonlincorrelation.pdf')
plt.show() #plt.show()

View File

@ -35,5 +35,5 @@ ax.hist(r, 20, normed=True, color='#FFCC00')
plt.tight_layout() plt.tight_layout()
fig.savefig( 'pdfhistogram.pdf' ) fig.savefig( 'pdfhistogram.pdf' )
plt.show() #plt.show()

View File

@ -32,5 +32,4 @@ ax.fill_between( x[(x>x1)&(x<x2)], 0.0, g[(x>x1)&(x<x2)], color='#cc0000' )
ax.plot(x,g, 'b', lw=4) ax.plot(x,g, 'b', lw=4)
plt.tight_layout() plt.tight_layout()
fig.savefig( 'pdfprobabilities.pdf' ) fig.savefig( 'pdfprobabilities.pdf' )
plt.show() #plt.show()

View File

@ -46,5 +46,4 @@ ax.plot([q[0], q[0]], [0.0, 0.4], 'k', lw=2 )
ax.plot([q[2], q[2]], [0.0, 0.4], 'k', lw=2 ) ax.plot([q[2], q[2]], [0.0, 0.4], 'k', lw=2 )
plt.tight_layout() plt.tight_layout()
fig.savefig( 'quartile.pdf' ) fig.savefig( 'quartile.pdf' )
plt.show() #plt.show()

View File

@ -0,0 +1,133 @@
\documentclass{beamer}
%%%%% title %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\title[]{Scientific Computing --- Statistics}
\author[]{Jan Benda}
\institute[]{Neuroethology}
\date[]{WS 14/15}
\titlegraphic{\includegraphics[width=0.3\textwidth]{UT_WBMW_Rot_RGB}}
%%%%% beamer %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\mode<presentation>
{
\usetheme{Singapore}
\setbeamercovered{opaque}
\usecolortheme{tuebingen}
\setbeamertemplate{navigation symbols}{}
\usefonttheme{default}
\useoutertheme{infolines}
% \useoutertheme{miniframes}
}
%\AtBeginSection[]
%{
% \begin{frame}<beamer>
% \begin{center}
% \Huge \insertsectionhead
% \end{center}
% \end{frame}
%}
\setbeamertemplate{blocks}[rounded][shadow=true]
\setcounter{tocdepth}{1}
%%%%% packages %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage[english]{babel}
\usepackage{amsmath}
\usepackage{bm}
\usepackage{pslatex} % nice font for pdf file
%\usepackage{multimedia}
\usepackage{dsfont}
\newcommand{\naZ}{\mathds{N}}
\newcommand{\gaZ}{\mathds{Z}}
\newcommand{\raZ}{\mathds{Q}}
\newcommand{\reZ}{\mathds{R}}
\newcommand{\reZp}{\mathds{R^+}}
\newcommand{\reZpN}{\mathds{R^+_0}}
\newcommand{\koZ}{\mathds{C}}
%%%% graphics %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage{graphicx}
\newcommand{\texpicture}[1]{{\sffamily\small\input{#1.tex}}}
%%%%% listings %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage{listings}
\lstset{
basicstyle=\ttfamily,
numbers=left,
showstringspaces=false,
language=Matlab,
commentstyle=\itshape\color{darkgray},
keywordstyle=\color{blue},
stringstyle=\color{green},
backgroundcolor=\color{blue!10},
breaklines=true,
breakautoindent=true,
columns=flexible,
frame=single,
captionpos=b,
xleftmargin=1em,
xrightmargin=1em,
aboveskip=10pt
}
\graphicspath{{figures/}}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{document}
\begin{frame}[plain]
\frametitle{}
\vspace{-1cm}
\titlepage % erzeugt Titelseite
\end{frame}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Content}
\tableofcontents
\end{frame}
\subsection{What is inferential statistics?}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{sources of error in an experiment}
\begin{task}{Think about it for 2 min}
If you repeat a scientific experiment, why do you not get the same
result every time you repeat it?
\end{task}
\pause
\begin{itemize}
\item sampling error (a finite subset of the population of interest
is selected in each experiment)
\item nonsampling errors (e.g. noise, uncontrolled factors)
\end{itemize}
\end{frame}
% ----------------------------------------------------------
\begin{frame}[fragile]
\frametitle{statisticians are lazy}
\Large
\only<1>{
\begin{center}
\includegraphics[width=.8\linewidth]{2012-10-29_16-26-05_771.jpg}
\end{center}
\mycite{Larry Gonick, The Cartoon Guide to Statistics}
}\pause
\only<2>{
\begin{center}
\includegraphics[width=.8\linewidth]{2012-10-29_16-41-39_523.jpg}
\end{center}
\mycite{Larry Gonick, The Cartoon Guide to Statistics}
}\pause
\only<3>{
\begin{center}
\includegraphics[width=.8\linewidth]{2012-10-29_16-29-35_312.jpg}
\end{center}
\mycite{Larry Gonick, The Cartoon Guide to Statistics}
}
\end{frame}