diff --git a/simulations/code/normaldata.m b/simulations/code/normaldata.m
index 7abdad3..96c9d69 100644
--- a/simulations/code/normaldata.m
+++ b/simulations/code/normaldata.m
@@ -1,8 +1,3 @@
-% getting familiar with the randn() function:
-randn(1, 3)
-randn(3, 1)
-randn(2, 4)
-
 % simulate tiger weights:
 mu = 220.0;    % mean and ...
 sigma = 40.0;  % ... standard deviation of the tigers in kg
diff --git a/simulations/code/normaldata.out b/simulations/code/normaldata.out
index 191dd9a..da35d98 100644
--- a/simulations/code/normaldata.out
+++ b/simulations/code/normaldata.out
@@ -1,21 +1,5 @@
 >> normaldata
 
-ans =
-
-  -0.89120   1.19863   0.95487
-
-ans =
-
-  -1.10001
-   0.79473
-   0.85979
-
-ans =
-
-  -1.19206   0.58278   1.70286  -1.28122
-  -0.19966  -1.85623   0.17962  -0.19272
-
-
 n=100:
   m=218kg, std= 39kg
   m=223kg, std= 39kg
diff --git a/simulations/code/randomnumbers.m b/simulations/code/randomnumbers.m
new file mode 100644
index 0000000..22432b3
--- /dev/null
+++ b/simulations/code/randomnumbers.m
@@ -0,0 +1,17 @@
+% getting familiar with the rand() function:
+rand(1, 3)
+rand(3, 1)
+rand(2, 4)
+
+% three times the same sequence of 10 random numbers:
+n = 10;
+for k = 1:3
+    rand(1, n)
+end
+
+% serial correlation at lag 1:
+n = 10000;
+x = rand(n, 1);
+r1 = corr(x(1:end-1), x(2:end));
+fprintf('correlation between subsequent random numbers: %.3f\n', r1);
+
diff --git a/simulations/code/randomnumbers.out b/simulations/code/randomnumbers.out
new file mode 100644
index 0000000..a28350f
--- /dev/null
+++ b/simulations/code/randomnumbers.out
@@ -0,0 +1,30 @@
+>> randomnumbers
+
+ans =
+
+   0.740875   0.193576   0.064584
+
+ans =
+
+   0.061028
+   0.695705
+   0.177097
+
+ans =
+
+   0.707430   0.404868   0.550246   0.393093
+   0.087565   0.473358   0.247850   0.161137
+
+ans =
+
+   0.350969   0.340726   0.145924   0.769714   0.203317   0.066427   0.451685   0.959766   0.850558   0.642769
+
+ans =
+
+   0.145262   0.175168   0.462693   0.089379   0.706870   0.353830   0.604305   0.405531   0.804180   0.253496
+
+ans =
+
+   0.647119   0.468534   0.484289   0.586001   0.851326   0.972554   0.014812   0.906628   0.982962   0.575003
+
+correlation between subsequent random numbers: 0.003
diff --git a/simulations/lecture/normaldata.py b/simulations/lecture/normaldata.py
index cfed154..e3cc622 100644
--- a/simulations/lecture/normaldata.py
+++ b/simulations/lecture/normaldata.py
@@ -2,7 +2,7 @@ import numpy as np
 import scipy.stats as st
 import matplotlib.pyplot as plt
 import matplotlib.gridspec as gridspec
-from plotstyle import colors, cm_size, show_spines, set_xlabel, set_ylabel, bar_fac
+from plotstyle import *
 
 if __name__ == "__main__":
     # wikipedia:
@@ -21,7 +21,7 @@ if __name__ == "__main__":
     ax1 = fig.add_subplot(spec[0, 0])
     show_spines(ax1, 'lb')
     ax1.scatter(indices, data, c=colors['blue'], edgecolor='white', s=50)
-    set_xlabel(ax1, 'index')
+    set_xlabel(ax1, 'Index')
     set_ylabel(ax1, 'Weight', 'kg')
     ax1.set_xlim(-10, 310)
     ax1.set_ylim(0, 370)
@@ -35,7 +35,7 @@ if __name__ == "__main__":
     bw = 20.0
     h, b = np.histogram(data, np.arange(0, 401, bw))
     ax2.barh(b[:-1], h/np.sum(h)/(b[1]-b[0]), fc=colors['yellow'], height=bar_fac*bw, align='edge')
-    set_xlabel(ax2, 'pdf', '1/kg')
+    set_xlabel(ax2, 'Pdf', '1/kg')
     ax2.set_xlim(0, 0.012)
     ax2.set_xticks([0, 0.005, 0.01])
     ax2.set_xticklabels(['0', '0.005', '0.01'])
diff --git a/simulations/lecture/randomnumbers.py b/simulations/lecture/randomnumbers.py
index 4f45135..7a2609a 100644
--- a/simulations/lecture/randomnumbers.py
+++ b/simulations/lecture/randomnumbers.py
@@ -1,7 +1,7 @@
 import numpy as np
 import matplotlib.pyplot as plt
 import matplotlib.gridspec as gridspec
-from plotstyle import colors, cm_size, show_spines
+from plotstyle import *
 
 if __name__ == "__main__":
     n = 21
diff --git a/simulations/lecture/simulations.tex b/simulations/lecture/simulations.tex
index f308a57..6d69396 100644
--- a/simulations/lecture/simulations.tex
+++ b/simulations/lecture/simulations.tex
@@ -51,8 +51,11 @@ exactly the same sequence of noise values. This is useful for plots
 that involve some random numbers but should look the same whenever
 the script is run.
 
-\begin{exercise}{}{}
-  Generate three times the same sequence of 20 uniformly distributed
+\begin{exercise}{randomnumbers.m}{randomnumbers.out}
+  First, read the documentation of the \varcode{rand()} function and
+  check its output for some (small) input arguments.
+
+  Generate three times the same sequence of 10 uniformly distributed
   numbers using the \code{rand()} and \code{rng()} functions.
 
   Generate 10\,000 uniformly distributed random numbers and compute
@@ -109,17 +112,15 @@ mean we just add the desired mean $\mu$ to the random numbers:
 \end{figure}
 
 \begin{exercise}{normaldata.m}{normaldata.out}
-  First, read the documentation of the \varcode{randn()} function and
-  check its output for some (small) input arguments. Write a little
-  script that generates $n=100$ normally distributed data simulating
-  the weight of Bengal tiger males with mean 220\,kg and standard
-  deviation 40\,kg. Check the actual mean and standard deviation of
-  the generated data. Do this, let's say, five times using a
-  for-loop. Then increase $n$ to 10\,000 and run the code again. It is
-  so simple to measure the weight of 10\,000 tigers for getting a
-  really good estimate of their mean weight, isn't it? Finally plot
-  from the last generated data set of tiger weights the first 1000
-  values as a function of their index.
+  Write a little script that generates $n=100$ normally distributed
+  data simulating the weight of Bengal tiger males with mean 220\,kg
+  and standard deviation 40\,kg. Check the actual mean and standard
+  deviation of the generated data. Do this, let's say, five times
+  using a for-loop. Then increase $n$ to 10\,000 and run the code
+  again. It is so simple to measure the weight of 10\,000 tigers for
+  getting a really good estimate of their mean weight, isn't it?
+  Finally plot from the last generated data set of tiger weights the
+  first 1000 values as a function of their index.
 \end{exercise}
 
 \subsection{Other probability densities}
@@ -136,12 +137,12 @@ gamma
 \begin{figure}[t]
   \includegraphics[width=1\textwidth]{staticnonlinearity}
   \titlecaption{\label{staticnonlinearityfig} Generating data
-    fluctuating around a function.}{The open probability of the
-    mechontransducer channel in hair cells of the inner ear is a
-    saturating function of the deflection of hairs (left, red line).
-    Measured data will fluctuate around this function (blue dots).
-    Ideally the residuals (yellow histogram) are normally distributed
-    (right, red line).}
+    fluctuating around a function.}{The conductance of the
+    mechanotransducer channels in hair cells of the inner ear is a
+    saturating function of the deflection of their hairs (left, red
+    line). Measured data will fluctuate around this function (blue
+    dots). Ideally the residuals (yellow histogram) are normally
+    distributed (right, red line).}
 \end{figure}
 
 Example: mechanotransduciton!
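
Note on the new randomnumbers.m: the for-loop prints three different sequences (see randomnumbers.out above), because the script never reseeds the generator. The reproducible-sequence part of the exercise, which asks for rand() together with rng(), could look like the following MATLAB sketch; the seed value 42 is arbitrary and not taken from the course material:

% three times the *same* sequence of 10 random numbers:
n = 10;
for k = 1:3
    rng(42);     % reset the generator to a fixed seed ...
    rand(1, n)   % ... so every pass prints the identical sequence
end
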
diff --git a/simulations/lecture/staticnonlinearity.py b/simulations/lecture/staticnonlinearity.py
index 169e104..737c723 100644
--- a/simulations/lecture/staticnonlinearity.py
+++ b/simulations/lecture/staticnonlinearity.py
@@ -2,10 +2,10 @@ import numpy as np
 import scipy.stats as st
 import matplotlib.pyplot as plt
 import matplotlib.gridspec as gridspec
-from plotstyle import colors, cm_size, show_spines, set_xlabel, set_ylabel, bar_fac
+from plotstyle import *
 
 def boltzmann(x, x0, k):
-    return 1.0/(1.0+np.exp(-k*(x-x0)))
+    return 8.0/(1.0+np.exp(-k*(x-x0)))
 
 if __name__ == "__main__":
     n = 50
@@ -13,7 +13,7 @@ if __name__ == "__main__":
     xmax = 18.0
     x0 = 2.0
     k = 0.25
-    sigma = 0.08
+    sigma = 0.6
     rng = np.random.RandomState(15281)
     x = np.linspace(xmin, xmax, n)
     y = boltzmann(x, x0, k) + sigma*rng.randn(len(x))
@@ -28,28 +28,25 @@ if __name__ == "__main__":
     ax1.plot(xx, yy, colors['red'], lw=2)
     ax1.scatter(x, y, c=colors['blue'], edgecolor='white', s=50)
     set_xlabel(ax1, 'Hair deflection', 'nm')
-    set_ylabel(ax1, 'Open probability')
+    set_ylabel(ax1, 'Conductance', 'nS')
     ax1.set_xlim(-20, 20)
-    ax1.set_ylim(-0.2, 1.17)
+    ax1.set_ylim(-1.5, 9.5)
     ax1.set_xticks(np.arange(-20.0, 21.0, 10.0))
-    ax1.set_yticks(np.arange(-0.2, 1.1, 0.2))
+    ax1.set_yticks(np.arange(0, 9, 2))
 
     ax2 = fig.add_subplot(spec[0, 1])
     show_spines(ax2, 'lb')
-    xg = np.linspace(-1.0, 1.01, 200)
+    xg = np.linspace(-3.0, 3.01, 200)
     yg = st.norm.pdf(xg, 0.0, sigma)
     ax2.plot(xg, yg, colors['red'], lw=2)
-    bw = 0.05
-    h, b = np.histogram(y-boltzmann(x, x0, k), np.arange(-1.0, 1.01, bw))
+    bw = 0.25
+    h, b = np.histogram(y-boltzmann(x, x0, k), np.arange(-3.0, 3.01, bw))
     ax2.bar(b[:-1], h/np.sum(h)/(b[1]-b[0]), fc=colors['yellow'], width=bar_fac*bw, align='edge')
-    set_xlabel(ax2, 'residuals', 'nm')
-    set_ylabel(ax2, 'pdf')
-    ax2.set_xlim(-0.3, 0.3)
-    ax2.set_ylim(0, 5.05)
-    #ax2.set_xticks([0, 0.005, 0.01])
-    #ax2.set_xticklabels(['0', '0.005', '0.01'])
-    #ax2.set_yticks(np.arange(0, 351, 100))
-    #ax2.set_yticklabels([])
+    set_xlabel(ax2, 'Residuals', 'nS')
+    set_ylabel(ax2, 'Pdf', '1/nS')
+    ax2.set_xlim(-2.5, 2.5)
+    ax2.set_ylim(0, 0.75)
+    ax2.set_yticks(np.arange(0, 0.75, 0.2))
 
     fig.savefig("staticnonlinearity.pdf")
     plt.close()
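
The figure script above generates data that fluctuate around a saturating Boltzmann function, now scaled to a maximum conductance of 8 nS with noise of sigma = 0.6 nS. A minimal MATLAB sketch of the same simulation, using the parameter values from this patch; the lower end of the deflection range is assumed (only xmax = 18.0 is visible in the hunk) and the variable names are chosen for illustration:

% saturating Boltzmann function with a maximum conductance of 8 nS:
boltzmann = @(x, x0, k) 8.0 ./ (1.0 + exp(-k*(x - x0)));
n = 50;                          % number of simulated measurements
x = linspace(-18.0, 18.0, n);    % hair deflections in nm (lower bound assumed)
x0 = 2.0;                        % deflection of half-maximum conductance in nm
k = 0.25;                        % steepness of the Boltzmann function
sigma = 0.6;                     % standard deviation of the noise in nS
y = boltzmann(x, x0, k) + sigma*randn(1, n);   % noisy conductance data
residuals = y - boltzmann(x, x0, k);           % ideally normally distributed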