[simulations] added exercise randomnumbers

Jan Benda 2019-12-27 21:15:46 +01:00
parent 6604261978
commit 2a0e1adff8
8 changed files with 85 additions and 61 deletions

View File

@@ -1,8 +1,3 @@
-% getting familiar with the randn() function:
-randn(1, 3)
-randn(3, 1)
-randn(2, 4)
% simulate tiger weights:
mu = 220.0; % mean and ...
sigma = 40.0; % ... standard deviation of the tigers in kg

View File

@@ -1,21 +1,5 @@
>> normaldata
-ans =
-  -0.89120   1.19863   0.95487
-ans =
-  -1.10001
-   0.79473
-   0.85979
-ans =
-  -1.19206   0.58278   1.70286  -1.28122
-  -0.19966  -1.85623   0.17962  -0.19272
n=100:
m=218kg, std= 39kg
m=223kg, std= 39kg

View File

@@ -0,0 +1,17 @@
% getting familiar with the rand() function:
rand(1, 3)
rand(3, 1)
rand(2, 4)

% three times the same sequence of 10 random numbers:
n = 10;
for k = 1:3
    rand(1, n)
end

% serial correlation at lag 1 (should be close to zero for independent draws):
n = 10000;
x = rand(n, 1);
r1 = corr(x(1:end-1), x(2:end));
fprintf('correlation between subsequent random numbers: %.3f\n', r1);
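Note that the loop above does not reset the state of the random number generator, so it prints three different sequences (see the output below). To get literally the same sequence three times, as the accompanying exercise text asks, the generator can be re-seeded with rng() before each call. A minimal sketch, with an arbitrary seed value:

% re-seeding makes every iteration reproduce the identical sequence:
n = 10;
for k = 1:3
    rng(1);      % reset the generator to the same (arbitrary) seed
    rand(1, n)
end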

View File

@@ -0,0 +1,30 @@
>> randomnumbers
ans =
0.740875 0.193576 0.064584
ans =
0.061028
0.695705
0.177097
ans =
0.707430 0.404868 0.550246 0.393093
0.087565 0.473358 0.247850 0.161137
ans =
0.350969 0.340726 0.145924 0.769714 0.203317 0.066427 0.451685 0.959766 0.850558 0.642769
ans =
0.145262 0.175168 0.462693 0.089379 0.706870 0.353830 0.604305 0.405531 0.804180 0.253496
ans =
0.647119 0.468534 0.484289 0.586001 0.851326 0.972554 0.014812 0.906628 0.982962 0.575003
correlation between subsequent random numbers: 0.003

View File

@@ -2,7 +2,7 @@ import numpy as np
import scipy.stats as st
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
-from plotstyle import colors, cm_size, show_spines, set_xlabel, set_ylabel, bar_fac
+from plotstyle import *

if __name__ == "__main__":
    # wikipedia:
@@ -21,7 +21,7 @@ if __name__ == "__main__":
    ax1 = fig.add_subplot(spec[0, 0])
    show_spines(ax1, 'lb')
    ax1.scatter(indices, data, c=colors['blue'], edgecolor='white', s=50)
-    set_xlabel(ax1, 'index')
+    set_xlabel(ax1, 'Index')
    set_ylabel(ax1, 'Weight', 'kg')
    ax1.set_xlim(-10, 310)
    ax1.set_ylim(0, 370)
@@ -35,7 +35,7 @@ if __name__ == "__main__":
    bw = 20.0
    h, b = np.histogram(data, np.arange(0, 401, bw))
    ax2.barh(b[:-1], h/np.sum(h)/(b[1]-b[0]), fc=colors['yellow'], height=bar_fac*bw, align='edge')
-    set_xlabel(ax2, 'pdf', '1/kg')
+    set_xlabel(ax2, 'Pdf', '1/kg')
    ax2.set_xlim(0, 0.012)
    ax2.set_xticks([0, 0.005, 0.01])
    ax2.set_xticklabels(['0', '0.005', '0.01'])

View File

@@ -1,7 +1,7 @@
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
-from plotstyle import colors, cm_size, show_spines
+from plotstyle import *

if __name__ == "__main__":
    n = 21

View File

@@ -51,8 +51,11 @@ exactly the same sequence of noise values. This is useful for plots
that involve some random numbers but should look the same whenever the
script is run.

-\begin{exercise}{}{}
-Generate three times the same sequence of 20 uniformly distributed
+\begin{exercise}{randomnumbers.m}{randomnumbers.out}
+First, read the documentation of the \varcode{rand()} function and
+check its output for some (small) input arguments.
+
+Generate three times the same sequence of 10 uniformly distributed
numbers using the \code{rand()} and \code{rng()} functions.

Generate 10\,000 uniformly distributed random numbers and compute
@@ -109,17 +112,15 @@ mean we just add the desired mean $\mu$ to the random numbers:
\end{figure}

\begin{exercise}{normaldata.m}{normaldata.out}
-First, read the documentation of the \varcode{randn()} function and
-check its output for some (small) input arguments. Write a little
-script that generates $n=100$ normally distributed data simulating
-the weight of Bengal tiger males with mean 220\,kg and standard
-deviation 40\,kg. Check the actual mean and standard deviation of
-the generated data. Do this, let's say, five times using a
-for-loop. Then increase $n$ to 10\,000 and run the code again. It is
-so simple to measure the weight of 10\,000 tigers for getting a
-really good estimate of their mean weight, isn't it? Finally plot
-from the last generated data set of tiger weights the first 1000
-values as a function of their index.
+Write a little script that generates $n=100$ normally distributed
+data simulating the weight of Bengal tiger males with mean 220\,kg
+and standard deviation 40\,kg. Check the actual mean and standard
+deviation of the generated data. Do this, let's say, five times
+using a for-loop. Then increase $n$ to 10\,000 and run the code
+again. It is so simple to measure the weight of 10\,000 tigers for
+getting a really good estimate of their mean weight, isn't it?
+Finally plot from the last generated data set of tiger weights the
+first 1000 values as a function of their index.
\end{exercise}
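For reference, a minimal MATLAB sketch of what this exercise asks for. The mu and sigma values match the lines shown in the first file of this commit (presumably normaldata.m), but the loop and the print format are illustrative assumptions, not the actual file content:

mu = 220.0;                          % mean weight in kg
sigma = 40.0;                        % standard deviation in kg
n = 100;                             % number of simulated tigers
fprintf('n=%d:\n', n);
for k = 1:5
    x = randn(n, 1)*sigma + mu;      % scale and shift standard normal numbers
    fprintf('m=%3.0fkg, std=%3.0fkg\n', mean(x), std(x));
end
plot(x(1:min(1000, length(x))), '.');  % first 1000 values against their index
xlabel('index');
ylabel('weight [kg]');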
\subsection{Other probability densities}
@@ -136,12 +137,12 @@ gamma
\begin{figure}[t]
\includegraphics[width=1\textwidth]{staticnonlinearity}
\titlecaption{\label{staticnonlinearityfig} Generating data
-fluctuating around a function.}{The open probability of the
-mechontransducer channel in hair cells of the inner ear is a
-saturating function of the deflection of hairs (left, red line).
-Measured data will fluctuate around this function (blue dots).
-Ideally the residuals (yellow histogram) are normally distributed
-(right, red line).}
+fluctuating around a function.}{The conductance of the
+mechontransducer channels in hair cells of the inner ear is a
+saturating function of the deflection of their hairs (left, red
+line). Measured data will fluctuate around this function (blue
+dots). Ideally the residuals (yellow histogram) are normally
+distributed (right, red line).}
\end{figure}

Example: mechanotransduciton!
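The Python script in the next file produces this figure. The underlying simulation is simply a saturating function plus normally distributed noise; a minimal MATLAB sketch of the same idea, using the parameter values of the script below (xmin is not shown in the diff and is assumed to mirror xmax):

x0 = 2.0;                              % position of half-maximal conductance in nm
k = 0.25;                              % steepness of the Boltzmann function in 1/nm
sigma = 0.6;                           % standard deviation of the measurement noise in nS
x = linspace(-18.0, 18.0, 50);         % hair deflections in nm
g = 8.0 ./ (1.0 + exp(-k*(x - x0)));   % saturating Boltzmann function, maximum 8 nS
y = g + sigma*randn(size(x));          % simulated measurements scatter around it
residuals = y - g;                     % ideally normally distributed with std sigma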

View File

@@ -2,10 +2,10 @@ import numpy as np
import scipy.stats as st
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
-from plotstyle import colors, cm_size, show_spines, set_xlabel, set_ylabel, bar_fac
+from plotstyle import *

def boltzmann(x, x0, k):
-    return 1.0/(1.0+np.exp(-k*(x-x0)))
+    return 8.0/(1.0+np.exp(-k*(x-x0)))

if __name__ == "__main__":
    n = 50
@@ -13,7 +13,7 @@ if __name__ == "__main__":
    xmax = 18.0
    x0 = 2.0
    k = 0.25
-    sigma = 0.08
+    sigma = 0.6
    rng = np.random.RandomState(15281)
    x = np.linspace(xmin, xmax, n)
    y = boltzmann(x, x0, k) + sigma*rng.randn(len(x))
@@ -28,28 +28,25 @@ if __name__ == "__main__":
    ax1.plot(xx, yy, colors['red'], lw=2)
    ax1.scatter(x, y, c=colors['blue'], edgecolor='white', s=50)
    set_xlabel(ax1, 'Hair deflection', 'nm')
-    set_ylabel(ax1, 'Open probability')
+    set_ylabel(ax1, 'Conductance', 'nS')
    ax1.set_xlim(-20, 20)
-    ax1.set_ylim(-0.2, 1.17)
+    ax1.set_ylim(-1.5, 9.5)
    ax1.set_xticks(np.arange(-20.0, 21.0, 10.0))
-    ax1.set_yticks(np.arange(-0.2, 1.1, 0.2))
+    ax1.set_yticks(np.arange(0, 9, 2))

    ax2 = fig.add_subplot(spec[0, 1])
    show_spines(ax2, 'lb')
-    xg = np.linspace(-1.0, 1.01, 200)
+    xg = np.linspace(-3.0, 3.01, 200)
    yg = st.norm.pdf(xg, 0.0, sigma)
    ax2.plot(xg, yg, colors['red'], lw=2)
-    bw = 0.05
-    h, b = np.histogram(y-boltzmann(x, x0, k), np.arange(-1.0, 1.01, bw))
+    bw = 0.25
+    h, b = np.histogram(y-boltzmann(x, x0, k), np.arange(-3.0, 3.01, bw))
    ax2.bar(b[:-1], h/np.sum(h)/(b[1]-b[0]), fc=colors['yellow'], width=bar_fac*bw, align='edge')
-    set_xlabel(ax2, 'residuals', 'nm')
-    set_ylabel(ax2, 'pdf')
-    ax2.set_xlim(-0.3, 0.3)
-    ax2.set_ylim(0, 5.05)
-    #ax2.set_xticks([0, 0.005, 0.01])
-    #ax2.set_xticklabels(['0', '0.005', '0.01'])
-    #ax2.set_yticks(np.arange(0, 351, 100))
-    #ax2.set_yticklabels([])
+    set_xlabel(ax2, 'Residuals', 'nS')
+    set_ylabel(ax2, 'Pdf', '1/nS')
+    ax2.set_xlim(-2.5, 2.5)
+    ax2.set_ylim(0, 0.75)
+    ax2.set_yticks(np.arange(0, 0.75, 0.2))
    fig.savefig("staticnonlinearity.pdf")
    plt.close()