Reorganized the folders and started a common script for the lectures.
This commit is contained in:
BIN
regression/code/iv_curve.mat
Normal file
BIN
regression/code/iv_curve.mat
Normal file
Binary file not shown.
BIN
regression/code/lin_regression.mat
Normal file
BIN
regression/code/lin_regression.mat
Normal file
Binary file not shown.
6
regression/code/lsq_error.m
Normal file
6
regression/code/lsq_error.m
Normal file
@@ -0,0 +1,6 @@
|
||||
function mse = lsq_error(parameter, x, y)
% LSQ_ERROR Mean squared error of a linear model on the data (x, y).
%   parameter(1) is the slope
%   parameter(2) is the intercept
%   x, y are the observed inputs and outputs (same size).
% Returns the mean of the squared residuals between the line and y.
% NOTE: output variable renamed from "error" so the built-in error()
% function is not shadowed inside this scope.

f_x = x .* parameter(1) + parameter(2);
mse = mean((f_x - y).^2);
|
||||
7
regression/code/lsq_gradient.m
Normal file
7
regression/code/lsq_gradient.m
Normal file
@@ -0,0 +1,7 @@
|
||||
function gradient = lsq_gradient(parameter, x, y)
% LSQ_GRADIENT Numerical gradient of lsq_error at the given parameters.
%   parameter -- [slope, intercept]
%   x, y      -- the data on which the error is evaluated
% Returns a 1x2 vector of forward finite-difference partial derivatives.
h = 1e-6;

% evaluate the unperturbed error only once instead of once per partial
base_error = lsq_error(parameter, x, y);

partial_m = (lsq_error([parameter(1) + h, parameter(2)], x, y) - base_error) / h;
partial_n = (lsq_error([parameter(1), parameter(2) + h], x, y) - base_error) / h;

gradient = [partial_m, partial_n];
|
||||
9
regression/code/lsq_gradient_sigmoid.m
Normal file
9
regression/code/lsq_gradient_sigmoid.m
Normal file
@@ -0,0 +1,9 @@
|
||||
function gradient = lsq_gradient_sigmoid(parameter, x, y)
% LSQ_GRADIENT_SIGMOID Numerical gradient of lsq_sigmoid_error.
%   parameter -- sigmoid parameters [amplitude, slope, x-shift, y-shift]
%   x, y      -- the data on which the error is evaluated
% Returns a vector of forward finite-difference partials, one per parameter.
h = 1e-6;

% hoist the unperturbed error out of the loop: it does not change
% between iterations, so evaluating it once halves the work
base_error = lsq_sigmoid_error(parameter, x, y);

gradient = zeros(size(parameter));
for i = 1:length(parameter)
    parameter_h = parameter;
    parameter_h(i) = parameter_h(i) + h;
    gradient(i) = (lsq_sigmoid_error(parameter_h, x, y) - base_error) / h;
end
|
||||
8
regression/code/lsq_sigmoid_error.m
Normal file
8
regression/code/lsq_sigmoid_error.m
Normal file
@@ -0,0 +1,8 @@
|
||||
function error = lsq_sigmoid_error(parameter, x, y)
% LSQ_SIGMOID_ERROR Mean squared error of a sigmoid model on (x, y).
%   parameter(1) -- the amplitude
%   parameter(2) -- the slope
%   parameter(3) -- the x-shift
%   parameter(4) -- the y-shift

% unpack for readability
amplitude = parameter(1);
slope     = parameter(2);
x_shift   = parameter(3);
y_shift   = parameter(4);

% evaluate the sigmoid at all x and average the squared residuals
y_est = amplitude ./ (1 + exp(-slope .* (x - x_shift))) + y_shift;
error = mean((y_est - y).^2);
|
||||
BIN
regression/code/membraneVoltage.mat
Normal file
BIN
regression/code/membraneVoltage.mat
Normal file
Binary file not shown.
112
regression/code/plot_error_surface.m
Normal file
112
regression/code/plot_error_surface.m
Normal file
@@ -0,0 +1,112 @@
|
||||
% Visualisation of the least-squares error surface of a linear model
% and of gradient descent on that surface.
% Requires: lin_regression.mat, lsq_error.m, lsq_gradient.m
clear
close all

%% first, plot the raw data
load('lin_regression.mat');

figure()
plot(x, y, 'o')
xlabel('Input')
ylabel('Output')

%% plot the error surface
clear
load('lin_regression.mat')
ms = -5:0.25:5;   % slopes to evaluate
ns = -30:1:30;    % intercepts to evaluate

error_surf = zeros(length(ms), length(ns));

for i = 1:length(ms)
    for j = 1:length(ns)
        error_surf(i,j) = lsq_error([ms(i), ns(j)], x, y);
    end
end

% plot the error surface
figure()
[N, M] = meshgrid(ns, ms);
s = surface(M, N, error_surf);
xlabel('slope')
ylabel('intercept')
zlabel('error')
view(3)
% rotate(s, [1 1 0], 25 )

%% Plot the gradient at different points in the surface
clear
load('lin_regression.mat')

ms = -1:0.5:5;
ns = -10:1:10;

error_surf = zeros(length(ms), length(ns));
gradient_m = zeros(size(error_surf));
gradient_n = zeros(size(error_surf));

for i = 1:length(ms)
    for j = 1:length(ns)
        error_surf(i,j) = lsq_error([ms(i), ns(j)], x, y);
        grad = lsq_gradient([ms(i), ns(j)], x, y);
        gradient_m(i,j) = grad(1);
        gradient_n(i,j) = grad(2);
    end
end

figure()
hold on
[N, M] = meshgrid(ns, ms);
surface(M, N, error_surf, 'FaceAlpha', 0.5);
contour(M, N, error_surf, 50);
quiver(M, N, gradient_m, gradient_n)
view(3)
xlabel('slope')
ylabel('intercept')
zlabel('error')

%% do the gradient descent
clear
close all

load('lin_regression.mat')

ms = -1:0.5:5;
ns = -10:1:10;

position = [-2. 10.];   % starting point [slope, intercept]
gradient = [];
err = [];               % renamed from "error": avoid shadowing built-in error()
eps_step = 0.01;        % learning rate; renamed from "eps": avoid shadowing built-in eps

% calculate error surface
error_surf = zeros(length(ms), length(ns));
for i = 1:length(ms)
    for j = 1:length(ns)
        error_surf(i,j) = lsq_error([ms(i), ns(j)], x, y);
    end
end
figure()
hold on
[N, M] = meshgrid(ns, ms);
surface(M, N, error_surf, 'FaceAlpha', 0.5);
view(3)
xlabel('slope')
ylabel('intercept')   % was 'intersection'; this axis is the intercept n
zlabel('error')

% do the descent; cap the iterations so a badly chosen learning rate
% cannot hang the script in an endless loop
max_steps = 10000;
steps = 0;
while (isempty(gradient) || norm(gradient) > 0.1) && steps < max_steps
    steps = steps + 1;
    gradient = lsq_gradient(position, x, y);
    err = lsq_error(position, x, y);
    plot3(position(1), position(2), err, 'o', 'color', 'red')
    position = position - eps_step .* gradient;
    pause(0.25)
end
disp('gradient descent done!')
disp(strcat('final position: ', num2str(position)))
disp(strcat('final error: ', num2str(err)))
|
||||
|
||||
|
||||
|
||||
44
regression/code/sigmoidal_gradient_descent.m
Normal file
44
regression/code/sigmoidal_gradient_descent.m
Normal file
@@ -0,0 +1,44 @@
|
||||
|
||||
|
||||
%% fit the sigmoid
% Gradient descent fit of a sigmoid to an IV curve, then comparison
% with MATLAB's built-in optimizers.
% Requires: iv_curve.mat, lsq_sigmoid_error.m, lsq_gradient_sigmoid.m

clear
close all

load('iv_curve.mat')

figure()
plot(voltage, current, 'o')
xlabel('voltage [mV]')   % typo "voltate" fixed
ylabel('current [pA]')

% amplitude, slope, x-shift, y-shift
%parameter = [10 0.25 -50, 2.5];
parameter = [20 0.5 -50, 2.5];

eps_step = 0.1;   % learning rate; renamed from "eps": avoid shadowing built-in eps
% do the descent
gradient = [];
steps = 0;
err = [];         % error history; renamed from "error": avoid shadowing built-in error()

while isempty(gradient) || norm(gradient) > 0.01
    steps = steps + 1;
    gradient = lsq_gradient_sigmoid(parameter, voltage, current);
    err(steps) = lsq_sigmoid_error(parameter, voltage, current);
    parameter = parameter - eps_step .* gradient;
end

% open a new figure so the error trace does not overwrite the data plot
figure()
plot(1:steps, err)
xlabel('step')
ylabel('error')

disp('gradient descent done!')
disp(strcat('final position: ', num2str(parameter)))
disp(strcat('final error: ', num2str(err(end))))

%% use fminsearch
parameter = [10 0.5 -50, 2.5];

objective_function = @(p)lsq_sigmoid_error(p, voltage, current);
% NOTE(review): fminunc is part of the Optimization Toolbox; fminsearch
% below is available in base MATLAB
param = fminunc(objective_function, parameter);
disp(param)
param1 = fminsearch(objective_function, parameter);
disp(param1)
|
||||
22
regression/lecture/Makefile
Normal file
22
regression/lecture/Makefile
Normal file
@@ -0,0 +1,22 @@
|
||||
# Build the linear-regression lecture: the LaTeX slides plus one PDF
# figure per Python script found in this directory.
BASENAME=linear_regression
PYFILES=$(wildcard *.py)
PYPDFFILES=$(PYFILES:.py=.pdf)

# default target: slides and all generated figures
pdf : $(BASENAME).pdf $(PYPDFFILES)

# run pdflatex a second time only when it reports stale cross-references
$(BASENAME).pdf : $(BASENAME).tex
	pdflatex -interaction=scrollmode $< | tee /dev/stderr | fgrep -q "Rerun to get cross-references right" && pdflatex -interaction=scrollmode $< || true

# each figure PDF is produced by running its Python script
$(PYPDFFILES) : %.pdf : %.py
	python $<

# remove LaTeX by-products and editor backups (keeps the PDFs)
clean :
	rm -f *~ $(BASENAME).aux $(BASENAME).log $(BASENAME).out $(BASENAME).toc $(BASENAME).nav $(BASENAME).snm $(BASENAME).vrb

cleanall : clean
	rm -f $(BASENAME).pdf

# rebuild automatically whenever a prerequisite changes
watch :
	while true; do ! make -q pdf && make pdf; sleep 0.5; done
|
||||
|
||||
|
||||
61
regression/lecture/beamercolorthemetuebingen.sty
Normal file
61
regression/lecture/beamercolorthemetuebingen.sty
Normal file
@@ -0,0 +1,61 @@
|
||||
% Beamer color theme in the corporate-design colors of the
% University of Tuebingen, derived from a Till Tantau beamer theme.
% Copyright 2007 by Till Tantau
%
% This file may be distributed and/or modified
%
% 1. under the LaTeX Project Public License and/or
% 2. under the GNU Public License.
%
% See the file doc/licenses/LICENSE for more details.

\usepackage{color}
% corporate-design colors
\definecolor{karminrot}{RGB}{165,30,55}
\definecolor{gold}{RGB}{180,160,105}
\definecolor{anthrazit}{RGB}{50 ,65 ,75 }

\mode<presentation>

\setbeamercolor*{normal text}{fg=anthrazit,bg=white}
\setbeamercolor*{alerted text}{fg=anthrazit}
\setbeamercolor*{example text}{fg=anthrazit}
\setbeamercolor*{structure}{fg=gold,bg=karminrot}

% render alerted text in bold, but emit a warning instead of
% failing when the alert occurs inside math mode
\providecommand*{\beamer@bftext@only}{%
\relax
\ifmmode
\expandafter\beamer@bftext@warning
\else
\expandafter\bfseries
\fi
}
\providecommand*{\beamer@bftext@warning}{%
\ClassWarning{beamer}
{Cannot use bold for alerted text in math mode}%
}

\setbeamerfont{alerted text}{series=\beamer@bftext@only}

\setbeamercolor{palette primary}{fg=karminrot,bg=white}
\setbeamercolor{palette secondary}{fg=gold,bg=white}
\setbeamercolor{palette tertiary}{fg=anthrazit,bg=white}
\setbeamercolor{palette quaternary}{fg=black,bg=white}

\setbeamercolor{sidebar}{bg=karminrot!100}

\setbeamercolor{palette sidebar primary}{fg=karminrot}
\setbeamercolor{palette sidebar secondary}{fg=karminrot}
\setbeamercolor{palette sidebar tertiary}{fg=karminrot}
\setbeamercolor{palette sidebar quaternary}{fg=karminrot}

\setbeamercolor{item projected}{fg=black,bg=black!20}

% blocks inherit the structure/alert colors for their titles
\setbeamercolor*{block body}{}
\setbeamercolor*{block body alerted}{}
\setbeamercolor*{block body example}{}
\setbeamercolor*{block title}{parent=structure}
\setbeamercolor*{block title alerted}{parent=alerted text}
\setbeamercolor*{block title example}{parent=example text}

\setbeamercolor*{titlelike}{parent=structure}

\mode
<all>
|
||||
BIN
regression/lecture/figures/charging_curve.pdf
Normal file
BIN
regression/lecture/figures/charging_curve.pdf
Normal file
Binary file not shown.
BIN
regression/lecture/figures/lin_regress.pdf
Normal file
BIN
regression/lecture/figures/lin_regress.pdf
Normal file
Binary file not shown.
BIN
regression/lecture/figures/lin_regress_abscissa.pdf
Normal file
BIN
regression/lecture/figures/lin_regress_abscissa.pdf
Normal file
Binary file not shown.
BIN
regression/lecture/figures/lin_regress_slope.pdf
Normal file
BIN
regression/lecture/figures/lin_regress_slope.pdf
Normal file
Binary file not shown.
BIN
regression/lecture/figures/linear_least_squares.pdf
Normal file
BIN
regression/lecture/figures/linear_least_squares.pdf
Normal file
Binary file not shown.
BIN
regression/lecture/figures/one_d_problem_a.pdf
Normal file
BIN
regression/lecture/figures/one_d_problem_a.pdf
Normal file
Binary file not shown.
BIN
regression/lecture/figures/one_d_problem_b.pdf
Normal file
BIN
regression/lecture/figures/one_d_problem_b.pdf
Normal file
Binary file not shown.
BIN
regression/lecture/figures/one_d_problem_c.pdf
Normal file
BIN
regression/lecture/figures/one_d_problem_c.pdf
Normal file
Binary file not shown.
BIN
regression/lecture/figures/surface.pdf
Normal file
BIN
regression/lecture/figures/surface.pdf
Normal file
Binary file not shown.
454
regression/lecture/linear_regression.tex
Normal file
454
regression/lecture/linear_regression.tex
Normal file
@@ -0,0 +1,454 @@
|
||||
\documentclass{beamer}
|
||||
\usepackage{xcolor}
|
||||
\usepackage{listings}
|
||||
\usepackage{pgf}
|
||||
%\usepackage{pgf,pgfarrows,pgfnodes,pgfautomata,pgfheaps,pgfshade}
|
||||
%\usepackage{multimedia}
|
||||
|
||||
\usepackage[english]{babel}
|
||||
\usepackage{movie15}
|
||||
\usepackage[latin1]{inputenc}
|
||||
\usepackage{times}
|
||||
\usepackage{amsmath}
|
||||
\usepackage{bm}
|
||||
\usepackage[T1]{fontenc}
|
||||
\usepackage[scaled=.90]{helvet}
|
||||
\usepackage{scalefnt}
|
||||
\usepackage{tikz}
|
||||
\usepackage{ textcomp }
|
||||
\usepackage{soul}
|
||||
\usepackage{hyperref}
|
||||
\definecolor{lightblue}{rgb}{.7,.7,1.}
|
||||
\definecolor{mygreen}{rgb}{0,1.,0}
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
\mode<presentation>
|
||||
{
|
||||
\usetheme{Singapore}
|
||||
\setbeamercovered{opaque}
|
||||
\usecolortheme{tuebingen}
|
||||
\setbeamertemplate{navigation symbols}{}
|
||||
\usefonttheme{default}
|
||||
\useoutertheme{infolines}
|
||||
% \useoutertheme{miniframes}
|
||||
}
|
||||
|
||||
\AtBeginSection[]
|
||||
{
|
||||
\begin{frame}<beamer>
|
||||
\begin{center}
|
||||
\Huge \insertsectionhead
|
||||
\end{center}
|
||||
% \frametitle{\insertsectionhead}
|
||||
% \tableofcontents[currentsection,hideothersubsections]
|
||||
\end{frame}
|
||||
}
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%5
|
||||
|
||||
\setbeamertemplate{blocks}[rounded][shadow=true]
|
||||
|
||||
\title[]{Scientific Computing -- Statistik}
|
||||
\author[]{Jan Grewe, Fabian Sinz\\Abteilung f\"ur Neuroethologie\\
|
||||
Universit\"at T\"ubingen}
|
||||
|
||||
\institute[Wissenschaftliche Datenverarbeitung]{}
|
||||
\date{12.10.2015 - 06.11.2015}
|
||||
%\logo{\pgfuseimage{../../resources/UT_BM_Rot_RGB.pdf}}
|
||||
|
||||
\subject{Einf\"uhrung in die wissenschaftliche Datenverarbeitung}
|
||||
\vspace{1em}
|
||||
\titlegraphic{
|
||||
\includegraphics[width=0.5\linewidth]{../../resources/UT_WBMW_Rot_RGB}
|
||||
}
|
||||
%%%%%%%%%% configuration for code
|
||||
\lstset{
|
||||
basicstyle=\ttfamily,
|
||||
numbers=left,
|
||||
showstringspaces=false,
|
||||
language=Matlab,
|
||||
commentstyle=\itshape\color{darkgray},
|
||||
keywordstyle=\color{blue},
|
||||
stringstyle=\color{green},
|
||||
backgroundcolor=\color{blue!10},
|
||||
breaklines=true,
|
||||
breakautoindent=true,
|
||||
columns=flexible,
|
||||
frame=single,
|
||||
captionpos=b,
|
||||
xleftmargin=1em,
|
||||
xrightmargin=1em,
|
||||
aboveskip=10pt
|
||||
}
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
\newcommand{\mycite}[1]{
|
||||
\begin{flushright}
|
||||
\tiny \color{black!80} #1
|
||||
\end{flushright}
|
||||
}
|
||||
|
||||
\newcommand{\code}[1]{\texttt{#1}}
|
||||
|
||||
\input{../../latex/environments.tex}
|
||||
\makeatother
|
||||
|
||||
\begin{document}
|
||||
|
||||
\begin{frame}[plain]
|
||||
\frametitle{}
|
||||
\vspace{-1cm}
|
||||
\titlepage % erzeugt Titelseite
|
||||
\end{frame}
|
||||
|
||||
\begin{frame}[plain]
|
||||
\huge{Curve Fitting/Optimierung mit dem Gradientenabstiegsverfahren}
|
||||
\end{frame}
|
||||
|
||||
\begin{frame}
|
||||
\frametitle{Fitting und Optimierung}
|
||||
\framesubtitle{\"Ubersicht}
|
||||
\begin{enumerate}
|
||||
\item Das Problem: Wir haben beobachtete Daten und ein Modell, das die Daten erkl\"aren soll.
|
||||
\item Wie finden wir die Parameter (des Modells), die die Daten am Besten erkl\"aren?
|
||||
\item L\"osung: Anpassen der Parameter an die Daten (Fitting).
|
||||
\item Wie macht man das?
|
||||
\end{enumerate}
|
||||
\end{frame}
|
||||
|
||||
\begin{frame}
|
||||
\frametitle{Fitting und Optimierung}
|
||||
\framesubtitle{Ein 1-D Beispiel}
|
||||
\begin{columns}
|
||||
\begin{column}{6.25cm}
|
||||
\begin{figure}
|
||||
\includegraphics[width=1.\columnwidth]{figures/one_d_problem_a.pdf}
|
||||
\end{figure}
|
||||
\end{column}
|
||||
\begin{column}{6.5cm}
|
||||
\begin{itemize}
|
||||
\item z.B. eine Reihe Me{\ss}werte bei einer Bedingung.
|
||||
\item Ich suche den y-Wert, der die Daten am besten
|
||||
repr\"asentiert.
|
||||
\item F\"ur jeden m\"oglichen y-Wert wird die mittlere
|
||||
quadratische Abweichung zu allen Daten berechnet:\\
|
||||
\[ error = \frac{1}{N}\sum_{i=1}^{N}(y_i - y_{test})^2 \]
|
||||
\end{itemize}
|
||||
\end{column}
|
||||
\end{columns}\pause
|
||||
Wie finde ich den besten Wert heraus?
|
||||
\end{frame}
|
||||
|
||||
\begin{frame}
|
||||
\frametitle{Fitting und Optimierung}
|
||||
\framesubtitle{Ein 1-D Beispiel}
|
||||
\only<1> {
|
||||
\begin{columns}
|
||||
\begin{column}{4.5cm}
|
||||
\begin{figure}
|
||||
\includegraphics[width=1.\columnwidth]{figures/one_d_problem_b.pdf}
|
||||
\end{figure}
|
||||
\end{column}
|
||||
\begin{column}{8cm}
|
||||
\begin{itemize}
|
||||
\item Man folgt dem Gradienten!
|
||||
\item Der Gradient kann numerisch berechnet werden indem man ein
|
||||
(sehr kleines) ``Steigungsdreieck'' an den Positionen anlegt.\\ \vspace{0.25cm}
|
||||
$\frac{\Delta error}{\Delta y} = \frac{error(y+h) - error(y)}{h}$
|
||||
\end{itemize}
|
||||
\end{column}
|
||||
\end{columns}
|
||||
}
|
||||
\only<2>{
|
||||
\begin{columns}
|
||||
\begin{column}{4.5cm}
|
||||
\begin{figure}
|
||||
\includegraphics[width=1.\columnwidth]{figures/one_d_problem_c.pdf}
|
||||
\end{figure}
|
||||
\end{column}
|
||||
\begin{column}{8cm}
|
||||
\begin{itemize}
|
||||
\item Man folgt dem Gradienten!
|
||||
\item Der Gradient kann numerisch berechnet werden indem man ein
|
||||
(sehr kleines) ``Steigungsdreieck'' an den Positionen anlegt.\\ \vspace{0.25cm}
|
||||
$\frac{\Delta error}{\Delta y} = \frac{error(y+h) - error(y)}{h}$
|
||||
\item Da, wo der Gradient seine Nullstelle hat, liegt der beste y-Wert.
|
||||
\end{itemize}
|
||||
\end{column}
|
||||
\end{columns}
|
||||
}
|
||||
\end{frame}
|
||||
|
||||
\begin{frame}
|
||||
\frametitle{Fitting und Optimierung}
|
||||
\framesubtitle{Lineare Regression}
|
||||
\only<1-2> {
|
||||
\begin{figure}
|
||||
\includegraphics[width=0.45\columnwidth]{figures/lin_regress.pdf}
|
||||
\end{figure}
|
||||
}
|
||||
\only<2>{
|
||||
Nehmen wir mal einen linearen Zusammenhang zwischen \textit{Input}
|
||||
und \textit{Output} an. ($y = m\cdot x + n$)
|
||||
}
|
||||
\only<3> {
|
||||
Ver\"anderung der Steigung:
|
||||
\begin{figure}
|
||||
\includegraphics[width=0.45\columnwidth]{figures/lin_regress_slope.pdf}
|
||||
\end{figure}
|
||||
}
|
||||
\only<4> {
|
||||
Ver\"anderung des y-Achsenabschnitts:
|
||||
\begin{figure}
|
||||
\includegraphics[width=0.45\columnwidth]{figures/lin_regress_abscissa.pdf}
|
||||
\end{figure}
|
||||
}
|
||||
\end{frame}
|
||||
|
||||
\begin{frame}
|
||||
\frametitle{Fitting und Optimierung}
|
||||
\framesubtitle{Lineare Regression}
|
||||
|
||||
\huge{Welche Kombination ist die richtige?}
|
||||
\end{frame}
|
||||
|
||||
\begin{frame}
|
||||
\frametitle{Fitting und Optimierung}
|
||||
\framesubtitle{Lineare Regression - Methode der kleinsten quadratischen Abweichung}
|
||||
\begin{columns}
|
||||
\begin{column}{4.5cm}
|
||||
\begin{figure}
|
||||
\includegraphics[width=\columnwidth]{figures/linear_least_squares.pdf}
|
||||
\end{figure}
|
||||
\footnotesize{\url{http://en.wikipedia.org/wiki/Linear_least_squares_(mathematics)}}
|
||||
\end{column}
|
||||
\begin{column}{7cm}
|
||||
\begin{enumerate}
|
||||
\item Die am h\"aufigsten angewandte Methode ist die der
kleinsten quadratischen Abweichungen.
|
||||
\item Es wird versucht die Summe der quadratischen Abweichung zu
|
||||
minimieren.
|
||||
\end{enumerate}
|
||||
\[g(m,n) = \frac{1}{N}\sum^{N}_{i=1} \left( y_i - f_{m, n}(x_i)\right )^2\]
|
||||
\end{column}
|
||||
\end{columns}
|
||||
\end{frame}
|
||||
|
||||
|
||||
\begin{frame}[fragile]
|
||||
\frametitle{Fitting und Optimierung}
|
||||
\framesubtitle{Lineare Regression - Methode der kleinsten quadratischen Abweichung}
|
||||
\begin{itemize}
|
||||
\item Was heisst das: Minimieren der Summe der kleinsten
|
||||
quadratischen Abweichungen?
|
||||
\item Kann man einen Algorithmus zur L\"osung des Problems
erstellen?
|
||||
\item Kann man das visualisieren?
|
||||
\end{itemize}\pause
|
||||
\begin{columns}
|
||||
\begin{column}{5.5cm}
|
||||
\tiny
|
||||
\begin{lstlisting}
|
||||
x_range = linspace(-1, 1, 20);
|
||||
y_range = linspace(-5, 5, 20);
|
||||
|
||||
[X, Y] = meshgrid(x_range, y_range);
|
||||
Z = X.^2 + Y.^2;
|
||||
surf(X, Y, Z);
|
||||
colormap('autumn')
|
||||
xlabel('x')
|
||||
ylabel('y')
|
||||
zlabel('z')
|
||||
\end{lstlisting}
|
||||
\end{column}
|
||||
\begin{column}{5.5cm}
|
||||
\begin{figure}
|
||||
\includegraphics[width=0.9\columnwidth]{figures/surface.pdf}
|
||||
\end{figure}
|
||||
\end{column}
|
||||
\end{columns}
|
||||
\end{frame}
|
||||
|
||||
|
||||
\begin{frame}[fragile]
|
||||
\frametitle{Fitting und Optimierung}
|
||||
\framesubtitle{Lineare Regression - Methode der kleinsten quadratischen Abweichung}
|
||||
\textbf{Aufgabe}
|
||||
\begin{enumerate}
|
||||
\item Ladet den Datensatz \textit{lin\_regression.mat} in den
|
||||
Workspace. Wie sehen die Daten aus?
|
||||
\item Schreibt eine Funktion \code{lsq\_error}, die den Fehler
|
||||
berechnet:
|
||||
\begin{itemize}
|
||||
\item \"Ubernimmt einen 2-elementigen Vektor, der die Parameter
|
||||
\code{m} und \code{n} enth\"alt, die x-Werte und y-Werte.
|
||||
\item Die Funktion gibt den Fehler zur\"uck.
|
||||
\end{itemize}
|
||||
\item Schreibt ein Skript dass den Fehler in Abh\"angigkeit von
|
||||
\code{m} und \code{n} als surface plot darstellt (\code{surf}
|
||||
Funktion).
|
||||
\item Wie k\"onnen wir diesen Plot benutzen um die beste Kombination
|
||||
zu finden?
|
||||
\item Wo liegt die beste Kombination?
|
||||
\end{enumerate}
|
||||
\end{frame}
|
||||
|
||||
|
||||
\begin{frame}[fragile]
|
||||
\frametitle{Fitting und Optimierung}
|
||||
\framesubtitle{Lineare Regression - Methode der kleinsten quadratischen Abweichung}
|
||||
\begin{itemize}
|
||||
\item Wie findet man die Extrempunkte in einer Kurve?\pause
|
||||
\item Ableitung der Funktion auf Null setzen und nach x aufl\"osen.
|
||||
\item Definition der Ableitung:\\ \vspace{0.25cm}
|
||||
\begin{center}
|
||||
$ f'(x) = \lim\limits_{h \rightarrow 0} \frac{f(x + h) - f(x)}{h} $
|
||||
\vspace{0.25cm}\pause
|
||||
\end{center}
|
||||
\item Bei zwei Parametern $g(m,n)$ k\"onnen wie die partielle
|
||||
Ableitung bez\"uglich eines Parameters benutzen um die
|
||||
Ver\"anderung des Fehlers bei Ver\"anderung eines Parameters
|
||||
auszuwerten.
|
||||
\item Partielle Ableitung nach \code{m}?\\\pause
|
||||
\vspace{0.25cm}
|
||||
\begin{center}
|
||||
$\frac{\partial g(m,n)}{\partial m} = \lim\limits_{h \rightarrow 0} \frac{g(m + h, n) - g(m,n)}{h}$
|
||||
\vspace{0.25cm}
|
||||
\end{center}
|
||||
\end{itemize}
|
||||
\end{frame}
|
||||
|
||||
|
||||
\begin{frame}
|
||||
\frametitle{Fitting und Optimierung}
|
||||
\framesubtitle{Lineare Regression - Gradientenabstieg}
|
||||
\large{Der Gradient:}
|
||||
\begin{center}
|
||||
$\bigtriangledown g(m,n) = \left( \frac{\partial g(m,n)}{\partial m}, \frac{\partial g(m,n)}{\partial n}\right)$
|
||||
\end{center}
|
||||
Ist der Vektor mit den partiellen Ableitungen nach \code{m} und
|
||||
\code{n}.
|
||||
|
||||
\pause Numerisch kann die Ableitung durch einen sehr kleinen Schritt
|
||||
angen\"ahert werden.
|
||||
\begin{center}
|
||||
$\frac{\partial g(m,n)}{\partial m} = \lim\limits_{h \rightarrow
|
||||
0} \frac{g(m + h, n) - g(m,n)}{h} \approx \frac{g(m + h, n) -
|
||||
g(m,n)}{h}$
|
||||
\end{center}
|
||||
f\"ur sehr kleine Schritte \code{h}.
|
||||
\end{frame}
|
||||
|
||||
|
||||
\begin{frame}
|
||||
\frametitle{Fitting und Optimierung}
|
||||
\framesubtitle{Lineare Regression - Gradientenabstieg}
|
||||
Plotten des Gradientenfeldes:
|
||||
\begin{itemize}
|
||||
\item Ladet die Daten in \code{lin\_regression.mat}.
|
||||
\item Schreibt eine Funktion \code{lsq\_gradient.m} in dem gleichen
|
||||
Muster wie \code{lsq\_error.m}. Die Funktion berechnet
|
||||
den Gradienten an einer Position (Kombination von Parametern),
|
||||
wenn ein kleiner Schritt gemacht wird (\code{h=1e-6;}).
|
||||
\item Variiert \code{m} im Bereich von -2 bis +5 und \code{n} im
|
||||
Bereich -10 bis 10.
|
||||
\item Plottet die Fehlerfl\"ache als \code{surface} und
|
||||
\code{contour} plot in die gleiche Abbildung.
|
||||
\item F\"ugt die Gradienten als \code{quiver} plot hinzu.
|
||||
\item Was sagen die Pfeile? Wie passen Pfeile und Fl\"ache zusammen?
|
||||
\end{itemize}
|
||||
\end{frame}
|
||||
|
||||
|
||||
\begin{frame}
|
||||
\frametitle{Fitting und Optimierung}
|
||||
\framesubtitle{Lineare Regression - Gradientenabstieg}
|
||||
\begin{itemize}
|
||||
\item Der Gradient zeigt in die Richtung des gr\"o{\ss}ten \textbf{Anstiegs}. \pause
|
||||
\item Wie kann der Gradient nun dazu genutzt werden zum Minimum zu kommen?\pause
|
||||
\item \textbf{Man nehme: $-\bigtriangledown g(m,n)$!}\pause
|
||||
\vspace{0.25cm}
|
||||
\item Wir haben jetzt alle Zutaten um den Gradientenabstieg zu formulieren.
|
||||
\end{itemize}
|
||||
\end{frame}
|
||||
|
||||
|
||||
\begin{frame}
|
||||
\frametitle{Fitting und Optimierung}
|
||||
\framesubtitle{Gradientenabstieg - Algorithmus}
|
||||
\begin{enumerate}
|
||||
\item Starte mit einer beliebigen Parameterkombination $p_0 = (m_0,
|
||||
n_0)$.
|
||||
\item Wiederhole, solange der Gradient \"uber einer
|
||||
bestimmten Schwelle ist:
|
||||
\begin{itemize}
|
||||
\item Berechne den Gradienten an der aktuellen Position $p_t$.
|
||||
\item Gehe einen kleinen Schritt in die entgegensetzte Richtung des
|
||||
Gradienten:\\
|
||||
\begin{center}
|
||||
$p_{t+1} = p_t - \epsilon \cdot \bigtriangledown g(m_t, n_t)$
|
||||
\end{center}
|
||||
wobei $\epsilon$ eine kleine Zahl (0.01) ist.
|
||||
\end{itemize}
|
||||
\end{enumerate}
|
||||
\end{frame}
|
||||
|
||||
|
||||
\begin{frame}
|
||||
\frametitle{Fitting und Optimierung}
|
||||
\framesubtitle{Gradientenabstieg - \"Ubung}
|
||||
\begin{enumerate}
|
||||
\item Implementiert den Gradientenabstieg f\"ur das Fitten der
|
||||
linearen Geradengleichung an die Daten.
|
||||
\item Plottet f\"ur jeden Schritt den surface plot und die aktuelle
|
||||
Position als roten Punkt (nutzt \code{plot3}).
|
||||
\item Plottet f\"ur jeden Schritt den Fit in einen separaten plot.
|
||||
\item Nutzt \code{pause(0.1)} nach jedem Schritt um die Entwicklung
|
||||
des Fits zu beobachten.
|
||||
\end{enumerate}
|
||||
\end{frame}
|
||||
|
||||
|
||||
\begin{frame}
|
||||
\frametitle{Fitting und Optimierung}
|
||||
\framesubtitle{Gradientenabstieg - \"Ubung II}
|
||||
\begin{columns}
|
||||
\begin{column}{6cm}
|
||||
\begin{figure}
|
||||
\includegraphics[width=1\columnwidth]{figures/charging_curve.pdf}
|
||||
\end{figure}
|
||||
\end{column}
|
||||
\begin{column}{7cm}
|
||||
\begin{itemize}
|
||||
\item Ladet die Daten aus der \code{membraneVoltage.mat}.
|
||||
\item Plottet die Rohdaten.
|
||||
\item Fittet folgende Funktion an die Daten:\\
|
||||
\begin{center}
|
||||
$f_{A,\tau}(t) = A \cdot \left(1 - e^{-\frac{t}{\tau}}\right )$
|
||||
\end{center}
|
||||
\item An welcher Stelle muss der Code von oben ver\"andert
|
||||
werden?
|
||||
\item Plottet die Daten zusammen mit dem Fit.
|
||||
\end{itemize}
|
||||
\end{column}
|
||||
\end{columns}
|
||||
\end{frame}
|
||||
|
||||
|
||||
\begin{frame}[fragile]
|
||||
\frametitle{Fitting und Optimierung}
|
||||
\framesubtitle{Fitting mit Matlab}
|
||||
\begin{itemize}
|
||||
\item Es gibt mehrere Funktionen in Matlab, die eine Optimierung
|
||||
automatisch durchf\"uhren.
|
||||
\item z.B. \code{fminunc, lsqcurvefit, fminsearch, lsqnonlin, ...}
|
||||
\item Einige der Funktionen stecken allerdings in der
|
||||
\textit{Optimization Toolbox}, die nicht zum Standard Matlab
|
||||
geh\"ort.
|
||||
\end{itemize}
|
||||
\begin{lstlisting}
|
||||
function param = estimated_regression(x, y, start_parameter)
|
||||
objective_function = @(p)(lsq_error(p, x, y));
|
||||
param = fminunc(objective_function, start_parameter)
|
||||
\end{lstlisting}
|
||||
\end{frame}
|
||||
|
||||
\end{document}
|
||||
Reference in New Issue
Block a user