diff --git a/regression/exercises/Makefile b/regression/exercises/Makefile
index 27691d9..ba0b38c 100644
--- a/regression/exercises/Makefile
+++ b/regression/exercises/Makefile
@@ -1,34 +1,4 @@
-TEXFILES=$(wildcard exercises??.tex)
-EXERCISES=$(TEXFILES:.tex=.pdf)
-SOLUTIONS=$(EXERCISES:exercises%=solutions%)
+TEXFILES=$(wildcard gradientdescent-?.tex)
 
-.PHONY: pdf exercises solutions watch watchexercises watchsolutions clean
+include ../../exercises.mk
 
-pdf : $(SOLUTIONS) $(EXERCISES)
-
-exercises : $(EXERCISES)
-
-solutions : $(SOLUTIONS)
-
-$(SOLUTIONS) : solutions%.pdf : exercises%.tex instructions.tex
-	{ echo "\\documentclass[answers,12pt,a4paper,pdftex]{exam}"; sed -e '1d' $<; } > $(patsubst %.pdf,%.tex,$@)
-	pdflatex -interaction=scrollmode $(patsubst %.pdf,%.tex,$@) | tee /dev/stderr | fgrep -q "Rerun to get cross-references right" && pdflatex -interaction=scrollmode $(patsubst %.pdf,%.tex,$@) || true
-	rm $(patsubst %.pdf,%,$@).[!p]*
-
-$(EXERCISES) : %.pdf : %.tex instructions.tex
-	pdflatex -interaction=scrollmode $< | tee /dev/stderr | fgrep -q "Rerun to get cross-references right" && pdflatex -interaction=scrollmode $< || true
-
-watch :
-	while true; do ! make -q pdf && make pdf; sleep 0.5; done
-
-watchexercises :
-	while true; do ! make -q exercises && make exercises; sleep 0.5; done
-
-watchsolutions :
-	while true; do ! make -q solutions && make solutions; sleep 0.5; done
-
-clean :
-	rm -f *~ *.aux *.log *.out
-
-cleanup : clean
-	rm -f $(SOLUTIONS) $(EXERCISES)
diff --git a/regression/code/checkdescent.m b/regression/exercises/checkdescent.m
similarity index 100%
rename from regression/code/checkdescent.m
rename to regression/exercises/checkdescent.m
diff --git a/regression/code/descent.m b/regression/exercises/descent.m
similarity index 100%
rename from regression/code/descent.m
rename to regression/exercises/descent.m
diff --git a/regression/code/descentfit.m b/regression/exercises/descentfit.m
similarity index 100%
rename from regression/code/descentfit.m
rename to regression/exercises/descentfit.m
diff --git a/regression/exercises/exercises01.tex b/regression/exercises/gradientdescent-1.tex
similarity index 64%
rename from regression/exercises/exercises01.tex
rename to regression/exercises/gradientdescent-1.tex
index 9e7c5b3..8275723 100644
--- a/regression/exercises/exercises01.tex
+++ b/regression/exercises/gradientdescent-1.tex
@@ -1,60 +1,17 @@
 \documentclass[12pt,a4paper,pdftex]{exam}
-\usepackage[german]{babel}
-\usepackage{natbib}
-\usepackage{xcolor}
-\usepackage{graphicx}
-\usepackage[small]{caption}
-\usepackage{sidecap}
-\usepackage{pslatex}
-\usepackage{amsmath}
-\usepackage{amssymb}
-\setlength{\marginparwidth}{2cm}
-\usepackage[breaklinks=true,bookmarks=true,bookmarksopen=true,pdfpagemode=UseNone,pdfstartview=FitH,colorlinks=true,citecolor=blue]{hyperref}
-
-%%%%% text size %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-\usepackage[left=20mm,right=20mm,top=25mm,bottom=25mm]{geometry}
-\pagestyle{headandfoot}
-\ifprintanswers
-\newcommand{\stitle}{: Solutions}
-\else
-\newcommand{\stitle}{}
-\fi
-\header{{\bfseries\large Exercise 10\stitle}}{{\bfseries\large Gradient descent}}{{\bfseries\large December 16th, 2019}}
-\firstpagefooter{Dr. Jan Grewe}{Phone: 29 74588}{Email:
-  jan.grewe@uni-tuebingen.de}
-\runningfooter{}{\thepage}{}
-
-\setlength{\baselineskip}{15pt}
-\setlength{\parindent}{0.0cm}
-\setlength{\parskip}{0.3cm}
-\renewcommand{\baselinestretch}{1.15}
-
-\newcommand{\code}[1]{\texttt{#1}}
-\renewcommand{\solutiontitle}{\noindent\textbf{Solution:}\par\noindent}
-%%%%% listings %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-\usepackage{listings}
-\lstset{
-  language=Matlab,
-  basicstyle=\ttfamily\footnotesize,
-  numbers=left,
-  numberstyle=\tiny,
-  title=\lstname,
-  showstringspaces=false,
-  commentstyle=\itshape\color{darkgray},
-  breaklines=true,
-  breakautoindent=true,
-  columns=flexible,
-  frame=single,
-  xleftmargin=1em,
-  xrightmargin=1em,
-  aboveskip=10pt
-}
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\newcommand{\exercisetopic}{Resampling}
+\newcommand{\exercisenum}{9}
+\newcommand{\exercisedate}{December 21th, 2020}
+
+\input{../../exercisesheader}
+
+\firstpagefooter{Prof. Dr. Jan Benda}{}{jan.benda@uni-tuebingen.de}
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 
 \begin{document}
 
-\input{instructions}
+\input{../../exercisestitle}
 
 \begin{questions}
 
@@ -96,7 +53,7 @@
   the parameter values at the minimum of the cost function and a vector
   with the value of the cost function at each step of the algorithm.
   \begin{solution}
-    \lstinputlisting{../code/descent.m}
+    \lstinputlisting{descent.m}
   \end{solution}
 
   \part Plot the data and the straight line with the parameter
@@ -105,7 +62,7 @@
   \part Plot the development of the costs as a function of the
   iteration step.
   \begin{solution}
-    \lstinputlisting{../code/descentfit.m}
+    \lstinputlisting{descentfit.m}
   \end{solution}
 
   \part For checking the gradient descend method from (a) compare
@@ -116,7 +73,7 @@
   minimum gradient. What are good values such that the gradient
   descent gets closest to the true minimum of the cost function?
   \begin{solution}
-    \lstinputlisting{../code/checkdescent.m}
+    \lstinputlisting{checkdescent.m}
   \end{solution}
 
   \part Use the functions \code{polyfit()} and \code{lsqcurvefit()}
@@ -124,7 +81,7 @@
   line that fits the data. Compare the resulting fit parameters of
   those functions with the ones of your gradient descent algorithm.
   \begin{solution}
-    \lstinputlisting{../code/linefit.m}
+    \lstinputlisting{linefit.m}
   \end{solution}
 
 \end{parts}
diff --git a/regression/code/linefit.m b/regression/exercises/linefit.m
similarity index 100%
rename from regression/code/linefit.m
rename to regression/exercises/linefit.m
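
Note: descent.m, descentfit.m, checkdescent.m, and linefit.m are only moved from regression/code/ to regression/exercises/ by this patch, so their contents do not appear above. For orientation only, a minimal gradient-descent line fit of the kind the exercise asks for might look like the MATLAB sketch below; the file name descentsketch.m, the start parameters, the step size eps, the stop threshold, and the helper mse() are illustrative assumptions, not the course's actual descent.m.

% descentsketch.m -- hypothetical sketch, not the repository's descent.m
% Fit a straight line y = p(1)*x + p(2) to data vectors x and y by gradient
% descent on the mean squared error; returns the parameter values at the
% minimum and the cost at each step of the algorithm.
function [params, costs] = descentsketch(x, y)
    params = [-2.0, 10.0];     % assumed start values for slope and intercept
    eps = 0.01;                % assumed step size (learning rate)
    mingradient = 0.1;         % stop once the gradient is smaller than this
    h = 1e-6;                  % step for the numerical partial derivatives
    costs = [];
    gradient = [mingradient + 1.0, mingradient + 1.0];
    while norm(gradient) > mingradient
        costs(end+1) = mse(x, y, params);
        % numerical gradient of the cost with respect to slope and intercept:
        gradient(1) = (mse(x, y, params + [h, 0.0]) - costs(end)) / h;
        gradient(2) = (mse(x, y, params + [0.0, h]) - costs(end)) / h;
        params = params - eps * gradient;   % walk against the gradient
    end
end

function c = mse(x, y, p)
    % mean squared error between the data y and the line p(1)*x + p(2)
    c = mean((y - (p(1) * x + p(2))).^2);
end

A call such as [p, costs] = descentsketch(x, y) would then provide the fitted slope and intercept for plotting (descentfit.m) and the cost trajectory for comparison against the true minimum (checkdescent.m).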