[regression] updated exercise infrastructure

Jan Benda 2020-12-15 13:50:42 +01:00
parent d6756d4975
commit 9db487fb25
6 changed files with 16 additions and 89 deletions


@@ -1,34 +1,4 @@
-TEXFILES=$(wildcard exercises??.tex)
-EXERCISES=$(TEXFILES:.tex=.pdf)
-SOLUTIONS=$(EXERCISES:exercises%=solutions%)
+TEXFILES=$(wildcard gradientdescent-?.tex)
.PHONY: pdf exercises solutions watch watchexercises watchsolutions clean
+include ../../exercises.mk
-pdf : $(SOLUTIONS) $(EXERCISES)
-exercises : $(EXERCISES)
-solutions : $(SOLUTIONS)
-$(SOLUTIONS) : solutions%.pdf : exercises%.tex instructions.tex
-	{ echo "\\documentclass[answers,12pt,a4paper,pdftex]{exam}"; sed -e '1d' $<; } > $(patsubst %.pdf,%.tex,$@)
-	pdflatex -interaction=scrollmode $(patsubst %.pdf,%.tex,$@) | tee /dev/stderr | fgrep -q "Rerun to get cross-references right" && pdflatex -interaction=scrollmode $(patsubst %.pdf,%.tex,$@) || true
-	rm $(patsubst %.pdf,%,$@).[!p]*
-$(EXERCISES) : %.pdf : %.tex instructions.tex
-	pdflatex -interaction=scrollmode $< | tee /dev/stderr | fgrep -q "Rerun to get cross-references right" && pdflatex -interaction=scrollmode $< || true
-watch :
-	while true; do ! make -q pdf && make pdf; sleep 0.5; done
-watchexercises :
-	while true; do ! make -q exercises && make exercises; sleep 0.5; done
-watchsolutions :
-	while true; do ! make -q solutions && make solutions; sleep 0.5; done
-clean :
-	rm -f *~ *.aux *.log *.out
-cleanup : clean
-	rm -f $(SOLUTIONS) $(EXERCISES)


@@ -1,60 +1,17 @@
\documentclass[12pt,a4paper,pdftex]{exam}
-\usepackage[german]{babel}
-\usepackage{natbib}
-\usepackage{xcolor}
-\usepackage{graphicx}
-\usepackage[small]{caption}
-\usepackage{sidecap}
-\usepackage{pslatex}
-\usepackage{amsmath}
-\usepackage{amssymb}
-\setlength{\marginparwidth}{2cm}
-\usepackage[breaklinks=true,bookmarks=true,bookmarksopen=true,pdfpagemode=UseNone,pdfstartview=FitH,colorlinks=true,citecolor=blue]{hyperref}
-%%%%% text size %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-\usepackage[left=20mm,right=20mm,top=25mm,bottom=25mm]{geometry}
-\pagestyle{headandfoot}
-\ifprintanswers
-\newcommand{\stitle}{: Solutions}
-\else
-\newcommand{\stitle}{}
-\fi
-\header{{\bfseries\large Exercise 10\stitle}}{{\bfseries\large Gradient descent}}{{\bfseries\large December 16th, 2019}}
-\firstpagefooter{Dr. Jan Grewe}{Phone: 29 74588}{Email:
-jan.grewe@uni-tuebingen.de}
-\runningfooter{}{\thepage}{}
-\setlength{\baselineskip}{15pt}
-\setlength{\parindent}{0.0cm}
-\setlength{\parskip}{0.3cm}
-\renewcommand{\baselinestretch}{1.15}
-\newcommand{\code}[1]{\texttt{#1}}
-\renewcommand{\solutiontitle}{\noindent\textbf{Solution:}\par\noindent}
-%%%%% listings %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-\usepackage{listings}
-\lstset{
-language=Matlab,
-basicstyle=\ttfamily\footnotesize,
-numbers=left,
-numberstyle=\tiny,
-title=\lstname,
-showstringspaces=false,
-commentstyle=\itshape\color{darkgray},
-breaklines=true,
-breakautoindent=true,
-columns=flexible,
-frame=single,
-xleftmargin=1em,
-xrightmargin=1em,
-aboveskip=10pt
-}
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\newcommand{\exercisetopic}{Resampling}
+\newcommand{\exercisenum}{9}
+\newcommand{\exercisedate}{December 21st, 2020}
+\input{../../exercisesheader}
+\firstpagefooter{Prof. Dr. Jan Benda}{}{jan.benda@uni-tuebingen.de}
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{document}
\input{instructions}
\input{../../exercisestitle}
\begin{questions}
@@ -96,7 +53,7 @@
the parameter values at the minimum of the cost function and a vector
with the value of the cost function at each step of the algorithm.
\begin{solution}
-\lstinputlisting{../code/descent.m}
+\lstinputlisting{descent.m}
\end{solution}
\part Plot the data and the straight line with the parameter
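
The referenced solution file descent.m is not part of this diff. Below is a minimal sketch of such a gradient descent, assuming a straight line y = p(1)*x + p(2), the mean squared error as cost function, and a made-up signature with learning rate epsilon and gradient threshold (the actual course solution may differ):

    % Hypothetical sketch of descent.m --- not the repository's solution file.
    % Gradient descent on the mean squared error of the straight line
    % y = p(1)*x + p(2), starting at parameter vector p0.
    function [p, costs] = descent(x, y, p0, epsilon, threshold)
        p = p0(:);
        gradient = ones(2, 1);       % dummy value for entering the loop
        costs = [];
        while norm(gradient) > threshold && length(costs) < 10000
            residuals = y - (p(1)*x + p(2));
            costs(end+1) = mean(residuals.^2);        % cost at this step
            gradient = [-2.0*mean(residuals.*x); ...  % d cost / d slope
                        -2.0*mean(residuals)];        % d cost / d intercept
            p = p - epsilon*gradient;                 % step downhill
        end
    end

A call like [p, costs] = descent(x, y, [0; 0], 0.001, 0.001) then returns the fitted parameters together with the cost at each iteration step, as the exercise requires.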
@@ -105,7 +62,7 @@
\part Plot the development of the costs as a function of the
iteration step.
\begin{solution}
-\lstinputlisting{../code/descentfit.m}
+\lstinputlisting{descentfit.m}
\end{solution}
\part For checking the gradient descent method from (a) compare
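
descentfit.m is likewise only referenced. Assuming the descent() sketch from above and made-up example data, the two requested plots could be produced like this:

    % Hypothetical sketch of descentfit.m --- not the repository's solution file.
    n = 40;                                  % simulated data around a known line
    x = 10.0*rand(n, 1);
    y = 2.0*x + 1.0 + 2.5*randn(n, 1);
    [p, costs] = descent(x, y, [0.0; 0.0], 0.001, 0.001);
    subplot(1, 2, 1);                        % data and fitted straight line
    plot(x, y, 'o');
    hold on;
    xx = [0.0; 10.0];
    plot(xx, p(1)*xx + p(2), 'r', 'LineWidth', 2);
    hold off;
    xlabel('x');
    ylabel('y');
    subplot(1, 2, 2);                        % cost as a function of iteration step
    plot(costs);
    xlabel('iteration step');
    ylabel('cost');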
@@ -116,7 +73,7 @@
minimum gradient. What are good values such that the gradient
descent gets closest to the true minimum of the cost function?
\begin{solution}
-\lstinputlisting{../code/checkdescent.m}
+\lstinputlisting{checkdescent.m}
\end{solution}
\part Use the functions \code{polyfit()} and \code{lsqcurvefit()}
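
For checkdescent.m, a sketch of how learning rate and threshold could be explored, again assuming the descent() sketch and the example data from above, with polyfit() providing the least-squares reference:

    % Hypothetical sketch of checkdescent.m --- not the repository's solution file.
    pref = polyfit(x, y, 1);                 % least-squares reference solution
    for epsilon = [0.01, 0.001, 0.0001]
        for threshold = [0.1, 0.01, 0.001]
            [p, costs] = descent(x, y, [0.0; 0.0], epsilon, threshold);
            fprintf('eps=%6.4f, thresh=%5.3f: %5d steps, slope=%6.3f (ref %6.3f), intercept=%6.3f (ref %6.3f)\n', ...
                    epsilon, threshold, length(costs), p(1), pref(1), p(2), pref(2));
        end
    end

The printout makes the trade-off visible: too large a threshold stops the descent far from the minimum, while a very small epsilon needs many steps to get there.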
@@ -124,7 +81,7 @@
line that fits the data. Compare the resulting fit parameters of
those functions with the ones of your gradient descent algorithm.
\begin{solution}
-\lstinputlisting{../code/linefit.m}
+\lstinputlisting{linefit.m}
\end{solution}
\end{parts}
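
And for linefit.m, comparing the gradient descent result with MATLAB's built-in fitting routines might look like:

    % Hypothetical sketch of linefit.m --- not the repository's solution file.
    [pdesc, costs] = descent(x, y, [0.0; 0.0], 0.001, 0.001);
    ppoly = polyfit(x, y, 1);                % linear least squares, degree-1 polynomial
    plsq = lsqcurvefit(@(p, xd) p(1)*xd + p(2), [0.0; 0.0], x, y);
    fprintf('gradient descent: slope=%6.3f, intercept=%6.3f\n', pdesc(1), pdesc(2));
    fprintf('polyfit():        slope=%6.3f, intercept=%6.3f\n', ppoly(1), ppoly(2));
    fprintf('lsqcurvefit():    slope=%6.3f, intercept=%6.3f\n', plsq(1), plsq(2));

All three should agree to within the precision of the gradient descent's stopping criterion; note that lsqcurvefit() requires the Optimization Toolbox.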