Skip to content

Commit 3b2217e

Browse files
committed
Update ddasp_exercise_slides.tex
bugfix wording: perceptron -> neuron
1 parent ae56f61 commit 3b2217e

File tree

1 file changed: +16 additions, −16 deletions

slides/ddasp_exercise_slides.tex

Lines changed: 16 additions & 16 deletions
Original file line number | Diff line number | Diff line change
@@ -2977,9 +2977,9 @@ \subsection{Fundamentals}
29772977

29782978
\begin{frame}[t]{Output Layer for Regression Model}
29792979

2980-
$\cdot$ Output layer exhibits $i=1 \dots K$ perceptrons
2980+
$\cdot$ Output layer exhibits $i=1 \dots K$ neurons
29812981

2982-
$\cdot$ Activation function $\sigma(\cdot)$ for $i\text{-th}$ perceptron: \underline{linear}
2982+
$\cdot$ Activation function $\sigma(\cdot)$ for $i\text{-th}$ neuron: \underline{linear}
29832983

29842984
$$\sigma(z_i) = z_i$$
29852985

@@ -2993,9 +2993,9 @@ \subsection{Fundamentals}
29932993

29942994
\begin{frame}[t]{Output Layer for Binary Classification Model}
29952995

2996-
$\cdot$ Output layer exhibits two perceptrons with shared input weights, hence acting on same $z$
2996+
$\cdot$ Output layer exhibits two neurons with shared input weights, hence acting on same $z$
29972997

2998-
$\cdot$ Activation functions $\sigma(\cdot)_{1,2}$ for the two perceptrons: \underline{sigmoid} / complementary sigmoid
2998+
$\cdot$ Activation functions $\sigma(\cdot)_{1,2}$ for the two neurons: \underline{sigmoid} / complementary sigmoid
29992999

30003000
$$\sigma_1(z) = \frac{1}{1+\e^{-z}} = \frac{\e^{z}}{\e^{z}+1} \qquad\qquad \sigma_2(z) = 1-\sigma_1(z) = \frac{1}{1 + \e^{z}} = \frac{\e^{-z}}{\e^{-z}+1}$$
30013001

@@ -3029,9 +3029,9 @@ \subsection{Fundamentals}
30293029

30303030
\begin{frame}[t]{Output Layer for Binary Classification Model}
30313031

3032-
$\cdot$ Output layer exhibits a single perceptron
3032+
$\cdot$ Output layer exhibits a single neuron
30333033

3034-
$\cdot$ Activation function $\sigma(\cdot)$ for this single output perceptron: \underline{sigmoid}
3034+
$\cdot$ Activation function $\sigma(\cdot)$ for this single output neuron: \underline{sigmoid}
30353035

30363036
$$\sigma(z) = \frac{1}{1+\e^{-z}} = \frac{\e^{z}}{\e^{z}+1}$$
30373037

@@ -3064,15 +3064,15 @@ \subsection{Fundamentals}
30643064

30653065
\begin{frame}[t]{Output Layer for Multi-Class Classification Model}
30663066

3067-
$\cdot$ Output layer exhibits $i=1 \dots K$ perceptrons for $K$ mutually exclusive classes
3067+
$\cdot$ Output layer exhibits $i=1 \dots K$ neurons for $K$ mutually exclusive classes
30683068

3069-
$\cdot$ Activation function $\sigma(\cdot)$ for $i\text{-th}$ perceptron: \underline{softmax}
3069+
$\cdot$ Activation function $\sigma(\cdot)$ for $i\text{-th}$ neuron: \underline{softmax}
30703070

30713071
$$
30723072
\sigma(z_i) = \frac{\e^{z_i}}{\sum\limits_{i'=1}^{K} \e^{z_{i'}}} \qquad \text{hence, }\sum\limits_{i=1}^{K} \sigma(z_i) = 1
3073-
\text{, which couples the perceptrons}
3073+
\text{, which couples the neurons}
30743074
$$
3075-
%which couples the perceptrons in the output layer
3075+
%which couples the neurons in the output layer
30763076

30773077
$\cdot$ Derivatives to set up the Jacobian matrix
30783078

@@ -3127,7 +3127,7 @@ \subsection{Exercise 11}
31273127
\begin{itemize}
31283128
\item XOR is a classification problem, which cannot be handled by linear algebra
31293129
\item introduce two nonlinearities: add bias, non-linear activation function
3130-
\item perceptron concept
3130+
\item neuron / perceptron concept
31313131
\item general architecture of non-linear models
31323132
\end{itemize}
31333133
\end{frame}
@@ -3357,7 +3357,7 @@ \subsection{Exercise 11}
33573357

33583358
\begin{frame}[t]{A Non-Linear Model for XOR}
33593359

3360-
$\cdot$ weight matrix and bias vector to represent perceptron \textcolor{C0}{1} and \textcolor{C3}{2}
3360+
$\cdot$ weight matrix and bias vector to represent neurons \textcolor{C0}{1} and \textcolor{C3}{2}
33613361
$$
33623362
\bm{W}_\text{layer 1} =
33633363
\begin{bmatrix}
@@ -3373,7 +3373,7 @@ \subsection{Exercise 11}
33733373
\end{bmatrix}
33743374
$$
33753375

3376-
$\cdot$ weight vector and bias scalar to represent perceptron \textcolor{C1}{3}
3376+
$\cdot$ weight vector and bias scalar to represent neuron \textcolor{C1}{3}
33773377
$$
33783378
\bm{W}_\text{layer 2} =
33793379
\begin{bmatrix}
@@ -3416,7 +3416,7 @@ \subsection{Exercise 11}
34163416

34173417
$\cdot$ solution known from book Goodfellow et al. (2016): Deep Learning. MIT Press, Ch. 6.1
34183418

3419-
$\cdot$ weight matrix and bias vector to represent perceptron \textcolor{C0}{1} and \textcolor{C3}{2}
3419+
$\cdot$ weight matrix and bias vector to represent neurons \textcolor{C0}{1} and \textcolor{C3}{2}
34203420
$$
34213421
\bm{W}_\text{layer 1} =
34223422
\begin{bmatrix}
@@ -3432,7 +3432,7 @@ \subsection{Exercise 11}
34323432
\end{bmatrix}
34333433
$$
34343434

3435-
$\cdot$ weight vector and bias scalar to represent perceptron \textcolor{C1}{3}
3435+
$\cdot$ weight vector and bias scalar to represent neuron \textcolor{C1}{3}
34363436
$$
34373437
\bm{W}_\text{layer 2} =
34383438
\begin{bmatrix}
@@ -3616,7 +3616,7 @@ \subsection{Exercise 12}
36163616
\end{tikzpicture}
36173617
\end{center}
36183618

3619-
$\cdot$ Activation function $\sigma(\cdot)$ for this single output perceptron: \underline{sigmoid}
3619+
$\cdot$ Activation function $\sigma(\cdot)$ for this single output neuron: \underline{sigmoid}
36203620

36213621
$$\hat{y} = \sigma(z) = \frac{1}{1+\e^{-z}} = \frac{\e^{z}}{\e^{z}+1}\qquad\qquad
36223622
\frac{\partial \sigma(z)}{\partial z} = \frac{\e^{z}}{(\e^{z}+1)^2} = \sigma(z) \cdot (1-\sigma(z))

0 commit comments

Comments (0)