Skip to content

Commit f74bed7

Browse files
committed
Completed gradient descent
1 parent b171e0e commit f74bed7

File tree

6 files changed

+261
-30
lines changed

6 files changed

+261
-30
lines changed

about.html

+9-3
Original file line numberDiff line numberDiff line change
@@ -250,7 +250,7 @@ <h2>Planned Topics</h2>
250250
<center>
251251
<div style="background:black; border: 1px solid red;">
252252
<p class="flow-text">
253-
If this website gets a larger audience, then I will have greater impetus to work onthese topics!
253+
If this website gets a larger audience, then I will have greater impetus to work on these topics!
254254
</p>
255255
</div>
256256
</center>
@@ -279,6 +279,7 @@ <h2>Planned Topics</h2>
279279
<li>Boltzmann Distribution in a randomized exchange lattice</li>
280280
<li>Working of Gradient Descent Algorithm</li>
281281
<li>Solve Travelling Salesman using Genetic Algorithm, Simulated Annealing and Ant-Colony Optimization</li>
282+
<li>Raycasting</li>
282283
</ol>
283284

284285
<p class="flow-text">
@@ -287,9 +288,14 @@ <h2>Planned Topics</h2>
287288
then please contact me via GitHub or email me at <a
288289
289290
<br>
290-
<br>
291-
Thanks for visiting!
292291
</p>
292+
<center>
293+
<div style="background:black; border: 1px solid red;">
294+
<p class="flow-text">
295+
Thanks for visiting!
296+
</p>
297+
</div>
298+
</center>
293299
</div>
294300
</div>
295301
</div>

gradient_descent/basic.js

+18-6
Original file line numberDiff line numberDiff line change
@@ -12,13 +12,22 @@ if (/Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(naviga
1212
let canvas = document.getElementById("canvas");
1313
let context = canvas.getContext("2d");
1414

15+
let cost_display = document.getElementById("cost-display");
16+
let coeffs_display = document.getElementById("coeffs-display");
17+
18+
let degree_display = document.getElementById("degree-display");
19+
let degree_input = document.getElementById("degree-input");
20+
21+
let alpha_display = document.getElementById("alpha-display");
22+
let alpha_input = document.getElementById("alpha-input");
23+
1524
if (mobile) {
1625
canvas_width = 0.9 * screen_width;
1726
}
1827
else {
19-
canvas_width = 0.6 * screen_width;
28+
canvas_width = 0.45 * screen_width;
2029
}
21-
canvas_height = canvas_width / 1.618;
30+
canvas_height = canvas_width;
2231

2332
canvas.width = canvas_width;
2433
canvas.height = canvas_height;
@@ -31,18 +40,21 @@ let animate = window.requestAnimationFrame
3140
};
3241

3342
function step() {
    // Animation loop: draw the current frame, then queue the next one.
    render();
    animate(step);
}
4046

41-
window.onload = function () {
    // Page entry point: seed the sliders, initialise simulation state
    // (which reads those slider values), then start the animation loop.
    defaultParams();
    initParams();
    animate(step);
}
4552

53+
function defaultParams() {
    // Initial slider positions: log10(learning rate) = 0, polynomial degree 1.
    alpha_input.value = 0;
    degree_input.value = 1;
}
57+
4658
let click_x, click_y, pressed;
4759

4860
if(mobile) {

gradient_descent/simulation.html

+83-18
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
<!-- Materialize -->
1111
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/1.0.0/css/materialize.min.css" />
1212
<script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/1.0.0/js/materialize.min.js"></script>
13-
13+
1414
<script src="../helper.js" defer></script>
1515
<script src="basic.js" defer></script>
1616
<script src="user_input.js" defer></script>
@@ -27,7 +27,7 @@
2727
<script async src="https://www.googletagmanager.com/gtag/js?id=G-M95CKRP8HB"></script>
2828
<script>
2929
window.dataLayer = window.dataLayer || [];
30-
function gtag(){window.dataLayer.push(arguments);}
30+
function gtag() { window.dataLayer.push(arguments); }
3131
gtag('js', new Date());
3232

3333
gtag('config', 'G-M95CKRP8HB');
@@ -45,22 +45,87 @@ <h1 id="main-heading">Visualize It</h1>
4545
</ul>
4646
</div>
4747
</nav>
48-
</body>
4948

50-
<div class="text">
51-
<h2>Gradient Descent</h2>
52-
53-
<br>
54-
<center>
55-
<canvas id="canvas"></canvas>
56-
</center>
57-
<br>
58-
59-
<br>
60-
<hr>
61-
<br>
62-
63-
<p class="center-align">Developed by ChanRT | Fork me at <a href="https://www.github.com/chanrt">GitHub</a></p>
64-
</div>
49+
<div class="text">
50+
<h2>Gradient Descent</h2>
51+
<center>
52+
<p>
53+
Gradient descent is an iterative optimisation algorithm that is commonly used in Machine Learning algorithms to
54+
minimize cost functions.
55+
</p>
56+
</center>
57+
<br>
58+
<div class="container" style="width:90%;">
59+
<div class="row">
60+
<div class="col s12 l8">
61+
<canvas id="canvas"></canvas>
62+
</div>
63+
<div class="col s12 l4">
64+
<center>
65+
<b>
66+
Click on the canvas to introduce a point <br> Remove a point by clicking on it <br> The blue line depicts
67+
the fit
68+
</b>
69+
<br> <br>
70+
<button class="btn purple darken-4" onclick="clearPoints()">Clear Points</button>
71+
<br> <br>
72+
<hr>
73+
<br>
74+
<button class="btn purple darken-4" onclick="update()">Iterate</button>
75+
<button class="btn purple darken-4" onclick="resetTheta()">Reset</button>
76+
<p id="cost-display"></p>
77+
<span id="degree-display"></span>
78+
<input id="degree-input" type="range" min="0" max="10" step="1" oninput="updateParams('degree')"
79+
onchange="updateParams('degree')">
80+
<span id="alpha-display"></span>
81+
<input id="alpha-input" type="range" min="-3" max="3" step="0.1" oninput="updateParams('alpha')"
82+
onchange="updateParams('alpha')">
83+
<br>
84+
<p id="coeffs-display"></p>
85+
</center>
86+
</div>
87+
</div>
88+
</div>
89+
90+
<br>
91+
<hr>
92+
93+
<h3>Description</h3>
94+
<p>
95+
Consider a bunch of \(m\) points of the form \( (x_i, y_i) \), and a polynomial \( h_{\theta} (x) \) of order \( n
96+
\):
97+
\[ h_{\theta} (x) = \theta_0 + \theta_1 x + \theta_2 x^2 + \dots + \theta_n x^n \]
98+
Our task is to assign all the \(\theta\)'s such that the polynomial fits the points. In order to quantify
99+
how good the fit is, we define something called the cost function \( J(\theta) \):
100+
\[ J(\theta) = \frac{1}{2m} \sum_{i = 1}^{m} (h_{\theta} (x_i) - y_i)^2 \]
101+
A lower cost function implies a better fit. Hence, we want to find the \(\theta\)'s for which this function is
102+
minimized. This can be achieved via the Gradient Descent algorithm, which dictates that the \(\theta\)'s should be
103+
modified in the following way:
104+
\[ \theta_j = \theta_j - \frac{\alpha}{m} \sum_{i = 1}^{m} (h_{\theta} (x_i) - y_i) \cdot x_i^j \]
105+
Here, \( \alpha \) is known as the learning rate. It is the rate at which the \(\theta\)'s try to approach their
106+
minimum. It must be optimized correctly, depending on the problem. A low learning rate results in a very slow
107+
decrease in cost function. A high learning rate causes the cost function to blow up or oscillate.
108+
</p>
109+
110+
<br>
111+
<hr>
112+
113+
<b>Note:</b>
114+
<ol>
115+
<li>The learning rate must be optimized correctly. Click on reset if the blue line vanishes, and adjust the
116+
learning rate accordingly.</li>
117+
<li>The concept of gradient descent can be scaled to more variables easily. In fact, even neural networks utilize
118+
gradient descent to optimize the weights and biases of neurons in every level.</li>
119+
<li><a href="../polynomial_regression/simulation.html">Polynomial regression</a> directly finds the minimum of the
120+
cost function, by using calculus to find the minima and linear algebra to solve for it. However, that method
121+
doesn't scale well as the number of points is increased.</li>
122+
</ol>
123+
124+
<br>
125+
<hr>
126+
127+
<p class="center-align">Developed by ChanRT | Fork me at <a href="https://www.github.com/chanrt">GitHub</a></p>
128+
</div>
129+
</body>
65130

66131
</html>

gradient_descent/simulation.js

+146-1
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,160 @@
1+
let points, point_radius;
2+
3+
let extent;
4+
5+
let degree, thetas;
6+
7+
let alpha = 1;
8+
19
function update() {
    // One batch gradient-descent step: theta_j -= (alpha/m) * sum_i (h(x_i) - y_i) * x_i^j.
    // All coefficients are updated simultaneously (gradients are computed
    // against the current thetas before any of them change).
    if (points.length === 0) {
        return;
    }
    const next_thetas = thetas.map((theta, j) => {
        let grad = 0;
        for (const p of points) {
            grad += (getY(p.x) - p.y) * Math.pow(p.x, j);
        }
        return theta - alpha * grad / points.length;
    });

    thetas = next_thetas;
    updateParams("cost");
    updateParams("coeffs");
}
429

530
function render() {
    // Repaint the whole scene: background, axes, sample points, fitted curve.
    const mid_x = canvas_width / 2;
    const mid_y = canvas_height / 2;

    // Background.
    context.fillStyle = "#000000";
    context.fillRect(0, 0, canvas_width, canvas_height);

    // Horizontal axis through the centre.
    context.strokeStyle = "#ffffff";
    context.beginPath();
    context.moveTo(0, mid_y);
    context.lineTo(canvas_width, mid_y);
    context.stroke();

    // Vertical axis through the centre.
    context.beginPath();
    context.moveTo(mid_x, 0);
    context.lineTo(mid_x, canvas_height);
    context.stroke();

    // Sample points as white dots.
    // NOTE(review): dots are drawn with a fixed 5px radius while checkPoint
    // hit-tests against point_radius (canvas_width / 40) — possibly a
    // deliberate larger click target; confirm before unifying.
    context.fillStyle = "#ffffff";
    for (const point of points) {
        context.beginPath();
        context.arc(mid_x + point.x * extent, mid_y - point.y * extent, 5, 0, 2 * Math.PI);
        context.fill();
    }

    // Fitted polynomial over normalised x in [-1, 1), rasterised as 2x2 blue dots.
    context.fillStyle = "#0000ff";
    const step = 1 / canvas_width;
    for (let x = -1; x < 1; x += step) {
        context.fillRect(mid_x + x * extent, mid_y - getY(x) * extent, 2, 2);
    }
}
858

9-
function updateParams(variable) {
59+
function getY(x) {
    // Evaluate the fitting polynomial: h(x) = sum_{i=0..degree} thetas[i] * x^i.
    let result = 0;
    for (let power = 0; power <= degree; power++) {
        result += thetas[power] * Math.pow(x, power);
    }
    return result;
}
1066

67+
function updateParams(variable) {
    // Refresh one piece of simulation state and its on-page readout.
    // Accepted values: "degree", "alpha", "cost", "coeffs".
    switch (variable) {
        case "degree": {
            // Re-read the slider, show it, and restart with zeroed coefficients.
            degree = Number.parseInt(degree_input.value);
            degree_display.innerHTML = `Degree of fitting polynomial: ${degree}`;
            thetas = new Array(degree + 1).fill(0);
            updateParams("coeffs");
            break;
        }
        case "alpha": {
            // Slider holds log10(alpha), so the effective rate spans decades.
            alpha = Math.pow(10, Number.parseFloat(alpha_input.value));
            alpha_display.innerHTML = `Learning rate: ${alpha.toFixed(3)}`;
            break;
        }
        case "cost": {
            // Cost is undefined for an empty point set — blank the display.
            cost_display.innerHTML =
                points.length > 0 ? `Cost: ${calculateCost().toFixed(6)}` : "";
            break;
        }
        case "coeffs": {
            // Render the polynomial term by term: constant, linear, then x^i.
            const terms = thetas.map((theta, i) => {
                if (i === 0) {
                    return `${theta.toFixed(6)}`;
                }
                if (i === 1) {
                    return ` + ${theta.toFixed(6)} x`;
                }
                return ` + ${theta.toFixed(6)} x<sup>${i}</sup>`;
            });
            coeffs_display.innerHTML = `Fitting polynomial: ${terms.join("")}`;
            break;
        }
    }
}
12105

13106
function initParams() {
    // Reset the sample set, derive canvas-relative scales, and sync every
    // control/readout with the current slider positions.
    points = [];
    point_radius = canvas_width / 40;
    extent = canvas_width / 2;

    for (const control of ["degree", "alpha", "cost", "coeffs"]) {
        updateParams(control);
    }
}
116+
117+
function calculateCost() {
    // Mean-squared-error cost J(theta) = (1/2m) * sum_i (h(x_i) - y_i)^2.
    // Guard: the original divided by zero (yielding NaN) when the point set
    // was empty; define the cost of an empty set as 0 instead. All current
    // callers already guard on points.length, so this is backward compatible.
    if (points.length === 0) {
        return 0;
    }
    let sum = 0;
    for (let point of points) {
        sum += Math.pow(getY(point.x) - point.y, 2);
    }
    return sum / (2 * points.length);
}
124+
125+
function addPoint() {
    // Convert the last click from pixel coordinates (origin top-left,
    // y growing downward) to normalised plot coordinates (origin at canvas
    // centre, y growing upward). A click on an existing point removes it
    // (handled inside checkPoint) instead of adding a new one.
    const px = (click_x - canvas_width / 2) / extent;
    const py = (canvas_height / 2 - click_y) / extent;

    if (!checkPoint(px, py)) {
        points.push({ x: px, y: py });
    }
}
136+
137+
function checkPoint(x, y) {
    // If (x, y) lies within one point-radius (in normalised units) of an
    // existing point, delete that point and report the hit; otherwise
    // report a miss and leave the set untouched.
    const hit_radius = point_radius / extent;
    for (const candidate of points) {
        const dx = x - candidate.x;
        const dy = y - candidate.y;
        if (Math.sqrt(Math.pow(dx, 2) + Math.pow(dy, 2)) < hit_radius) {
            points = points.filter(p => p !== candidate);
            return true;
        }
    }
    return false;
}
146+
147+
function resetTheta() {
    // Zero every polynomial coefficient in place, then refresh the
    // cost and coefficient readouts.
    thetas.fill(0);
    updateParams("cost");
    updateParams("coeffs");
}
14154

155+
function clearPoints() {
    // Remove every sample point and reset the fit.
    // resetTheta() already refreshes the cost and coefficient displays,
    // so the duplicate updateParams("cost")/updateParams("coeffs") calls
    // that followed it have been dropped — the final UI state is identical.
    points = [];
    resetTheta();
}

gradient_descent/user_input.js

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
function clicked() {
    // Canvas click handler: delegate to addPoint, which either adds a new
    // sample or removes the one under the cursor.
    addPoint();
}
44

55
function moved() {

polynomial_regression/simulation.html

+4-1
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,8 @@ <h3>Brief Description</h3>
9494
Gaussian elimination + back-substitution, or inversion).
9595
<br>
9696
<br>
97-
Please refer to <a target="_blank" href="http://polynomialregression.drque.net/math.html">this website</a> for viewing the exact
97+
Please refer to <a target="_blank" href="http://polynomialregression.drque.net/math.html">this website</a> for
98+
viewing the exact
9899
mathematics involved in polynomial regression.
99100
</p>
100101

@@ -108,6 +109,8 @@ <h3>Brief Description</h3>
108109
fact that there is always a line (which is a 1 dimensional polynomial) joining 2 points.</li>
109110
<li>A polynomial of degree m requires at least m+1 points for a proper fit. If this condition is not satisfied, then
110111
the extra coefficients will be extremely low or zero, which badly skews the curve.</li>
112+
<li><a href="../gradient_descent/simulation.html">Gradient Descent</a> can be used for polynomial regression too,
113+
and adopts an iterative approach to find the best fit.</li>
111114
</ol>
112115

113116
<br>

0 commit comments

Comments
 (0)