\documentclass[pdf]{beamer}
\usepackage{outlines}
\mode<presentation>{\usetheme{}}
\title{Monte Carlo in Movie Production}
\subtitle{Ray Tracing and Sampling}
\author{Slavomir Kaslev \\
\href{mailto:[email protected]}{[email protected]}}
\begin{document}
\begin{frame}
\titlepage
\end{frame}
\begin{frame}{About me}
\begin{itemize}
\pause
\item My name is Slavomir Kaslev
\pause
\item Software engineer
\pause
\item I work at Worldwide FX as Head of R\&D
\end{itemize}
\end{frame}
\begin{frame}{About Worldwide FX}
\begin{itemize}
\pause
\item Founded in 2001
\pause
\item The first VFX studio in Bulgaria
\pause
\item $\sim 250$ visual artists
\pause
\item More than 115 feature film projects completed
\end{itemize}
\end{frame}
\begin{frame}{Studio demo reel}
\end{frame}
\begin{frame}{Software we use}
\begin{itemize}
\pause
\item Linux on workstations, farm nodes and servers
\pause
\item Mac OS and Windows on some workstations
\pause
\item Avid, Blender, Mudbox, Mari, Maya, Houdini, Katana, RenderMan, Nuke, RV, Massive, Photoshop, Unity, ZBrush, 3DEqualizer, ...
\pause
\item In-house tools
\pause
\item ``Software is eating the world'' \pause Why? \pause Hint: Dependent Types
\end{itemize}
\end{frame}
\begin{frame}{Rendering}
\begin{itemize}
\pause
\item A renderer is software that generates an image from a 3D scene description
\pause
\item 3delight, Arnold, RenderMan, V-Ray, mental ray, Maxwell, Redshift, etc.
\pause
\item Open source: Cycles, pbrt, appleseed, Mitsuba, LuxRender, etc.
\pause
\item In-house: Hyperion at Disney, Manuka at Weta Digital, Plume at ILM, Blue Sky, etc.
\pause
\item Ray tracing has won, at least in the movie industry. \pause Why?
\pause
Computer time is cheap, people time is expensive.
\pause
\item ``Physically Based Rendering: From Theory to Implementation'' by Matt Pharr and Greg Humphreys
\end{itemize}
\end{frame}
\begin{frame}{Business card ray tracer}
\includegraphics[scale=0.115]{images/card}
\end{frame}
\begin{frame}{The Rendering Equation}
\begin{align*}
L(p, \omega_o) = L_e(p, \omega_o) + \int_{S^2} f(p, \omega_o, \omega_i) L(t(p, \omega_i), -\omega_i) \left| \cos \theta_i \right| d \omega_i
\end{align*}
\end{frame}
\begin{frame}{Analytical solution: Operator formulation}
\begin{itemize}
\pause
\item The light transport operator
\begin{align*}
L = L_e + \boldsymbol T L
\end{align*}
\pause
\item What is the solution?
\pause
\begin{align*}
(\boldsymbol I - \boldsymbol T) L & = L_e \\
L & = (\boldsymbol I - \boldsymbol T)^{-1} L_e
\end{align*}
\pause
\item Power series representation: $L = L_e + \boldsymbol T L_e + \boldsymbol T^{2} L_e + \dots$ (numerical check on the next slide)
\pause
\begin{align*}
L & = \sum_{n=0}^{\infty} \boldsymbol T^{n} L_e
\end{align*}
\pause
\item The solution operator
\begin{align*}
\boldsymbol S & = (\boldsymbol I - \boldsymbol T)^{-1} \\
L & = \boldsymbol S L_e
\end{align*}
\end{itemize}
\end{frame}
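% Editorial addition, not part of the original talk: a minimal NumPy sketch
% that checks the power series / Neumann series solution against the direct
% solve for a small made-up contraction matrix T and emission vector Le.
\begin{frame}[fragile]{Sketch: checking the power series numerically}
A minimal NumPy sketch (illustrative only) comparing $\sum_{n} \boldsymbol T^{n} L_e$ with the direct solve $(\boldsymbol I - \boldsymbol T)^{-1} L_e$ for a toy contraction $\boldsymbol T$:
\small
\begin{verbatim}
import numpy as np

rng = np.random.default_rng(0)
T = 0.1 * rng.random((4, 4))   # toy "transport" matrix, spectral radius < 1
Le = rng.random(4)             # toy "emitted light" vector

L_direct = np.linalg.solve(np.eye(4) - T, Le)

L_series, term = np.zeros(4), Le.copy()
for n in range(50):            # L = Le + T Le + T^2 Le + ...
    L_series += term
    term = T @ term

print(abs(L_series - L_direct).max())   # ~1e-16
\end{verbatim}
\end{frame}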
\begin{frame}{Numerical solution: Integral over paths}
\small
\begin{align*}
L(p^{\prime} \to p) = L_e(p^{\prime} \to p) + \int_{A} f(p^{\prime\prime} \to p^{\prime} \to p) L(p^{\prime\prime} \to p^{\prime}) G(p^{\prime\prime} \leftrightarrow p^{\prime}) d A(p^{\prime\prime})
\end{align*}
\normalsize
\end{frame}
\begin{frame}{Generating paths}
\includegraphics[scale=0.4]{images/paths}
\end{frame}
\begin{frame}{Strategies for generating paths}
\begin{itemize}
\pause
\item Light tracing
\pause
\item Path tracing (toy sketch on the next slide)
\pause
\item Bidirectional path tracing
\pause
\item Vertex connection and merging (VCM)
\pause
\item Still an active topic of research
\end{itemize}
\end{frame}
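% Editorial addition, not part of the original talk: a deliberately tiny,
% fully runnable caricature of a path-tracing estimator. The "scene" is a
% single made-up constant: every bounce emits e and reflects a fraction a of
% the incoming light, so the exact answer of L = e + a L is e / (1 - a).
\begin{frame}[fragile]{Sketch: a toy path-tracing estimator}
A toy sketch of the path-tracing loop (illustrative only, not a production renderer); each random walk samples one light path, terminated by Russian roulette:
\small
\begin{verbatim}
import random

def sample_path(e, a, survival=0.7):
    L, throughput = 0.0, 1.0
    while True:
        L += throughput * e              # emitted term at this path vertex
        if random.random() >= survival:  # Russian roulette termination
            return L
        throughput *= a / survival       # reflectance / survival probability

e, a, N = 1.0, 0.5, 200_000
est = sum(sample_path(e, a) for _ in range(N)) / N
print(est, e / (1 - a))                  # ~2.0 vs exact 2.0
\end{verbatim}
\end{frame}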
\begin{frame}{Monte Carlo method}
\begin{itemize}
\pause
\item Monte Carlo methods are a broad class of computational algorithms that rely on repeated random sampling to obtain numerical results
\pause
\item Enrico Fermi first experimented with the Monte Carlo method while studying neutron diffusion in the 1930s
\pause
\item The modern version of the Monte Carlo method was invented in the late 1940s by Stanislaw Ulam
\pause
\item Monte Carlo methods were central to the simulations required for the Manhattan Project
\end{itemize}
\end{frame}
\begin{frame}{Monte Carlo method I}
Suppose we want to find the value of $I$ where
\bigskip
\begin{align*}
I = \int_{x \in [0, 1]^s} f(x) dx
\end{align*}
\bigskip
\pause
and also suppose we have a random variable $X \in [0, 1]^s$ with probability density function $p(x)$.
\pause
\bigskip
What can we say about the expected value of $f(X)$?
\pause
\bigskip
Well, by the definition of expected value, we know that
\bigskip
\begin{align*}
\mathbf{E}[f(X)] = \int_{x \in [0, 1]^s} f(x) p(x) dx
\end{align*}
\end{frame}
\begin{frame}{Monte Carlo method II}
What about the expected value of $\frac{f(X)}{p(X)}$?
\pause
\bigskip
Let's see
\begin{align*}
\mathbf{E}[\frac{f(X)}{p(X)}] &= \int_{x \in [0, 1]^s} \frac{f(x)}{p(x)} p(x) dx \\
& = \int_{x \in [0, 1]^s} f(x) dx = I
\end{align*}
\pause
\bigskip
Therefore
\begin{align*}
I = \mathbf{E}[\frac{f(X)}{p(X)}]
\end{align*}
\end{frame}
\begin{frame}{Monte Carlo method III}
\pause
The general idea is to approximate the integral we're interested in
\begin{align*}
I = \int_{x \in [0, 1]^s} f(x) dx
\end{align*}
\pause
by a sum
\begin{align*}
\widetilde{I_N} = \frac{1}{N} \sum_{i=1}^{N}{\frac{f(x_i)}{p(x_i)}}
\end{align*}
\pause
It can be shown that $\widetilde{I_N}$ is an unbiased estimator of $I$, which
converges to the correct value as $N \to \infty$ (a NumPy sketch follows on the next slide)
\pause
\begin{align*}
\mathbf{E}[\widetilde{I_N}] & = I \\
\mathbf{V}[\widetilde{I_N}] & = \frac{1}{N} \mathbf{V}[\frac{f(X)}{p(X)}] \\
\sigma[\widetilde{I_N}] & = \frac{1}{\sqrt{N}} \sigma[\frac{f(X)}{p(X)}]
\end{align*}
\end{frame}
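% Editorial addition, not part of the original talk: a minimal NumPy sketch of
% the estimator above with uniform sampling, p(x) = 1, on a made-up integrand.
\begin{frame}[fragile]{Sketch: the estimator in NumPy}
A minimal sketch (illustrative only) of $\widetilde{I_N} = \frac{1}{N} \sum_{i} \frac{f(x_i)}{p(x_i)}$ with uniform samples on $[0, 1]^2$, so $p(x) = 1$:
\small
\begin{verbatim}
import numpy as np

def f(x, y):                  # made-up integrand over the unit square
    return np.exp(-x * x - y * y)

rng = np.random.default_rng(0)
for N in (100, 10_000, 1_000_000):
    x, y = rng.random(N), rng.random(N)
    print(N, np.mean(f(x, y)))     # (1/N) sum f(x_i), since p = 1
\end{verbatim}
The error shrinks roughly like $N^{-1/2}$; the exact value is $\left( \frac{\sqrt{\pi}}{2} \operatorname{erf}(1) \right)^2 \approx 0.5577$.
\end{frame}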
\begin{frame}{Random Numbers: xkcd \#221}
\includegraphics[scale=0.77]{images/xkcd221}
\end{frame}
\begin{frame}{Quasi-Monte Carlo Method}
\begin{outline}
\pause
\1 The Koksma-Hlawka inequality
\begin{align*}
\left| \widetilde{I_N} - I \right| \leq \mathbf{V}[f] D_N^{*}(x_1, ..., x_N)
\end{align*}
\pause
\1 Roughly speaking, the star discrepancy $D_N^{*}(x_1, ..., x_N)$ of a point set is a
measure of how uniformly the sequence $x_1, ..., x_N$ is distributed in $[0, 1]^s$
\pause
\1 The main discrepancy conjecture
\begin{align*}
D_N^{*}(x_1, ..., x_N) \geq c_s \frac{(\ln{N})^{s}}{N}
\end{align*}
\pause
\2 Proved for $s \leq 2$ by W. M. Schmidt. The problem is still open in higher dimensions.
\pause
\1 Low discrepancy sequences: van der Corput, Halton, Hammersley, Sobol and others (van der Corput sketch on the next slide)
\begin{align*}
D_N^{*}(x_1, ..., x_N) \leq C \frac{(\ln{N})^{s}}{N}
\end{align*}
\end{outline}
\end{frame}
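% Editorial addition, not part of the original talk: a small sketch of the
% van der Corput sequence mentioned above (the radical inverse in base b);
% the Halton sequence uses one such sequence per dimension with distinct
% prime bases.
\begin{frame}[fragile]{Sketch: van der Corput sequence}
A small sketch (illustrative only) of the base-$b$ radical inverse behind the van der Corput and Halton sequences:
\small
\begin{verbatim}
def van_der_corput(n, base=2):
    # Reflect the base-b digits of n about the radix point.
    q, denom = 0.0, 1.0
    while n > 0:
        denom *= base
        n, digit = divmod(n, base)
        q += digit / denom
    return q

print([van_der_corput(n) for n in range(8)])
# [0.0, 0.5, 0.25, 0.75, 0.125, 0.625, 0.375, 0.875]
\end{verbatim}
\end{frame}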
\begin{frame}{Let's design a sampler: fract demo}
\begin{itemize}
\small
\pause
\item Python, NumPy, Z3 Theorem prover
\pause
\item Z3 is an open source Satisfiability Modulo Theories (SMT) solver by Microsoft Research (a minimal z3py example follows on the next slide)
\pause
\item https://github.com/skaslev/fract/blob/master/mc-production.ipynb
\pause
\item https://github.com/skaslev/pyman
\end{itemize}
\end{frame}
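% Editorial addition, not part of the original talk: a minimal z3py sketch
% showing the kind of constraint solving Z3 provides. The constraints here are
% made up for illustration; the actual fract sampler design is in the linked
% notebook.
\begin{frame}[fragile]{Sketch: asking Z3 for a sample position}
A minimal z3py sketch (illustrative only; the constraints are invented, the real design is in the notebook above):
\small
\begin{verbatim}
from z3 import Real, Solver, sat

x, y = Real('x'), Real('y')
s = Solver()
s.add(0 <= x, x <= 1, 0 <= y, y <= 1)  # stay inside the unit square
s.add(x + y >= 1)                      # avoid the corner at the origin
s.add(4 * x <= 1)                      # first column of a 4x4 stratification

if s.check() == sat:
    m = s.model()
    print(m[x], m[y])
\end{verbatim}
\end{frame}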
\begin{frame}{Future}
\begin{outline}
\pause
\1 ``It's hard to make predictions, especially about the future.'' \pause Niels Bohr
\pause
\1 Faster Monte Carlo convergence
\pause
\2 Can we do better than $O(N^{-1/2})$? \pause Maybe. \pause Maybe not.
\pause
\2 ``Advanced Mathematical Methods for Scientists and Engineers'' by Carl Bender
\pause
\1 Noise filtering and machine learning
\pause
\1 Better mathematical models
\pause
\2 Renderer based on Quantum Electrodynamics (QED) rather than classical approximations
\pause
\2 ``QED: The Strange Theory of Light and Matter'' by Richard Feynman
\pause
\2 ``Richard Feynman Lecture on Quantum Electrodynamics: QED'' https://www.youtube.com/watch?v=LPDP\_8X5Hug
\pause
\end{outline}
\end{frame}
\begin{frame}{The Feynman Algorithm}
\pause
\begin{itemize}
\item Write down the problem
\pause
\item Think real hard
\pause
\item Write down the solution
\end{itemize}
\end{frame}
\begin{frame}{$A x = b$}
\pause
\begin{itemize}
\item Write down the problem
\pause
\item Rewrite the problem as $A x = b$
\pause
\item Use an off-the-shelf solver or write a specialized one
\end{itemize}
\pause
``All problems in computer graphics can be solved with a matrix inversion.'' Jim Blinn
\end{frame}
\begin{frame}{Eigenvalue problems}
\pause
\begin{itemize}
\item Write down the problem
\pause
\item Rewrite the problem as $A x = \lambda x$
\pause
\item Use an off-the-shelf solver or write a specialized one (NumPy sketch on the next slide)
\end{itemize}
\end{frame}
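% Editorial addition, not part of the original talk: a minimal NumPy sketch of
% the two "rewrite and call an off-the-shelf solver" recipes above, using a
% made-up 2x2 matrix.
\begin{frame}[fragile]{Sketch: off-the-shelf solvers in NumPy}
A minimal sketch (illustrative only) of both recipes with NumPy's linear algebra routines:
\small
\begin{verbatim}
import numpy as np

A = np.array([[4.0, 1.0],
              [1.0, 3.0]])
b = np.array([1.0, 2.0])

x = np.linalg.solve(A, b)     # A x = b          -> [1/11, 7/11]
lam, V = np.linalg.eig(A)     # A v = lambda v   -> (7 +- sqrt(5)) / 2
print(x, lam)
\end{verbatim}
\end{frame}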
\begin{frame}{Perturbation theory}
\begin{itemize}
\pause
\item Write down the problem
\pause
\item Insert a ``small'' parameter $\epsilon$ so that the problem is trivial when $\epsilon = 0$
\pause
\item Guess the solution has the form $x = \sum_{n=0}^{\infty}{a_n \epsilon^n}$
\pause
\item Plug that back into the equation and solve for $a_n$
\pause
\item Sum the series and substitute $\epsilon = 1$ (worked example on the next slide)
\end{itemize}
\end{frame}
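% Editorial addition, not part of the original talk: a small worked example of
% the perturbation recipe above, applied to the quadratic x^2 + \epsilon x - 1 = 0
% (an equation chosen purely for illustration).
\begin{frame}{Perturbation theory: a small worked example}
For $x^2 + \epsilon x - 1 = 0$ the $\epsilon = 0$ problem is trivial: $x = 1$ (taking the positive root).
Guessing $x = 1 + a_1 \epsilon + a_2 \epsilon^2 + \dots$ and collecting powers of $\epsilon$ gives
\begin{align*}
\epsilon^1: \quad 2 a_1 + 1 &= 0 \quad\Rightarrow\quad a_1 = -\tfrac{1}{2} \\
\epsilon^2: \quad a_1^2 + 2 a_2 + a_1 &= 0 \quad\Rightarrow\quad a_2 = \tfrac{1}{8}
\end{align*}
so $x \approx 1 - \tfrac{\epsilon}{2} + \tfrac{\epsilon^2}{8}$, which matches the exact root
$x = -\tfrac{\epsilon}{2} + \sqrt{1 + \tfrac{\epsilon^2}{4}}$.
At $\epsilon = 1$ the truncated series gives $0.625$ versus the exact $\tfrac{\sqrt{5} - 1}{2} \approx 0.618$.
\end{frame}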
\begin{frame}{Further reading}
\small
\begin{itemize}
\item ``Physically Based Rendering: From Theory to Implementation'' by Matt Pharr and Greg Humphreys
\item ``Robust Monte Carlo Methods for Light Transport Simulation'' by Eric Veach, Ph.D. dissertation
\item ``Light Transport Simulation with Vertex Connection and Merging'' by Iliyan Georgiev, Jaroslav Křivánek, Tomáš Davidovič, Philipp Slusallek
\item ``Random Number Generation and Quasi-Monte Carlo Methods'' by Harald Niederreiter
\item ``Quantum Mechanics and Path Integrals'' by Richard Feynman
\item ``An Introduction to the Analysis of Algorithms'' by Robert Sedgewick and Philippe Flajolet
\item ``Mathematical Physics'' by Carl Bender, https://www.youtube.com/watch?v=LYNOGk3ZjFM
\item ``fract'', source code from this presentation, https://github.com/skaslev/fract
\end{itemize}
\end{frame}
\begin{frame}{Questions?}
\end{frame}
\begin{frame}{Thank you}
\end{frame}
\end{document}