Skip to content

Commit bc0cae2

Browse files
authored
Merge pull request #127 from jwvdm/master
Minor updates to bio, added recent publications
2 parents 88e8806 + feaf117 commit bc0cae2

File tree

2 files changed

+152
-20
lines changed

2 files changed

+152
-20
lines changed

_bibliography/JanWillemVanDeMeent.bib

Lines changed: 145 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,147 @@
1+
@inproceedings{eijkelboom2025controlled,
2+
title = {Controlled Generation with Equivariant Variational Flow Matching},
3+
author = {Eijkelboom, Floor and Zimmermann, Heiko and Bekkers, Erik and Welling, Max and Naesseth, Christian and {van de Meent}, Jan-Willem},
4+
booktitle = {International Conference on Machine Learning},
5+
year = {2025},
6+
abbr = {ICML}
7+
}
8+
9+
@inproceedings{guzmancordero2025exponential,
10+
title = {Exponential Family Variational Flow Matching for Tabular Data Generation},
11+
author = {{Guzm\'an-Cordero}*, Andr\'es and Eijkelboom*, Floor and {van de Meent}, Jan-Willem},
12+
booktitle = {International Conference on Machine Learning},
13+
year = {2025},
14+
abbr = {ICML}
15+
}
16+
17+
@inproceedings{zhdanov2025erwin,
18+
title = {Erwin: {{A Tree-based Hierarchical Transformer}} for {{Large-scale Physical Systems}}},
19+
shorttitle = {Erwin},
20+
author = {Zhdanov, Maksim and Welling, Max and {van de Meent}, Jan-Willem},
21+
booktitle = {International Conference on Machine Learning},
22+
year = {2025},
23+
number = {arXiv:2502.17019},
24+
eprint = {2502.17019},
25+
primaryclass = {cs},
26+
publisher = {arXiv},
27+
doi = {10.48550/arXiv.2502.17019},
28+
urldate = {2025-05-06},
29+
abstract = {Large-scale physical systems defined on irregular grids pose significant scalability challenges for deep learning methods, especially in the presence of long-range interactions and multi-scale coupling. Traditional approaches that compute all pairwise interactions, such as attention, become computationally prohibitive as they scale quadratically with the number of nodes. We present Erwin, a hierarchical transformer inspired by methods from computational many-body physics, which combines the efficiency of tree-based algorithms with the expressivity of attention mechanisms. Erwin employs ball tree partitioning to organize computation, which enables linear-time attention by processing nodes in parallel within local neighborhoods of fixed size. Through progressive coarsening and refinement of the ball tree structure, complemented by a novel cross-ball interaction mechanism, it captures both fine-grained local details and global features. We demonstrate Erwin's effectiveness across multiple domains, including cosmology, molecular dynamics, and particle fluid dynamics, where it consistently outperforms baseline methods both in accuracy and computational efficiency.},
30+
archiveprefix = {arXiv},
31+
keywords = {Computer Science - Artificial Intelligence,Computer Science - Computer Vision and Pattern Recognition,Computer Science - Machine Learning},
32+
abbr = {ICML},
33+
html = {https://arxiv.org/abs/2502.17019},
34+
pdf = {https://arxiv.org/pdf/2502.17019}
35+
}
36+
37+
@article{dijkman2025learning,
38+
title = {Learning {{Neural Free-Energy Functionals}} with {{Pair-Correlation Matching}}},
39+
author = {Dijkman, Jacobus and Dijkstra, Marjolein and {van Roij}, Ren{\'e} and Welling, Max and {van de Meent}, Jan-Willem and Ensing, Bernd},
40+
year = {2025},
41+
month = feb,
42+
journal = {Physical Review Letters},
43+
volume = {134},
44+
number = {5},
45+
pages = {056103},
46+
publisher = {American Physical Society},
47+
doi = {10.1103/PhysRevLett.134.056103},
48+
urldate = {2025-05-06},
49+
abstract = {The intrinsic Helmholtz free-energy functional, the centerpiece of classical density functional theory, is at best only known approximately for 3D systems. Here we introduce a method for learning a neural-network approximation of this functional by exclusively training on a dataset of radial distribution functions, circumventing the need to sample costly heterogeneous density profiles in a wide variety of external potentials. For a supercritical Lennard-Jones system with planar symmetry, we demonstrate that the learned neural free-energy functional accurately predicts inhomogeneous density profiles under various complex external potentials obtained from simulations.},
50+
abbr = {PRL},
51+
html = {https://journals.aps.org/prl/abstract/10.1103/PhysRevLett.134.056103},
52+
pdf = {https://arxiv.org/pdf/2403.15007}
53+
}
54+
55+
@inproceedings{biza25onrobot,
56+
author = {Ondrej Biza and
57+
Thomas Weng and
58+
Lingfeng Sun and
59+
Karl Schmeckpeper and
60+
Tarik Kelestemur and
61+
Yecheng Jason Ma and
62+
Robert Platt and
63+
Jan{-}Willem {van de Meent} and
64+
Lawson L. S. Wong},
65+
title = {On-Robot Reinforcement Learning with Goal-Contrastive Rewards},
66+
booktitle = {Proceedings of the 2025 IEEE International Conference on Robotics and Automation, ICRA'25},
67+
year = {2025},
68+
abbr = {ICRA},
69+
html = {https://arxiv.org/abs/2410.19989},
70+
pdf = {https://arxiv.org/pdf/2410.19989}
71+
}
72+
73+
@inproceedings{kunze2024practical,
74+
title = {Practical Shuffle Coding},
75+
booktitle = {Advances in Neural Information Processing Systems},
76+
author = {Kunze, Julius and Severo, Daniel and {van de Meent}, Jan-Willem and Townsend, James},
77+
editor = {Globerson, A. and Mackey, L. and Belgrave, D. and Fan, A. and Paquet, U. and Tomczak, J. and Zhang, C.},
78+
year = {2024},
79+
volume = {37},
80+
pages = {84081--84113},
81+
abbr = {NeurIPS},
82+
publisher = {Curran Associates, Inc.},
83+
html = {https://proceedings.neurips.cc/paper_files/paper/2024/hash/98d17a9632e1534bae96793e99dc3c2d-Abstract-Conference.html},
84+
pdf = {https://proceedings.neurips.cc/paper_files/paper/2024/file/98d17a9632e1534bae96793e99dc3c2d-Paper-Conference.pdf}
85+
}
86+
87+
@inproceedings{eijkelboom2024variational,
88+
title = {Variational Flow Matching for Graph Generation},
89+
booktitle = {Advances in Neural Information Processing Systems},
90+
author = {Eijkelboom, Floor and Bartosh, Grigory and Naesseth, Christian A. and Welling, Max and {van de Meent}, Jan-Willem},
91+
editor = {Globerson, A. and Mackey, L. and Belgrave, D. and Fan, A. and Paquet, U. and Tomczak, J. and Zhang, C.},
92+
year = {2024},
93+
volume = {37},
94+
pages = {11735--11764},
95+
publisher = {Curran Associates, Inc.},
96+
abbr = {NeurIPS},
97+
html = {https://proceedings.neurips.cc/paper_files/paper/2024/hash/15b780350b302a1bf9a3bd273f5c15a4-Abstract-Conference.html},
98+
pdf = {https://proceedings.neurips.cc/paper_files/paper/2024/file/15b780350b302a1bf9a3bd273f5c15a4-Paper-Conference.pdf}
99+
}
100+
101+
@inproceedings{zimmermann2024visa,
102+
title = {{{VISA}}: {{Variational}} Inference with Sequential Sample-Average Approximations},
103+
booktitle = {Advances in Neural Information Processing Systems},
104+
author = {Zimmermann, Heiko and Naesseth, Christian A. and {van de Meent}, Jan-Willem},
105+
editor = {Globerson, A. and Mackey, L. and Belgrave, D. and Fan, A. and Paquet, U. and Tomczak, J. and Zhang, C.},
106+
year = {2024},
107+
volume = {37},
108+
pages = {138789--138808},
109+
publisher = {Curran Associates, Inc.},
110+
abbr = {NeurIPS},
111+
html = {https://proceedings.neurips.cc/paper_files/paper/2024/hash/fa948624dfde013671e72c1a7ca4aebc-Abstract-Conference.html},
112+
pdf = {https://proceedings.neurips.cc/paper_files/paper/2024/file/fa948624dfde013671e72c1a7ca4aebc-Paper-Conference.pdf}
113+
}
114+
115+
@inproceedings{mcinerney2024reducing,
116+
title = {Towards {{Reducing Diagnostic Errors}} with {{Interpretable Risk Prediction}}},
117+
author = {McInerney, Denis Jered and Dickinson, William and Flynn, Lucy C. and Young, Andrea C. and Young, Geoffrey S. and {van de Meent}, Jan-Willem and Wallace, Byron C.},
118+
booktitle = {2024 Annual Conference of the North American Chapter of the Association for Computational Linguistics (NAACL)},
119+
year = {2024},
120+
abstract = {Many diagnostic errors occur because clinicians cannot easily access relevant information in patient Electronic Health Records (EHRs). In this work we propose a method to use LLMs to identify pieces of evidence in patient EHR data that indicate increased or decreased risk of specific diagnoses; our ultimate aim is to increase access to evidence and reduce diagnostic errors. In particular, we propose a Neural Additive Model to make predictions backed by evidence with individualized risk estimates at time-points where clinicians are still uncertain, aiming to specifically mitigate delays in diagnosis and errors stemming from an incomplete differential. To train such a model, it is necessary to infer temporally fine-grained retrospective labels of eventual "true" diagnoses. We do so with LLMs, to ensure that the input text is from before a confident diagnosis can be made. We use an LLM to retrieve an initial pool of evidence, but then refine this set of evidence according to correlations learned by the model. We conduct an in-depth evaluation of the usefulness of our approach by simulating how it might be used by a clinician to decide between a pre-defined list of differential diagnoses.},
121+
abbr = {NAACL},
122+
html = {https://aclanthology.org/2024.naacl-long.399/},
123+
pdf = {https://aclanthology.org/2024.naacl-long.399.pdf}
124+
}
125+
126+
1127
@inproceedings{kunze2024entropy,
2128
title={Entropy Coding of Unordered Data Structures},
3129
author={Julius Kunze and Daniel Severo and Giulio Zani and Jan-Willem {van de Meent} and James Townsend},
4130
booktitle={International Conference on Learning Representations (ICLR)},
5131
year={2024},
6-
abbr={ICLR}
132+
abbr={ICLR},
133+
html={https://openreview.net/forum?id=PggJ9CbEN7},
134+
pdf={https://openreview.net/pdf?id=PggJ9CbEN7}
7135
}
8136

9-
@inproceedings{
10-
mcinerney2023chill,
11-
title={{CH}i{LL}: Zero-shot Custom Interpretable Feature Extraction from Clinical Notes with Large Language Models},
12-
author={McInerney, Denis Jered and Young, Geoffrey and van de Meent, Jan-Willem and Wallace, Byron},
13-
booktitle={The 2023 Conference on Empirical Methods in Natural Language Processing},
14-
year={2023},
15-
url={https://openreview.net/forum?id=TSdWY9GaHA},
16-
abbr={EMNLP}
137+
@inproceedings{mcinerney2023chill,
138+
title={{CH}i{LL}: Zero-shot Custom Interpretable Feature Extraction from Clinical Notes with Large Language Models},
139+
author={McInerney, Denis Jered and Young, Geoffrey and van de Meent, Jan-Willem and Wallace, Byron},
140+
booktitle={The 2023 Conference on Empirical Methods in Natural Language Processing},
141+
year={2023},
142+
html={https://openreview.net/forum?id=TSdWY9GaHA},
143+
pdf={https://openreview.net/pdf?id=TSdWY9GaHA},
144+
abbr={EMNLP}
17145
}
18146

19147
@inproceedings{
@@ -23,7 +151,8 @@ @inproceedings{
23151
booktitle={Thirty-seventh Conference on Neural Information Processing Systems (to appear)},
24152
month={12},
25153
year={2023},
26-
html={https://openreview.net/forum?id=1tviRBNxI9},
154+
html={https://proceedings.neurips.cc/paper_files/paper/2023/hash/1c12ccfc7720f6b680edea17300bfc2b-Abstract-Conference.html},
155+
pdf={https://proceedings.neurips.cc/paper_files/paper/2023/file/1c12ccfc7720f6b680edea17300bfc2b-Paper-Conference.pdf},
27156
abbr={NeurIPS}
28157
}
29158

@@ -34,7 +163,8 @@ @inproceedings{
34163
booktitle={7th Annual Conference on Robot Learning},
35164
month={11},
36165
year={2023},
37-
html={https://openreview.net/forum?id=RaNAaxZfKi8},
166+
html={https://proceedings.mlr.press/v229/biza23a.html},
167+
pdf={https://proceedings.mlr.press/v229/biza23a/biza23a.pdf},
38168
abbr={CoRL}
39169
}
40170

@@ -47,8 +177,8 @@ @inproceedings{sennesh2023string
47177
abstract = {A growing body of research on probabilistic programs and causal models has highlighted the need to reason compositionally about model classes that extend directed graphical models. Both probabilistic programs and causal models define a joint probability density over a set of random variables, and exhibit sparse structure that can be used to reason about causation and conditional independence. This work builds on recent work on Markov categories of probabilistic mappings to define a category whose morphisms combine a joint density, factorized over each sample space, with a deterministic mapping from samples to return values. This is a step towards closing the gap between recent category-theoretic descriptions of probability measures, and the operational definitions of factorized densities that are commonly employed in probabilistic programming and causal inference.},
48178
archiveprefix = {arxiv},
49179
keywords = {Computer Science - Machine Learning,Computer Science - Programming Languages,Mathematics - Category Theory,Mathematics - Probability},
50-
html = {https://arxiv.org/abs/2305.02506},
51-
pdf = {https://arxiv.org/pdf/2305.02506.pdf},
180+
html = {https://openreview.net/forum?id=tfNdfCEWy2},
181+
pdf = {https://arxiv.org/pdf/2305.02506},
52182
abbr={ACT}
53183
}
54184

@@ -166,7 +296,7 @@ @article{sennesh2022interoception
166296
pages = {108242},
167297
issn = {0301-0511},
168298
doi = {10.1016/j.biopsycho.2021.108242},
169-
url = {https://www.sciencedirect.com/science/article/pii/S0301051121002350},
299+
html = {https://www.sciencedirect.com/science/article/pii/S0301051121002350},
170300
urldate = {2022-04-22},
171301
abstract = {The brain regulates the body by anticipating its needs and attempting to meet them before they arise – a process called allostasis. Allostasis requires a model of the changing sensory conditions within the body, a process called interoception. In this paper, we examine how interoception may provide performance feedback for allostasis. We suggest studying allostasis in terms of control theory, reviewing control theory’s applications to related issues in physiology, motor control, and decision making. We synthesize these by relating them to the important properties of allostatic regulation as a control problem. We then sketch a novel formalism for how the brain might perform allostatic control of the viscera by analogy to skeletomotor control, including a mathematical view on how interoception acts as performance feedback for allostasis. Finally, we suggest ways to test implications of our hypotheses.},
172302
langid = {english},
@@ -422,7 +552,7 @@ @inproceedings{tolpin_ifl_2016
422552
pages = {6:1--6:12},
423553
articleno = {6},
424554
numpages = {12},
425-
url = {http://doi.acm.org/10.1145/3064899.3064910},
555+
html = {http://doi.acm.org/10.1145/3064899.3064910},
426556
doi = {10.1145/3064899.3064910},
427557
acmid = {3064910},
428558
publisher = {ACM},

_people/JanWillemVanDeMeent.md

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -8,16 +8,15 @@ office: Science Park, Lab 42, L4.13
88
lab: AMLab and Delta Lab
99
institute: Informatics Institute
1010
university: University of Amsterdam
11-
one_liner: Probabilistic programming, inference, deep learning, and their applications.
11+
one_liner: AI for scalable and data-efficient scientific computation.
1212
description: |
13-
Dr. Jan-Willem van de Meent is an Associate Professor (Universitair Hoofddocent) at the University of Amsterdam. He co-directs the [AMLab](https://amlab.science.uva.nl/) with Max Welling and co-directs the [Uva Bosch Delta Lab](https://ivi.fnwi.uva.nl/uvaboschdeltalab/) with Theo Gevers. He previously held a position as an Assistant Professor at Northeastern University, where he continues to co-advise and collaborate. Prior to becoming faculty at Northeastern, he held a postdoctoral position with Frank Wood at Oxford, as well as a postdoctoral position with Chris Wiggins and Ruben Gonzalez at Columbia University. He carried out his PhD research in biophysics at Leiden and Cambridge with Wim van Saarloos and Ray Goldstein.
13+
Dr. Jan-Willem van de Meent is an Associate Professor (Universitair Hoofddocent) at the University of Amsterdam. He directs the [AMLab](https://amlab.science.uva.nl/), co-directs the [UvA Bosch Delta Lab](https://ivi.fnwi.uva.nl/uvaboschdeltalab/), and directs the [Amsterdam ELLIS Unit](https://ivi.fnwi.uva.nl/ellis/). He previously held a position as an Assistant Professor at Northeastern University, where he continues to co-advise and collaborate. Prior to becoming faculty at Northeastern, he held a postdoctoral position with Frank Wood at Oxford, as well as a postdoctoral position with Chris Wiggins and Ruben Gonzalez at Columbia University. He carried out his PhD research in biophysics at Leiden and Cambridge with Wim van Saarloos and Ray Goldstein. He served as a founding co-chair of the international conference on probabilistic programming ([PROBPROG](https://probprog.cc/)) and served as a program chair for the international conference on artificial intelligence and statistics ([AISTATS](https://aistats.org/aistats2023/)). He is the recipient of numerous grants and awards, including an NWO Rubicon Fellowship and an NSF CAREER award.
14+
15+
Jan-Willem van de Meent’s research seeks to understand what methods in AI have the potential to generalize across diverse application domains, and how we can think compositionally about such methods. One aspect of his work focuses on methods development in generative AI, deep learning, and probabilistic programming. He also collaborates extensively in a range of application domains. In the past he has worked on problems in biophysics, neuroscience, healthcare, and robotics. His current collaborations focus on physical chemistry, fluid mechanics, and materials science. The two problems he currently cares about most are using AI to make scientific computation more scalable, and maximizing the data-efficiency of AI methods in the context of scientific domains.
1416
15-
Jan-Willem van de Meent’s group develops models for artificial intelligence by combining probabilistic programming and deep learning. A major theme in this work is understanding how we can develop data-efficient models in machine learning by incorporating knowledge of an underlying physical system, causal structure, or symmetries of the underlying domain. At a technical level, his group develops inference methods for probabilistic programming systems. He is one of the creators of [Anglican](https://probprog.github.io/anglican/), a probabilistic language based on Clojure and of [Probabilistic Torch](https://github.com/probtorch/probtorch), a library for deep generative models that extends PyTorch. He is also an author on a forthcoming book on probabilistic programming, a draft of which is available on arXiv. To ground methodological work in practice, his group collaborates with researchers in neuroscience, NLP, healthcare, robotics, physics, and chemistry.
1617
17-
Jan-Willem van de Meent served as a founding co-chair of the international conference on probabilistic programming ([PROBPROG](https://probprog.cc/)). He is currently serving as a program chair for the international conference on artificial intelligence and statistics ([AISTATS](https://aistats.org/aistats2023/)). He was the recipient of an NWO Rubicon Fellowship and of an NSF CAREER award.
1818
img: assets/img/JanWillemVanDeMeent.jpg
1919
img_promo:
20-
personal_page: https://jwvdm.github.io
2120
scholar_userid: aCGsfUAAAAAJ
2221
github_username: jwvdm
2322
twitter_username: jwvdm
@@ -32,6 +31,9 @@ category: Staff
3231
<div class="publications">
3332
<h2>Recent Publications</h2>
3433

34+
<h3>2025</h3>
35+
{% bibliography --file JanWillemVanDeMeent -q @*[year=2025]* %}
36+
3537
<h3>2024</h3>
3638
{% bibliography --file JanWillemVanDeMeent -q @*[year=2024]* %}
3739

0 commit comments

Comments
 (0)