Skip to content

Commit db1896d

Browse files
authored
Merge pull request #24 from ReScience/10.5281_zenodo.5217602
Added entry 10.5281/zenodo.5217602
2 parents 112d6f3 + 634e889 commit db1896d

File tree

3 files changed

+140
-0
lines changed

3 files changed

+140
-0
lines changed

10.5281_zenodo.5217602/article.bib

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
@Article {Varma:2021,
2+
author = {Mukund Varma and Nishant Prabhu},
3+
title = {{[Re] On the Relationship between Self-Attention and Convolutional Layers}},
4+
journal = {ReScience C},
5+
year = {2021},
6+
month = {8},
7+
volume = {7},
8+
number = {1},
9+
pages = {{#6}},
10+
doi = {10.5281/zenodo.5217602},
11+
url = {https://zenodo.org/record/5217602/files/article.pdf},
12+
code_url = {https://github.com/NishantPrabhu/Self-Attention-and-Convolutions},
13+
code_doi = {},
14+
code_swh = {swh:1:dir:6ab40b1686ee05bc3f9413ced6b1a84c6b203814},
15+
data_url = {},
16+
data_doi = {},
17+
review_url = {https://github.com/ReScience/submissions/issues/53},
18+
type = {Replication},
19+
language = {Python},
20+
domain = {},
21+
keywords = {rescience c, rescience x, python, pytorch, self-attention}
22+
}

10.5281_zenodo.5217602/article.pdf

4.91 MB
Binary file not shown.

10.5281_zenodo.5217602/article.yaml

Lines changed: 118 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,118 @@
1+
# To be filled by the author(s) at the time of submission
2+
# -------------------------------------------------------
3+
4+
# Title of the article:
5+
# - For a successful replication, it should be prefixed with "[Re]"
6+
# - For a failed replication, it should be prefixed with "[¬Re]"
7+
# - For other article types, no instruction (but please, not too long)
8+
title: "[Re] On the Relationship between Self-Attention and Convolutional Layers"
9+
10+
# List of authors with name, orcid number, email and affiliation
11+
# Affiliation "*" means contact author (required even for single-authored papers)
12+
authors:
13+
- name: Mukund Varma
14+
orcid: 0000-0001-6480-3126
15+
16+
affiliations: 1
17+
18+
- name: Nishant Prabhu
19+
orcid: 0000-0001-8776-1993
20+
21+
affiliations: 1,* # * is for contact author
22+
23+
# List of affiliations with code (corresponding to author affiliations), name
24+
# and address. You can also use these affiliations to add text such as "Equal
25+
# contributions" as name (with no address).
26+
affiliations:
27+
- code: 1
28+
name: Indian Institute of Technology Madras
29+
address: Chennai, India
30+
31+
# List of keywords (adding the programming language might be a good idea)
32+
keywords: rescience c, rescience x, python, pytorch, self-attention
33+
34+
# Code URL and DOI/SWH (url is mandatory for replication, doi after acceptance)
35+
# You can get a DOI for your code from Zenodo, or an SWH identifier from
36+
# Software Heritage.
37+
# see https://guides.github.com/activities/citable-code/
38+
code:
39+
- url: https://github.com/NishantPrabhu/Self-Attention-and-Convolutions
40+
- doi:
41+
- swh: swh:1:dir:6ab40b1686ee05bc3f9413ced6b1a84c6b203814
42+
43+
# Data URL and DOI (optional if no data)
44+
data:
45+
- url:
46+
- doi:
47+
48+
# Information about the original article that has been replicated
49+
replication:
50+
- cite: "Jean-Baptiste Cordonnier and Andreas Loukas and Martin Jaggi.
51+
On the Relationship between Self-Attention and Convolutional Layers.
52+
International Conference on Learning Representations." # Full textual citation
53+
- bib: # Bibtex key (if any) in your bibliography file
54+
- url: https://arxiv.org/pdf/1911.03584.pdf # URL to the PDF, try to link to a non-paywall version
55+
- doi: # Regular digital object identifier
56+
57+
# Don't forget to surround abstract with double quotes
58+
abstract: "In this report, we perform a detailed study on the paper 'On the Relationship between Self-Attention and Convolutional Layers', which provides theoretical and experimental evidence that self attention layers can behave like convolutional layers.
59+
The proposed method does not obtain state-of-the-art performance but rather answers an interesting question - do self-attention layers process images in a similar manner to convolutional layers?
60+
This has inspired many recent works which propose fully-attentional models for image recognition.
61+
We focus on experimentally validating the claims of the original paper and our inferences from the results led us to propose a new variant of the attention operation - Hierarchical Attention.
62+
The proposed method shows significantly improved performance with fewer parameters, hence validating our hypothesis.
63+
To facilitate further study, all the code used in our experiments are publicly available here - https://github.com/NishantPrabhu/Self-Attention-and-Convolutions."
64+
65+
# Bibliography file (yours)
66+
bibliography: bibliography.bib
67+
68+
# Type of the article
69+
# Type can be:
70+
# * Editorial
71+
# * Letter
72+
# * Replication
73+
type: Replication
74+
75+
# Scientific domain of the article (e.g. Computational Neuroscience)
76+
# (one domain only & try to be not overly specific)
77+
domain:
78+
79+
# Coding language (main one only if several)
80+
language: Python
81+
82+
83+
# To be filled by the author(s) after acceptance
84+
# -----------------------------------------------------------------------------
85+
86+
# For example, the URL of the GitHub issue where review actually occured
87+
review:
88+
- url: https://github.com/ReScience/submissions/issues/53
89+
90+
contributors:
91+
- name: Olivia Guest
92+
orcid: 0000-0002-1891-0972
93+
role: editor
94+
- name: Nicholas Sexton
95+
orcid: 0000-0003-1236-1711
96+
role: reviewer
97+
- name: Xiaoliang (Ken) Luo
98+
orcid: 0000-0002-5297-2114
99+
role: reviewer
100+
101+
# This information will be provided by the editor
102+
dates:
103+
- received: 03 April, 2021
104+
- accepted: 30 Jul, 2021
105+
- published: 27 Aug, 2021
106+
107+
# This information will be provided by the editor
108+
article:
109+
- number: 6 # Article number will be automatically assigned during publication
110+
- doi: 10.5281/zenodo.5217602 # DOI from Zenodo
111+
- url: https://zenodo.org/record/5217602/files/article.pdf # Final PDF URL (Zenodo or rescience website?)
112+
113+
# This information will be provided by the editor
114+
journal:
115+
- name: "ReScience C"
116+
- issn: 2430-3658
117+
- volume: 7
118+
- issue: 1

0 commit comments

Comments
 (0)