
Commit 3331225

Initial commit of ONNX.js v0.1.0

197 files changed (+26762 -0 lines)


.clang-format

+7
@@ -0,0 +1,7 @@
---
Language: JavaScript
BasedOnStyle: Google
ColumnLimit: 120
---
Language: Cpp
BasedOnStyle: LLVM

.gitignore

+14
@@ -0,0 +1,14 @@
dist/
node_modules/
types/

lib/**/*.js
lib/**/*.js.map
test/**/*.js
test/**/*.js.map
tools/**/*.js
tools/**/*.js.map

npm-debug.log
.DS_Store
yarn-error.log

.gitmodules

+12
@@ -0,0 +1,12 @@
[submodule "deps/emsdk"]
    path = deps/emsdk
    url = https://github.com/juj/emsdk.git
    ignore = dirty
[submodule "deps/eigen"]
    path = deps/eigen
    url = https://github.com/eigenteam/eigen-git-mirror.git
    ignore = dirty
[submodule "deps/data"]
    path = deps/data
    url = https://github.com/Microsoft/onnxjs-demo.git
    branch = data

.huskyrc

+6
@@ -0,0 +1,6 @@
{
  "skipCI": false,
  "hooks": {
    "pre-commit": "npm run lint && npm run format && node -e \"var lines = require('child_process').execSync('git ls-files -m').toString(); var tsFound = lines.split('\\n').map(i => i.trim()).find(i => i.endsWith('.ts')); if (tsFound) { console.error('ERR File not formatted: ' + tsFound); process.exit(1); }\""
  }
}
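
The pre-commit hook packs its check into a single `node -e` one-liner. Unrolled into a standalone script, it is equivalent to the following sketch (hypothetical file `check-ts-formatted.js`; the hook itself runs the code inline):

```js
// check-ts-formatted.js (hypothetical): equivalent of the inline check above.
// After `npm run lint` and `npm run format` have run, any .ts file that
// still shows up as modified was changed by the formatter, so the commit
// is rejected until the formatted file is staged.
var lines = require('child_process').execSync('git ls-files -m').toString();
var tsFound = lines
    .split('\n')
    .map(i => i.trim())
    .find(i => i.endsWith('.ts'));
if (tsFound) {
  console.error('ERR File not formatted: ' + tsFound);
  process.exit(1);
}
```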

.npmignore

+23
@@ -0,0 +1,23 @@
benchmark/
deps/
examples/
node_modules/
src/
test/
tools/

lib/**/*.ts

.vscode

.clang-format
.huskyrc
.gitmodules
.npmrc
.npmignore
npm-debug.log

karma.conf.js
tsconfig.json
tslint.json
webpack.config.js

.vscode/launch.json

+34
@@ -0,0 +1,34 @@
{
  // Use IntelliSense to learn about possible attributes.
  // Hover to view descriptions of existing attributes.
  // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
  "version": "0.2.0",
  "configurations": [
    {
      "type": "chrome",
      "request": "attach",
      "name": "Attach to Chrome",
      "port": 9333,
      "webRoot": "${workspaceFolder}",
      "sourceMaps": true,
      "smartStep": true
    },
    {
      "type": "node",
      "request": "launch",
      "name": "Launch Mocha Tests (node)",
      "program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
      "args": [
        "-u",
        "tdd",
        "--timeout",
        "999999",
        "--colors",
        "${workspaceFolder}/test/unittest"
      ],
      "internalConsoleOptions": "openOnSessionStart",
      "sourceMaps": true,
      "smartStep": true
    }
  ]
}

.vscode/settings.json

+28
@@ -0,0 +1,28 @@
// Place your settings in this file to overwrite default and user settings.
{
  "search.exclude": {
    "**/node_modules": true,
    "./dist": true,
    "./types": true,
  },
  "files.exclude": {
    "lib/**/*.js.map": true,
    "lib/**/*.js": true,
  },
  "tslint.enable": true,
  "tslint.run": "onType",
  "tslint.configFile": "tslint.json",
  "files.trimTrailingWhitespace": true,
  "editor.tabSize": 2,
  "editor.insertSpaces": true,
  "[typescript]": {
    "editor.formatOnSave": true
  },
  "editor.rulers": [120],
  "clang-format.style": "file",
  "files.insertFinalNewline": true,
  "editor.detectIndentation": false,
  "editor.wrappingIndent": "none",
  "typescript.tsdk": "node_modules/typescript/lib",
  "clang-format.executable": "${workspaceRoot}/node_modules/.bin/clang-format"
}

.vscode/tasks.json

+13
@@ -0,0 +1,13 @@
{
  // See https://go.microsoft.com/fwlink/?LinkId=733558
  // for the documentation about the tasks.json format
  "version": "2.0.0",
  "tasks": [
    {
      "label": "test: suite0",
      "group": "test",
      "type": "shell",
      "command": "${workspaceFolder}/tools/test-runner-cli suite0"
    }
  ]
}

CONTRIBUTING.md

+14
@@ -0,0 +1,14 @@

You are welcome to contribute to ONNX.js!

You can contribute to ONNX.js in the following ways:

* Contribute to core module functionality
  * For example: implement ONNX operators to support more models, implement optimizations to the model execution framework, etc.
* Contribute bug fixes.
* Review the [source code changes](https://github.com/Microsoft/onnxjs/pulls).
* [Report issues](https://github.com/Microsoft/onnxjs/issues) and help us verify fixes as they are checked in.

All changes must be reviewed before they are merged. Please submit a [pull request](https://help.github.com/articles/about-pull-requests/) for review.

This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [email protected] with any additional questions or comments.

LICENSE

+11
@@ -0,0 +1,11 @@
ONNX.js

Copyright (c) Microsoft Corporation. All rights reserved.

MIT License

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

README.md

+146
@@ -0,0 +1,146 @@
# ONNX.js
ONNX.js is a JavaScript library for running ONNX models in browsers and on Node.js.

ONNX.js adopts WebAssembly and WebGL technologies to provide an optimized ONNX model inference runtime for both CPUs and GPUs.

### Why ONNX models
The [Open Neural Network Exchange](http://onnx.ai/) (ONNX) is an open standard for representing machine learning models. The biggest advantage of ONNX is that it allows interoperability across different open source AI frameworks, which in turn offers more flexibility in adopting AI frameworks. [This](#Getting-ONNX-models) is a great place to start getting acquainted with ONNX models.

### Why ONNX.js
With ONNX.js, web developers can score pre-trained ONNX models directly in the browser. This reduces server-client communication and protects user privacy, while offering an install-free, cross-platform in-browser ML experience.

ONNX.js can run on both CPU and GPU. For running on CPU, [WebAssembly](https://developer.mozilla.org/en-US/docs/WebAssembly) is adopted to execute the model at near-native speed. Furthermore, ONNX.js utilizes [Web Workers](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers) to provide a "multi-threaded" environment for parallelizing data processing. Empirical evaluation shows very promising performance gains on CPU by taking full advantage of WebAssembly and Web Workers. For running on GPU, ONNX.js adopts WebGL, a popular standard for accessing GPU capabilities, and adds several novel optimization techniques that reduce data transfer between CPU and GPU, as well as GPU processing cycles, to push performance to the maximum.
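
A backend can be requested when creating an inference session. The sketch below assumes a `backendHint` session option (an assumption, as the option is not shown elsewhere in this README; consult the [API](./docs/api.md) reference for the authoritative option name):

```js
// hint which backend to use ('webgl', 'wasm', or 'cpu'); ONNX.js is
// expected to fall back to an available backend if the hinted one is
// not supported in the current environment
// NOTE: `backendHint` is assumed here; verify against the API docs
const session = new onnx.InferenceSession({ backendHint: 'webgl' });
```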

### Benchmarks

Benchmarks have been run against the most prominent open source solutions in the same market. Below are the results collected for the Chrome and Edge browsers on one sample machine (computations run on both CPU and GPU):

![alt text](./docs/perf-resnet50.png "Resnet50 Perf numbers")

> NOTE:
> 1. Keras.js doesn't support WebGL usage on Edge.
> 2. Keras.js and TensorFlow.js don't support WebAssembly usage on any browser.

> The specs of the machine used to perform the benchmarking are listed below:
> * OS: Microsoft Windows 10 Enterprise Insider Preview
> * Model: HP Z240 Tower Workstation
> * Processor: Intel(R) Core(TM) i7-6700 CPU @ 3.40GHz, 3401 MHz, 4 Core(s), 8 Logical Processor(s)
> * Installed Physical Memory (RAM): 32.0 GB
> * GPU make / Chip type: AMD FirePro W2100 / AMD FirePro SDI (0x6608)
> * GPU Memory (approx.): 18.0 GB

### Demo

The [ONNX.js demo website](https://microsoft.github.io/onnxjs-demo/) shows the capabilities of ONNX.js. Check out the [code](https://github.com/Microsoft/onnxjs-demo).

## Getting Started
There are multiple ways to use ONNX.js in a project:

### Using the `<script>` tag

This is the most straightforward way to use ONNX.js. The following HTML example shows how to use it:
```html
<html>
  <head>
  </head>

  <body>
    <!-- Load ONNX.js -->
    <script src="https://cdn.jsdelivr.net/npm/onnxjs/dist/onnx.min.js"></script>
    <!-- Code that consumes ONNX.js -->
    <script>
      // create a session
      const myOnnxSession = new onnx.InferenceSession();
      // load the ONNX model file
      myOnnxSession.loadModel("./my-model.onnx").then(() => {
        // generate model input
        const inferenceInputs = getInputs();
        // execute the model on the session created above
        myOnnxSession.run(inferenceInputs).then((output) => {
          // consume the output
          const outputTensor = output.values().next().value;
          console.log(`model output tensor: ${outputTensor.data}.`);
        });
      });
    </script>
  </body>
</html>
```
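
The example assumes a `getInputs()` helper that produces the model's input Tensors. A hypothetical implementation for a model that takes a single 2x2 float32 input might look like this (the shape and values depend entirely on your model):

```js
// hypothetical getInputs() for the example above; adjust the data,
// type and dims to whatever the loaded model actually expects
function getInputs() {
  return [new onnx.Tensor(new Float32Array([1.0, 2.0, 3.0, 4.0]), 'float32', [2, 2])];
}
```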

Refer to [browser/Add](./examples/browser/add) for an example.

### Using NPM and bundling tools

Modern browser-based applications are usually built with frameworks like [Angular](https://angular.io/), [React](https://reactjs.org/), [Vue.js](https://vuejs.org/) and so on. Such a setup usually compiles the source code into one or more bundle file(s). The following TypeScript example shows how to use ONNX.js in an async context:

1. Import `Tensor` and `InferenceSession`.
```ts
import {Tensor, InferenceSession} from 'onnxjs';
```

2. Create an instance of `InferenceSession`.
```ts
const session = new InferenceSession();
```

3. Load the ONNX model.
```ts
// use the following in an async method
const url = './data/models/resnet/model.onnx';
await session.loadModel(url);
```

4. Create your input Tensor(s) similar to the example below. You need to do any pre-processing required by your model at this stage; for that, refer to the documentation of the model you have.
```javascript
// creating an array of input Tensors is the easiest way. For other options see the API documentation
const inputs = [new Tensor(new Float32Array([1.0, 2.0, 3.0, 4.0]), 'float32', [2, 2])];
```

5. Run the model with the input Tensors. The output Tensor(s) are available once the run operation is complete (see the combined sketch after this list):
```javascript
// run this in an async method:
const outputMap = await session.run(inputs);
const outputTensor = outputMap.values().next().value;
```
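
Putting the five steps together, a minimal end-to-end function might look like the sketch below (same placeholder model path and input as above; any model-specific pre-processing is omitted):

```javascript
import { Tensor, InferenceSession } from 'onnxjs';

// a sketch combining steps 1-5; the model path and the 2x2 float32
// input are placeholders for your model's actual requirements
async function runModel() {
  const session = new InferenceSession();
  await session.loadModel('./data/models/resnet/model.onnx');

  const inputs = [new Tensor(new Float32Array([1.0, 2.0, 3.0, 4.0]), 'float32', [2, 2])];

  const outputMap = await session.run(inputs);
  return outputMap.values().next().value; // the first output Tensor
}
```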

More verbose examples on how to use ONNX.js are located under the `examples` folder. For further information, see [Examples](./examples/README.md).

### Running in Node.js

ONNX.js can run in Node.js as well. This is usually for testing purposes. Use the `require()` function to load ONNX.js:
```js
require('onnxjs');
```

Refer to [node/Add](./examples/node/add) for a detailed example.
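
Once loaded, the same `Tensor` and `InferenceSession` API shown in the browser examples applies. A minimal end-to-end sketch (`./my-model.onnx` is a placeholder path):

```js
// a sketch of running a model in Node.js; the model path and the 2x2
// float32 input are placeholders for your model's requirements
const onnx = require('onnxjs');

async function main() {
  const session = new onnx.InferenceSession();
  await session.loadModel('./my-model.onnx');

  const input = new onnx.Tensor(new Float32Array([1.0, 2.0, 3.0, 4.0]), 'float32', [2, 2]);
  const outputMap = await session.run([input]);
  console.log(outputMap.values().next().value.data);
}

main().catch(console.error);
```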

*NOTE: Currently, the supported platforms are Windows 10 + Edge/Chrome/Firefox/Electron/Node.js (support for other platforms is coming soon).*

## Documents

### Developers
For information on developing ONNX.js, please check [Development](./docs/development.md).

For API reference, please check [API](./docs/api.md).

### Getting ONNX models
You can get ONNX models easily in multiple ways:
- Choose a pre-trained ONNX model from the [ONNX Model Zoo](https://github.com/onnx/models)
- Convert models from mainstream frameworks, e.g. PyTorch, TensorFlow and Keras, by following the [ONNX tutorials](https://github.com/onnx/tutorials)
- Use your own data to generate a customized ONNX model with the [Azure Custom Vision service](https://docs.microsoft.com/en-us/azure/cognitive-services/Custom-Vision-Service/home)
- [Train a custom model in AzureML](https://github.com/Azure/MachineLearningNotebooks/tree/master/training) and save it in the ONNX format

Learn more about ONNX:
- [ONNX website](http://onnx.ai/)
- [ONNX on GitHub](https://github.com/onnx/onnx)

### Operators supported
See the [list](./docs/operators.md) of ONNX operators supported by each of the 3 available built-in backends (cpu, wasm, and webgl).

## Contribute
We'd love to embrace your contribution to ONNX.js. Please refer to [CONTRIBUTING.md](./CONTRIBUTING.md).

## License
Copyright (c) Microsoft Corporation. All rights reserved.

Licensed under the [MIT](./LICENSE) License.

benchmark/LICENSE

+11
@@ -0,0 +1,11 @@
# Licenses

TensorFlow.js:
https://github.com/tensorflow/tfjs/blob/master/LICENSE

Keras.js:
https://github.com/transcranial/keras-js/blob/master/LICENSE

WebDNN:
https://github.com/mil-tokyo/webdnn/blob/master/LICENSE.txt

benchmark/README.md

+47
@@ -0,0 +1,47 @@
# Benchmarks
This subproject benchmarks ONNX.js performance and compares it against other leading in-browser AI inference frameworks.

## Frameworks
- TensorFlow.js
- Keras.js
- WebDNN
- ONNX.js

## Backends
(not all backends are supported on all platforms)
- WebGL
- WebAssembly
- CPU

## Browsers
(not all framework/backend combinations are supported by all browsers)
- Chrome (WebGL 2)
- Edge (WebGL 1)

## Instructions
Please download all the sub-folders (containing the model files and corresponding test data) under
https://github.com/Microsoft/onnxjs-demo/tree/data/data/benchmark and place them in ./benchmark/data prior to running the benchmark tests.

1. Ensure that the ONNX.js project (the parent) is already installed and built:
```bash
npm ci
npm run build
```
2. Change to the `benchmark` subfolder, then install and build there as well:
```bash
cd benchmark
npm install
npm run build
```
3. Run tests (Chrome):
```bash
npm run test
```
4. Run tests (Edge):

   Note that the Edge tests are likely to crash the browser. A recommended approach is to comment out all frameworks and backends except one, and repeat this for each of the others. Look in the definition of `BenchmarkImageNetData` in src/index.js.
```bash
npm run test-edge
```
