onnx-webgpu.html
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>onnx</title>
</head>
<body>
<script src="https://cdnjs.cloudflare.com/ajax/libs/onnxruntime-web/1.20.1/ort.webgpu.min.js"
integrity="sha512-VfhX+QkN7NbCrYehivVRUqfUdswaFcRPtmyBSMGIXGbIMRku82aqFB2mKxvrtNV9TYP6JWz371CfTIzAExMyCA=="
crossorigin="anonymous" referrerpolicy="no-referrer"></script>
<h1>onnx webgpu</h1>
<script type="module">
async function main() {
try {
// Create a new session and load the specific model.
// const session = await ort.InferenceSession.create('data/sims.onnx');
// Fails with e = 42912032; maybe that model isn't compatible with WebGPU?
// Enable verbose ONNX Runtime Web logging so failures during session creation are easier to diagnose.
const options = { executionProviders: ['webgpu'] };
ort.env.debug = true;
ort.env.logLevel = 'verbose';
ort.env.trace = true;
options.logSeverityLevel = 0;
options.logVerbosityLevel = 0;
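// The model is fetched from a local dev server; point the URL at wherever your .onnx file is hosted.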
const session = await ort.InferenceSession.create(
'http://localhost:3000/models/default.onnx', options);
// Works: From https://github.com/akbartus/DepthAnything-on-Browser/blob/main/interactive-v2/webgpu-sliders.html
// const myOrtSession = await ort.InferenceSession.create("https://cdn.glitch.me/0f5359e2-6022-421b-88f7-13e276d0fb33/depthanythingv2-vits-dynamic-quant.onnx", { executionProviders: ['webgpu'] });
console.log('sims model loaded');
// Prepare inputs. A tensor needs its corresponding TypedArray as data.
const batch_size = 1;
const X = new ort.Tensor('float32', new Float32Array(batch_size * 33694), [batch_size, 33694]);
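// Fill the first 100 features with a dummy value; the rest of the input stays zero.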
for (let i = 0; i < 100; i++) {
X.data[i] = 0.5;
}
// feed inputs and run
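// "input.1" is the input name baked into this ONNX graph (a common PyTorch export name); verify it against your model if run() rejects the feeds.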
const results = await session.run({ "input.1": X });
console.log('inference done');
console.log(results);
// "826" is the output name assigned when the model was exported; show the first few values.
document.write(`<div>Cell 0 Predictions: ${results["826"].data.slice(0, 8)}</div>`);
} catch (e) {
console.error(e);
document.write(`Failed to run inference with the ONNX model: ${e}.`);
}
}
main().then(() => console.log('done'));
</script>
</body>
</html>