<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Kinect Azure Example</title>
<link rel="stylesheet" href="../assets/vendors/bootstrap-4.3.1-dist/css/bootstrap.css">
<link rel="stylesheet" href="../assets/vendors/bootstrap-4.3.1-dist/css/docs.min.css">
</head>
<body class="container-fluid py-3">
<div class="d-flex align-items-baseline justify-content-between">
<h1 class="bd-title">Infrared Stream (NFOV)</h1>
<button onclick="require('electron').remote.getCurrentWebContents().openDevTools()">open dev tools</button>
</div>
<p>
This demo shows the infrared stream in an HTML canvas element. Note that the canvas tag is resized through CSS to fit the window, so the native image size of the stream is<span id="info"> different</span>.
</p>
<canvas id="outputCanvas" class="img-fluid"></canvas>
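<!-- the img-fluid class is Bootstrap's responsive-image rule (max-width: 100%; height: auto;),
     which lets the canvas keep its native pixel size while scaling down to fit the window -->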
<script>
{
const KinectAzure = require('kinect-azure');
const kinect = new KinectAzure();
const $info = document.getElementById('info');
const $outputCanvas = document.getElementById('outputCanvas');
const outputCtx = $outputCanvas.getContext('2d');
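// canvas ImageData buffer, allocated lazily once the first frame reports its size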
let outputImageData;
const init = () => {
startKinect();
};
const startKinect = () => {
if(kinect.open()) {
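// the IR stream comes from the depth sensor, so starting the depth camera is enough;
// NFOV unbinned depth mode should produce 640x576 frames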
kinect.startCameras({
depth_mode: KinectAzure.K4A_DEPTH_MODE_NFOV_UNBINNED
});
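// the listener callback fires for every captured frame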
kinect.startListening((data) => {
$info.textContent = ` ${data.irImageFrame.width}x${data.irImageFrame.height}`;
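// size the canvas to the native frame dimensions once, on the first valid frame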
if (!outputImageData && data.irImageFrame.width > 0) {
$outputCanvas.width = data.irImageFrame.width;
$outputCanvas.height = data.irImageFrame.height;
outputImageData = outputCtx.createImageData($outputCanvas.width, $outputCanvas.height);
}
if (outputImageData) {
renderIrFrameAsGreyScale(outputCtx, outputImageData, data.irImageFrame);
}
});
}
};
const renderIrFrameAsGreyScale = (ctx, canvasImageData, imageFrame) => {
const newPixelData = Buffer.from(imageFrame.imageData);
const pixelArray = canvasImageData.data;
let incomingPixelIndex = 0;
for (let i = 0; i < canvasImageData.data.length; i += 4) {
// each IR pixel is an unsigned 16-bit little-endian value
const irValue = newPixelData[incomingPixelIndex + 1] << 8 | newPixelData[incomingPixelIndex];
// write the same value to R, G and B for a grey pixel; the canvas'
// Uint8ClampedArray clamps anything above 255 to white
pixelArray[i] = irValue;
pixelArray[i + 1] = irValue;
pixelArray[i + 2] = irValue;
pixelArray[i + 3] = 0xff;
incomingPixelIndex += 2;
}
ctx.putImageData(canvasImageData, 0, 0);
};
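// A minimal alternative, if scaling is preferred over clamping: remap the
// 16-bit range onto 0..255 with a Processing-style linear map helper
// (hypothetical, not provided by kinect-azure):
//   const map = (v, inMin, inMax, outMin, outMax) =>
//     (v - inMin) * (outMax - outMin) / (inMax - inMin) + outMin;
// and inside the loop above: pixelArray[i] = map(irValue, 0, 255 << 8, 0, 255);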
// expose the kinect instance to the window object in this demo app to allow the parent window to close it between sessions
window.kinect = kinect;
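// (presumably the parent then calls kinect.stopListening() and kinect.close())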
init();
}
</script>
</body>
</html>