<!DOCTYPE html>
<html lang="en">
  <head>
    <title>Holoprojection accuracy graphing</title>
    <meta charset="utf-8" />
    <meta
      name="viewport"
      content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0"
    />
    <script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
  </head>
  <body>
    <h2>Select a Video File</h2>
    <input type="file" id="videoInput" accept="video/*" />
    <canvas id="fidelityChart" width="800" height="400"></canvas>

    <video
      id="video"
      loop
      muted
      crossorigin="anonymous"
      playsinline
      style="display: none"
    ></video>
    <div id="main-scene-container"></div>

    <!-- Offscreen canvases for comparison -->
    <canvas id="renderedCanvas" width="256" height="256"></canvas>
    <canvas id="truthCanvas" width="256" height="256"></canvas>

    <script type="importmap">
      {
        "imports": {
          "three": "https://threejs.org/build/three.module.js",
          "three/addons/": "https://threejs.org/examples/jsm/"
        }
      }
    </script>

    <script type="module">
      import * as THREE from "three";
      import pixelmatch from "https://unpkg.com/pixelmatch?module";

      const projection_vert_shader_source = await (await fetch("./projection.vert.glsl")).text();
      const projection_frag_shader_source = await (await fetch("./projection.frag.glsl")).text();

      let scene, camera, renderer;
      let geometry;
      let meshList = [];
      let materialList = [];
      let numCameras;

      // DOM elements
      const video = document.getElementById("video");
      const videoInput = document.getElementById("videoInput");

      // Offscreen canvases for image comparison
      const renderedCanvas = document.getElementById("renderedCanvas");
      const renderedContext = renderedCanvas.getContext("2d", { willReadFrequently: true });
      const truthCanvas = document.getElementById("truthCanvas");
      const truthContext = truthCanvas.getContext("2d", { willReadFrequently: true });

      // Size constants
      const width = 256;
      const height = 256;

      init();

      // Read one bit-packed uint32 from a row of metadata pixels: each of the
      // first 32 pixels in the row encodes one bit in its red channel
      // (bright = 1, dark = 0), most significant bit first.
      function pixelArrayToUint32(imageData, canvasWidth, rowNumber) {
        let result = 0;

        for (let i = 0; i < 32; i++) {
          const thisBit = imageData[(i * 4) + (rowNumber * (canvasWidth * 4))] > 128 ? 1 : 0;
          result |= (thisBit << (31 - i));
        }
        return result >>> 0; // reinterpret as unsigned so a set sign bit survives
      }
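
      // Metadata layout, as consumed by animate() below (inferred from how the
      // rows are read there, not a published spec): row 0 of the frame encodes
      // the camera count; for each camera i, rows 1-32 starting at x = 256 * i
      // encode the 16 floats of its camera-to-world matrix (c2wm) followed by
      // the 16 floats of its projection matrix (prjm).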

      // Reinterpret the bits of a uint32 as an IEEE-754 float32
      function decodeUint32ToFloat(theInteger) {
        const buffer = new ArrayBuffer(4);
        const view = new DataView(buffer);
        view.setUint32(0, theInteger); // at offset 0, write theInteger as raw 32-bit data
        return view.getFloat32(0); // read the same 4 bytes back as a Float32
      }
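
      // Round-trip sanity check for the decoder above. encodeFloatToUint32 is a
      // hypothetical inverse (the real encoder lives on the video-producing
      // side); it is included only for illustration and testing.
      function encodeFloatToUint32(theFloat) {
        const buffer = new ArrayBuffer(4);
        const view = new DataView(buffer);
        view.setFloat32(0, theFloat);
        return view.getUint32(0);
      }
      console.assert(encodeFloatToUint32(1.5) === 0x3fc00000); // IEEE-754 bits of 1.5
      console.assert(decodeUint32ToFloat(0x3fc00000) === 1.5);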

      function init() {
        const container = document.getElementById("main-scene-container");

        camera = new THREE.PerspectiveCamera(
          50,
          window.innerWidth / window.innerHeight,
          0.01,
          100
        );
        scene = new THREE.Scene();

        // Video input handler
        videoInput.addEventListener("change", function () {
          const file = this.files[0]; // Get the selected file
          if (file) {
            if (video.src) URL.revokeObjectURL(video.src); // Release any previous blob URL
            const videoURL = URL.createObjectURL(file); // Create a blob URL for the file
            video.src = videoURL; // Set the video source
            video.style.display = "block"; // Show the video element
            video.load(); // Load the video
            video.play();
          }
        });

        const texture = new THREE.VideoTexture(video);
        texture.minFilter = THREE.NearestFilter;
        texture.magFilter = THREE.NearestFilter;
        texture.generateMipmaps = false;

        // Create an array of points, where every three elements corresponds to
        // one point (x, y, z)
        const vertices = new Float32Array(width * height * 3);
        for (let i = 0, j = 0, l = vertices.length; i < l; i += 3, j++) {
          vertices[i] = j % width; // pixels from the left
          vertices[i + 1] = Math.floor(j / width); // pixels from the bottom (vertical zero in shader land)
          vertices[i + 2] = 0;
        }

        geometry = new THREE.BufferGeometry();
        // treat the "position" attribute as one vertex per three elements
        geometry.setAttribute(
          "position",
          new THREE.BufferAttribute(vertices, 3)
        );
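
        // The flat width x height grid above never changes on the CPU side;
        // projection.vert.glsl (fetched above, not shown here) is presumably
        // what displaces each point per frame, using the per-camera c2wm/prjm
        // uniforms and depth sampled from the video texture.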

        function buildMaterialAndMeshList(numCameras) {
          // First clean out the old meshes and materials. The geometry is
          // shared by every mesh, so only the materials are disposed here
          // (THREE.Points itself has no dispose method).
          for (let i = 0; i < materialList.length; i++) {
            materialList[i].dispose();
            scene.remove(meshList[i]);
          }

          materialList = [];
          meshList = [];

          // now reconstruct both lists
          for (let i = 0; i < numCameras; i++) {
            const material = new THREE.ShaderMaterial({
              uniforms: {
                map: { value: texture },
                width: { value: width },
                height: { value: height },
                nearClipping: { value: 0.1 },
                farClipping: { value: 5 },
                boxSize: { value: 1 },
                pointSize: { value: 3 },
                cameraIndex: { value: i },
                numCameras: { value: numCameras },
                c2wm: { value: new THREE.Matrix4() },
                prjm: { value: new THREE.Matrix4() }
              },
              vertexShader: projection_vert_shader_source,
              fragmentShader: projection_frag_shader_source,
              blending: THREE.NormalBlending,
              depthTest: true,
              depthWrite: true,
              transparent: true,
            });

            const mesh = new THREE.Points(geometry, material);

            // add the mesh unless this is the SOURCE OF TRUTH sensor (index == 3)
            if (i !== 3) {
              scene.add(mesh);
            }

            materialList[i] = material;
            meshList[i] = mesh;
          }
        }
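
        // buildMaterialAndMeshList is invoked from animate() whenever the
        // camera count decoded from the video changes, so the scene keeps one
        // point cloud per sensor without restarting playback.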

        // Axis helpers: x = red, y = green, z = blue
        scene.add(new THREE.ArrowHelper(new THREE.Vector3(1, 0, 0), new THREE.Vector3(0, 0, 0), 1, 0xff0000));
        scene.add(new THREE.ArrowHelper(new THREE.Vector3(0, 1, 0), new THREE.Vector3(0, 0, 0), 1, 0x00ff00));
        scene.add(new THREE.ArrowHelper(new THREE.Vector3(0, 0, 1), new THREE.Vector3(0, 0, 0), 1, 0x0000ff));

        // Renderer (hidden, 256x256)
        renderer = new THREE.WebGLRenderer({ preserveDrawingBuffer: true });
        renderer.setSize(width, height);
        renderer.setPixelRatio(1);
        renderer.domElement.style.display = "none";
        container.appendChild(renderer.domElement);

        // Reusable canvas for reading the metadata rows out of each video
        // frame (created once rather than once per frame)
        const debugCanvas = document.createElement("canvas");
        debugCanvas.width = 1024;
        debugCanvas.height = 256;
        const debugContext = debugCanvas.getContext("2d", { willReadFrequently: true });

        function animate() {
          // Wait until the video can supply the current frame
          if (video.readyState < video.HAVE_CURRENT_DATA) return requestAnimationFrame(animate);

          debugContext.drawImage(texture.image, 0, 0);

          let d = debugContext.getImageData(0, 0, debugCanvas.width, 1);
          numCameras = decodeUint32ToFloat(pixelArrayToUint32(d.data, debugCanvas.width, 0));

          if (numCameras !== materialList.length) {
            console.log("got new camera count: " + numCameras);
            buildMaterialAndMeshList(numCameras);
          }

          // Reusable vectors
          const virtualForward = new THREE.Vector3();
          const mainForward = new THREE.Vector3();

          // Update the matrices of each camera from the metadata rows
          for (let i = 0; i < numCameras; i++) {
            let d = debugContext.getImageData(256 * i, 1, debugCanvas.width, 32);
            const c2wm_array = [0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,0,0];
            const prjm_array = [0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,0,0];

            for (let rowNr = 0; rowNr < 16; rowNr++) {
              c2wm_array[rowNr] = decodeUint32ToFloat(pixelArrayToUint32(d.data, debugCanvas.width, rowNr));
              prjm_array[rowNr] = decodeUint32ToFloat(pixelArrayToUint32(d.data, debugCanvas.width, rowNr + 16));
            }

            materialList[i].uniforms.c2wm.value.fromArray(c2wm_array);
            materialList[i].uniforms.prjm.value.fromArray(prjm_array);

            if (i === 3) {
              // Drive the three.js camera from the SOURCE OF TRUTH sensor
              camera.matrixAutoUpdate = false;
              const m = new THREE.Matrix4();
              m.set(...c2wm_array); // set() is row-major, so transpose the column-major data
              camera.matrix.copy(m.transpose());

              // ✅ Decompose to update .position, .quaternion, .scale
              camera.matrix.decompose(camera.position, camera.quaternion, camera.scale);

              const p = new THREE.Matrix4();
              p.set(...prjm_array);
              camera.projectionMatrix.copy(p.transpose());

              camera.updateMatrixWorld(true);
            }

            if (i !== 3) {
              // Get the main camera's current forward direction
              camera.getWorldDirection(mainForward);

              // Get the virtual camera's forward direction
              virtualForward.set(0, 0, -1);
              const rotMatrix = new THREE.Matrix4().extractRotation(materialList[i].uniforms.c2wm.value);
              virtualForward.applyMatrix4(rotMatrix).normalize();

              // Compute the angle between them
              const dot = THREE.MathUtils.clamp(virtualForward.dot(mainForward), -1, 1);
              const angleRadians = Math.acos(dot);
              const angleDegrees = THREE.MathUtils.radToDeg(angleRadians);

              // console.log(`Camera ${i}: ${angleDegrees.toFixed(2)}° difference from three.js camera`);
            }
          }

          renderer.render(scene, camera);

          // ✅ CAPTURE RENDERED FRAME
          const gl = renderer.getContext();
          const pixels = new Uint8Array(width * height * 4);
          gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, pixels);

          const renderedImageData = new ImageData(width, height);
          // Flip vertically (WebGL rows run bottom-up, canvas rows top-down)
          for (let y = 0; y < height; y++) {
            const rowStart = y * width * 4;
            const destStart = (height - y - 1) * width * 4;
            for (let i = 0; i < width * 4; i++) {
              renderedImageData.data[destStart + i] = pixels[rowStart + i];
            }
          }
          renderedContext.putImageData(renderedImageData, 0, 0);

          // ✅ CAPTURE TRUTH FRAME (from the video, camera i=3 region)
          // Assuming each camera's video region is 256px wide
          truthContext.drawImage(video, 256 * 3, 256 * 2, 256, 256, 0, 0, 256, 256);

          // ✅ COMPARE IMAGES
          const percentSimilarity = compareImages();

          // Azimuth of the camera around the y axis, wrapped into 0-360 degrees
          let orbitAngle = Math.atan2(camera.position.x, camera.position.z);
          orbitAngle = (orbitAngle + 2 * Math.PI) % (2 * Math.PI);
          const orbitAngleDegrees = THREE.MathUtils.radToDeg(orbitAngle);

          updateChart(orbitAngleDegrees, percentSimilarity);

          requestAnimationFrame(animate);
        }

        // --- CHART INITIALIZATION ---
        const chartCtx = document.getElementById("fidelityChart").getContext("2d");

        const fidelityChart = new Chart(chartCtx, {
          type: "scatter",
          data: {
            datasets: [
              {
                label: "Reconstruction Fidelity",
                data: [],
                borderColor: "rgba(75, 192, 192, 1)",
                fill: false,
                tension: 0.1,
                pointRadius: 1
              }
            ]
          },
          options: {
            animation: false,
            responsive: true,
            scales: {
              x: {
                type: "linear",
                title: {
                  display: true,
                  text: "Orbital Angle (degrees)"
                },
                ticks: {
                  stepSize: 30, // any divisor of 360 (3, 6, 15, 30, ...) works here
                  callback: function (value) {
                    return value + "°"; // e.g., "0°", "30°", "90°"
                  }
                },
                min: 0,
                max: 360,
                grid: {
                  color: function (context) {
                    const value = context.tick.value;
                    if (value === 90 || value === 210 || value === 330) {
                      return "rgba(0, 0, 0, 0.3)"; // darker lines at key angles
                    }
                    return "rgba(0, 0, 0, 0.1)";
                  }
                }
              },
              y: {
                title: {
                  display: true,
                  text: "Similarity (%)"
                },
                min: 0,
                max: 100
              }
            },
            plugins: {
              title: {
                display: true,
                text: "3D Reconstruction vs. Ground Truth"
              }
            }
          }
        });

        function compareImages() {
          const img1 = renderedContext.getImageData(0, 0, width, height);
          const img2 = truthContext.getImageData(0, 0, width, height);
          const diff = renderedContext.createImageData(width, height);

          const mismatchedPixels = pixelmatch(
            img1.data,
            img2.data,
            diff.data,
            width,
            height,
            { threshold: 0.1 }
          );

          const totalPixels = width * height;
          const percentDifference = (mismatchedPixels / totalPixels) * 100;

          // console.log(`Frame comparison: ${percentDifference.toFixed(2)}% mismatch`);

          return 100 - percentDifference; // return % similarity
        }
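
        // pixelmatch counts pixels whose YIQ color difference exceeds the given
        // threshold (0-1; smaller is stricter, 0.1 is the library default), so
        // the similarity reported here is the fraction of matching pixels, not
        // a perceptual score such as SSIM.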

        function updateChart(angleDegrees, similarityPercent) {
          fidelityChart.data.datasets[0].data.push({
            x: angleDegrees,
            y: similarityPercent
          });

          // Optional: cap the number of points, discarding the oldest first
          if (fidelityChart.data.datasets[0].data.length > 1000) {
            fidelityChart.data.datasets[0].data.shift();
          }

          fidelityChart.update();
        }

        animate();
      }
    </script>
  </body>
</html>