dec 2025 nz visit

RopeBunnyVJ 2025-12-01 13:42:07 -08:00
parent 8be33b903c
commit 29dc899eb4
17 changed files with 2645 additions and 395 deletions

@@ -36,40 +36,39 @@ Shader "Custom/DepthOnly"
 fixed4 frag(v2f i) : SV_Target
 {
+// if (i.depth <= 0) {
+// return fixed4(1.0, 0.5, 0.0, 1.0);
+// }
+// Outputs depth in RGB (2^24 precision)
 // ****We had to change Unity's color space to Gamma such that it doesn't add its own gamma correction,
 // for this to work *******
-uint depth24b = (i.depth * ((1<<24)-1)); // maps depth to be from 0 --- 2^24
-uint rEncode = 0;
-uint gEncode = 0;
-uint bEncode = 0;
-// This loop encodes depth into a 24 bit value split across all three color channels.
-// The least significant digit is the last blue value.
-// The most significant digit is the first red value.
-for (int i = 0; i < 8; i++){
-bEncode |= ((depth24b) & 1) << i;
-depth24b >>= 1;
-gEncode |= ((depth24b) & 1) << i;
-depth24b >>= 1;
-rEncode |= ((depth24b) & 1) << i;
-depth24b >>= 1;
-}
-return fixed4(
-rEncode/255.,
-gEncode/255.,
-bEncode/255.,
-1.0
-);
-// Output depth as grayscale
-// return fixed4((i.depth), (i.depth), (i.depth), 1.0);
+// uint depth24b = (i.depth * ((1<<24)-1)); // maps depth to be from 0 --- 2^24
+// uint rEncode = 0;
+// uint gEncode = 0;
+// uint bEncode = 0;
+// // This loop encodes depth into a 24 bit value split across all three color channels.
+// // The least significant digit is the last blue value.
+// // The most significant digit is the first red value.
+// for (int i = 0; i < 8; i++){
+// bEncode |= ((depth24b) & 1) << i;
+// depth24b >>= 1;
+// gEncode |= ((depth24b) & 1) << i;
+// depth24b >>= 1;
+// rEncode |= ((depth24b) & 1) << i;
+// depth24b >>= 1;
+// }
+// return fixed4(
+// rEncode/255.,
+// gEncode/255.,
+// bEncode/255.,
+// 1.0
+// );
+// Output depth as grayscale (2^8 precision)
+return fixed4((i.depth), (i.depth), (i.depth), 1.0);
+// return fixed4(i.depth, i.depth, i.depth, 1.0);
 // return fixed4(
 // (i.pos2.x) * .5 + .5,
 // (i.pos2.x) * .5 + .5,
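For reference: the now-disabled branch spreads the 24 depth bits across R, G, and B (blue takes the lowest bit of each group of three, red the highest), and the decode loop removed from projection.vert.glsl later in this commit reads them back in the opposite order. A minimal JavaScript sketch of that round trip (illustrative, not part of the commit):

  function encodeDepth24(depth) { // depth in [0, 1]
    let d = Math.round(depth * ((1 << 24) - 1));
    let r = 0, g = 0, b = 0;
    for (let i = 0; i < 8; i++) {
      b |= (d & 1) << i; d >>= 1;
      g |= (d & 1) << i; d >>= 1;
      r |= (d & 1) << i; d >>= 1;
    }
    return [r, g, b]; // one byte per color channel
  }

  function decodeDepth24(r, g, b) {
    let d = 0;
    for (let i = 0; i < 8; i++) {
      d = (d << 1) | ((r >> (7 - i)) & 1); // the most significant bit comes from red
      d = (d << 1) | ((g >> (7 - i)) & 1);
      d = (d << 1) | ((b >> (7 - i)) & 1);
    }
    return d / ((1 << 24) - 1);
  }
  // decodeDepth24(...encodeDepth24(x)) reproduces x to within 1/(2^24 - 1)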

Assets/DriftObject.cs (new file, 18 lines)

@@ -0,0 +1,18 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

public class DriftObject : MonoBehaviour
{
    // Start is called before the first frame update
    void Start()
    {
    }

    // Update is called once per frame
    void Update()
    {
        // Drift one unit per second along the main camera's right axis
        transform.Translate(Vector3.right * Time.deltaTime, Camera.main.transform);
    }
}

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: ae13af9f09165054485ac7c362ccb0ac
MonoImporter:
  externalObjects: {}
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant:

Assets/Editor.meta (new file, 8 lines)

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: fd445429b69abaa48b5c38dcea905d97
folderAsset: yes
DefaultImporter:
  externalObjects: {}
  userData:
  assetBundleName:
  assetBundleVariant:

File diff suppressed because it is too large.

@@ -14,9 +14,15 @@ public class RT3script : MonoBehaviour
 // public string outputRTMP = "rtmp://localhost/live/HoloPipeline";
 public string ingestRTMP = "rtmp://ingest.vrcdn.live/live";
 public string streamKey = "";
+public bool recordToDisk = false;
+public string recordedFilePath;
 public Camera[] cameraList;
 private Camera[] depthCameraList;
+// public Camera PSNRcamera;
 private RenderTexture[] colorImages, depthImages;
 public Material debugMaterial;
@@ -49,7 +55,7 @@ public class RT3script : MonoBehaviour
 // colorImage2 = new RenderTexture(256,256,16,RenderTextureFormat.ARGB32);
 // depthImage = new RenderTexture(256,256,16,RenderTextureFormat.ARGB32);
 // depthImage2 = new RenderTexture(256,256,16,RenderTextureFormat.ARGB32);
-outputImage = new Texture2D((256*cameraList.Length), 512 + metadataHeight, TextureFormat.RGB24, false);
+outputImage = new Texture2D((256*(cameraList.Length)), 512 + metadataHeight, TextureFormat.RGB24, false);
 outputImage.filterMode = FilterMode.Point;
 for (int i = 0; i < cameraList.Length; i++) {
@@ -69,6 +75,7 @@ public class RT3script : MonoBehaviour
 depthCameraList[i].targetTexture = depthImages[i];
 }
+// PSNRcamera.targetTexture = new RenderTexture(256,256,16,RenderTextureFormat.ARGB32);
 debugMaterial.mainTexture = outputImage;
@@ -90,8 +97,10 @@ public class RT3script : MonoBehaviour
 void LateUpdate()
 {
 float expectedFrames = Time.timeSinceLevelLoad * targetFrameRate;
-if (frameCount < expectedFrames) {
-for(int i = 0; i < cameraList.Length; i++){
+if (frameCount < expectedFrames)
+{
+for (int i = 0; i < cameraList.Length; i++)
+{
 RenderTexture.active = colorImages[i];
 outputImage.ReadPixels(new Rect(0, 0, 256, 256), (256 * i), 0);
 outputImage.Apply();
@@ -102,7 +111,8 @@ public class RT3script : MonoBehaviour
 //for the very first camera, encode the length of camera pairs before anything else
-if (i==0) {
+if (i == 0)
+{
 encodeData(cameraList.Length, 0, 1);
 }
@@ -140,6 +150,11 @@ public class RT3script : MonoBehaviour
 frameCount += 1;
 }
+// RenderTexture.active = PSNRcamera.targetTexture;
+// outputImage.ReadPixels(new Rect(0, 0, 256, 256), (256 * cameraList.Length), 0);
+// outputImage.Apply();
 }
@@ -150,7 +165,7 @@ public class RT3script : MonoBehaviour
 for (int colNr = 0; colNr < 4; colNr++) {
 for (int rowNr = 0; rowNr < 4; rowNr++) {
 encodeData(mat[rowNr,colNr], colOffset, rowOffset); // if I want to switch to ROW MAJOR encoding, switch the two loop lines above (ALT+Up/Down)
-// UnityEngine.Debug.Log("RO " + rowOffset + ": encode "+mat[i,j]);
+// UnityEngine.Debug.Log("RO " + rowOffset + ": encode "+mat[colNr,rowNr]);
 rowOffset++;
 }
 }
@@ -187,14 +202,15 @@ public class RT3script : MonoBehaviour
 // Based on this: https://stackoverflow.com/a/16822144
 [StructLayout(LayoutKind.Explicit)]
 private struct FloatAndUIntUnion {
-// The memort layout of this struct looks like this
-// but both pointers refer to the same memory address.
+// The memory layout of this struct looks like this
+// both pointers refer to the same memory address.
 // UInt32Bits: IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
 // FloatValue: SEEEEEEEEFFFFFFFFFFFFFFFFFFFFFFF
 [FieldOffset(0)] public uint UInt32Bits;
 [FieldOffset(0)] public float FloatValue;
 }
-public static uint FloatToIntBits(float value) {
+public static uint FloatToIntBits(float value)
+{
 FloatAndUIntUnion f2i = default(FloatAndUIntUnion);
 f2i.FloatValue = value; // write as float
 return f2i.UInt32Bits; // read back as int
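(FloatToIntBits is C#'s way of reinterpreting a float's four bytes as an integer; the web pages below undo it with a DataView in decodeUint32ToFloat.)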
@@ -223,7 +239,19 @@ public class RT3script : MonoBehaviour
 // Setup FFmpeg process with arguments for RTMP streaming
 string ffmpegArgs = $"-y -f rawvideo -vcodec rawvideo -pix_fmt rgb24 -s {outputImage.width}x{outputImage.height} " +
 // string ffmpegArgs = $"-y -f rawvideo -vcodec rawvideo -pix_fmt rgbaf32le -s {outputImage.width}x{outputImage.height} " +
-$"-r {targetFrameRate} -i pipe:0 -vf vflip -c:v libx264 -preset ultrafast -tune zerolatency -pix_fmt yuv420p -f flv {ingestRTMP + "/" + streamKey}";
+$"-r {targetFrameRate} -i pipe:0 -vf vflip";
+if (!recordToDisk)
+{
+ffmpegArgs += $" -c:v libx264 -preset ultrafast -tune zerolatency -pix_fmt yuv420p -f flv {ingestRTMP + "/" + streamKey}";
+}
+else
+{
+ffmpegArgs += " \"" + recordedFilePath + "\"";
+}
 ffmpegProcess = new Process();
 ffmpegProcess.StartInfo.FileName = "Assets/ffmpeg.exe";
@@ -233,6 +261,6 @@ public class RT3script : MonoBehaviour
 ffmpegProcess.StartInfo.CreateNoWindow = true;
 ffmpegProcess.Start();
-// UnityEngine.Debug.Log("FFmpeg process started with arguments: " + ffmpegArgs);
+UnityEngine.Debug.Log("FFmpeg process started with arguments: " + ffmpegArgs);
 }
 }
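For reference, with recordToDisk left false the assembled argument string comes out roughly as below, where WxH, FPS, and KEY stand in for the interpolated outputImage size, targetFrameRate, and streamKey:

  -y -f rawvideo -vcodec rawvideo -pix_fmt rgb24 -s WxH -r FPS -i pipe:0 -vf vflip -c:v libx264 -preset ultrafast -tune zerolatency -pix_fmt yuv420p -f flv rtmp://ingest.vrcdn.live/live/KEY

With recordToDisk true, everything after -vf vflip is replaced by the quoted recordedFilePath, so FFmpeg infers the output container from the file extension.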

@@ -15,6 +15,6 @@ public class TwistObject : MonoBehaviour
 {
 // Transform t = GetComponent<Transform>();
 // t.RotateObject(vector3.up, 1);
-transform.Rotate(0,0,0.1f);
+transform.Rotate(0,0,.5f);
 }
 }

@@ -0,0 +1,130 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta
name="viewport"
content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0"
/>
<title>Holoprojection Image Comparison</title>
<style>
canvas {
border: 1px solid #ccc;
margin-bottom: 20px;
}
</style>
</head>
<body>
<div id="main-scene-container"></div>
<h2>Rendered image:</h2>
<input type="file" id="renderedImageInput" accept="image/png" />
<br />
<canvas id="renderedImage" width="256" height="256"></canvas>
<h2>Truth image:</h2>
<input type="file" id="truthImageInput" accept="image/png" />
<br />
<canvas id="truthImage" width="256" height="256"></canvas>
<h2>Difference (Pixelmatch):</h2>
<canvas id="diffImage" width="256" height="256"></canvas>
<div id="diffInfo">Images not yet compared.</div>
<script type="importmap">
{
"imports": {
"three": "https://threejs.org/build/three.module.js",
"three/addons/": "https://threejs.org/examples/jsm/"
}
}
</script>
<script type="module">
import pixelmatch from "https://unpkg.com/pixelmatch?module";
const renderedImageInput = document.getElementById("renderedImageInput");
const truthImageInput = document.getElementById("truthImageInput");
const renderedCanvas = document.getElementById("renderedImage");
const truthCanvas = document.getElementById("truthImage");
const diffCanvas = document.getElementById("diffImage");
const diffInfo = document.getElementById("diffInfo");
const width = 256;
const height = 256;
const renderedContext = renderedCanvas.getContext('2d');
const truthContext = truthCanvas.getContext('2d');
const diffContext = diffCanvas.getContext('2d');
let hasRenderedImage = false;
let hasTruthImage = false;
// Helper function to load an image to a canvas
function loadImageToCanvas(file, canvas, callback) {
const reader = new FileReader();
reader.onload = function(e) {
const img = new Image();
img.onload = function() {
const ctx = canvas.getContext('2d');
ctx.clearRect(0, 0, width, height);
ctx.drawImage(img, 0, 0, width, height);
callback();
};
img.src = e.target.result;
};
reader.readAsDataURL(file);
}
// Run pixelmatch and render diff
function compareImages() {
const img1 = renderedContext.getImageData(0, 0, width, height);
const img2 = truthContext.getImageData(0, 0, width, height);
const diff = diffContext.createImageData(width, height);
const mismatchedPixels = pixelmatch(
img1.data,
img2.data,
diff.data,
width,
height,
{
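// pixelmatch compares pixels in YIQ color space; threshold sets how large a perceived difference still counts as a match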
threshold: 0.1 // adjust for sensitivity
}
);
diffContext.putImageData(diff, 0, 0);
const totalPixels = width * height;
const percentDifference = (mismatchedPixels / totalPixels) * 100;
diffInfo.textContent = `
Mismatched pixels: ${mismatchedPixels}
(${percentDifference.toFixed(2)}% difference)
`;
}
// Set up listeners for file uploads
renderedImageInput.addEventListener('change', (e) => {
const file = e.target.files[0];
if (!file) return;
loadImageToCanvas(file, renderedCanvas, () => {
hasRenderedImage = true;
if (hasTruthImage) compareImages();
});
});
truthImageInput.addEventListener('change', (e) => {
const file = e.target.files[0];
if (!file) return;
loadImageToCanvas(file, truthCanvas, () => {
hasTruthImage = true;
if (hasRenderedImage) compareImages();
});
});
</script>
</body>
</html>

@@ -0,0 +1,445 @@
<!DOCTYPE html>
<html lang="en">
<head>
<title>Holoprojection accuracy graphing</title>
<meta charset="utf-8" />
<meta
name="viewport"
content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0"
/>
<script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
</head>
<body>
<h2>Select a Video File</h2>
<input type="file" id="videoInput" accept="video/*" />
<canvas id="fidelityChart" width="800" height="400"></canvas>
<video
id="video"
loop
muted
crossorigin="anonymous"
playsinline
style="display: none"
>
</video>
<div id="main-scene-container"></div>
<!-- Offscreen canvases for comparison -->
<canvas id="renderedCanvas" width="256" height="256"></canvas>
<canvas id="truthCanvas" width="256" height="256"></canvas>
<script type="importmap">
{
"imports": {
"three": "https://threejs.org/build/three.module.js",
"three/addons/": "https://threejs.org/examples/jsm/"
}
}
</script>
<script type="module">
import * as THREE from "three";
import pixelmatch from "https://unpkg.com/pixelmatch?module";
let projection_vert_shader_source = await (await fetch('./projection.vert.glsl')).text();
let projection_frag_shader_source = await (await fetch('./projection.frag.glsl')).text();
let scene, camera, renderer;
let geometry;
let meshList = [];
let materialList = [];
let numCameras;
// DOM elements
const video = document.getElementById("video");
const videoInput = document.getElementById("videoInput");
// Offscreen canvases for image comparison
const renderedCanvas = document.getElementById("renderedCanvas");
const renderedContext = renderedCanvas.getContext("2d", { willReadFrequently: true });
const truthCanvas = document.getElementById("truthCanvas");
const truthContext = truthCanvas.getContext("2d", { willReadFrequently: true });
// Size constants
const width = 256;
const height = 256;
init();
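// Reads one uint32 out of a pixel row: the 32 leftmost pixels of the row each
// carry one bit, most significant first; a pixel counts as a 1-bit when its
// red channel is bright (> 128).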
function pixelArrayToUint32(imageData, canvasWidth, rowNumber) {
let buffer = imageData;
let result = 0;
for (let i = 0; i < 32; i++) {
let thisBit = (buffer[(i*4) + (rowNumber*(canvasWidth*4))] > 128) & 1;
result |= (thisBit << (31-i));
}
return result;
}
// convert unsigned integer 32 to float
function decodeUint32ToFloat(theInteger) {
const buffer = new ArrayBuffer(4);
const view = new DataView(buffer);
view.setUint32(0, theInteger); // "at address 0, write theInteger to the stack as a 32 bit integer"
// console.log(view.getFloat32(0)); // "at address 0, read the stack as a Float32"
return view.getFloat32(0);
}
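// A sketch of the inverse (float -> uint32), mirroring FloatToIntBits on the
// Unity side; handy for round-trip testing the decoder above. Not used by the
// page, and the name is illustrative.
function encodeFloatToUint32(theFloat) {
const buffer = new ArrayBuffer(4);
const view = new DataView(buffer);
view.setFloat32(0, theFloat); // write the four bytes as a float
return view.getUint32(0); // read the same four bytes back as a uint32
}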
function init() {
const container = document.getElementById("main-scene-container");
camera = new THREE.PerspectiveCamera(
50,
window.innerWidth / window.innerHeight,
.01,
100
);
scene = new THREE.Scene();
// Video input handler
videoInput.addEventListener("change", function () {
const file = this.files[0]; // Get the selected file
if (file) {
const videoURL = URL.createObjectURL(file); // Create blob URL
video.src = videoURL; // Set video source
video.style.display = "block"; // Show the video player
video.load(); // Load the video
video.play();
}
});
// function copyRendererToCanvas(renderer) {
// return new Promise((resolve) => {
// const gl = renderer.getContext();
// const width = 256;
// const height = 256;
// const pixels = new Uint8Array(width * height * 4);
// gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
// // Flip image vertically
// const imageData = new ImageData(width, height);
// for (let y = 0; y < height; y++) {
// const row = pixels.subarray(y * width * 4, (y + 1) * width * 4);
// imageData.data.set(row, (height - y - 1) * width * 4);
// } //vector1.angleTo vector2
// const canvas = document.createElement('canvas');
// canvas.width = width;
// canvas.height = height;
// const ctx = canvas.getContext('2d');
// ctx.putImageData(imageData, 0, 0);
// resolve(canvas);
// });
// }
const texture = new THREE.VideoTexture(video);
texture.minFilter = THREE.NearestFilter;
texture.magFilter = THREE.NearestFilter;
texture.generateMipmaps = false;
// const captureImageWidth = 256,
// captureImageHeight = 256;
// const nearClipping = 0.1,
// farClipping = 5,
// pointSize = 3,
// boxSize = 1;
// create an array of points, where every three elements correspond to one point (x, y, z)
const vertices = new Float32Array(width * height * 3);
for (
let i = 0, j = 0, l = vertices.length;
i < l;
i += 3, j++
) {
vertices[i] = j % width; // pixels from left
vertices[i + 1] = Math.floor(j / width); // pixels from bottom (vertical zero in shader land)
vertices[i + 2] = 0;
}
geometry = new THREE.BufferGeometry();
// treat the "position" property as a vertex made from three elements
geometry.setAttribute(
"position",
new THREE.BufferAttribute(vertices, 3)
);
function buildMaterialAndMeshList(numCameras) {
// first clean out the old meshes and materials (materials need an explicit dispose; meshes are simply removed from the scene)
for (let i = 0; i < materialList.length; i++) {
materialList[i].dispose();
scene.remove(meshList[i]);
}
materialList = [];
meshList = [];
// now reconstruct both lists
for (let i = 0; i < numCameras; i++) {
let material = new THREE.ShaderMaterial({
uniforms: {
map: { value: texture },
width: { value: width },
height: { value: height },
nearClipping: { value: 0.1 },
farClipping: { value: 5 },
boxSize: { value: 1 },
pointSize: { value: 3 },
cameraIndex: { value: i },
numCameras: { value: numCameras },
c2wm: { value: new THREE.Matrix4() },
prjm: { value: new THREE.Matrix4() }
},
vertexShader: projection_vert_shader_source,
fragmentShader: projection_frag_shader_source,
blending: THREE.NormalBlending,
depthTest: true,
depthWrite: true,
transparent: true,
});
let mesh = new THREE.Points(geometry, material);
// add the mesh unless this is the SOURCE OF TRUTH sensor (index==3)
if (i != 3) {
scene.add(mesh);
}
materialList[i] = material;
meshList[i] = mesh;
}
}
// world-axis helpers: X red, Y green, Z blue
scene.add( new THREE.ArrowHelper(new THREE.Vector3(1,0,0), new THREE.Vector3(0,0,0), 1, 0xff0000) );
scene.add( new THREE.ArrowHelper(new THREE.Vector3(0,1,0), new THREE.Vector3(0,0,0), 1, 0x00ff00) );
scene.add( new THREE.ArrowHelper(new THREE.Vector3(0,0,1), new THREE.Vector3(0,0,0), 1, 0x0000ff) );
// Renderer (hidden, 256x256)
renderer = new THREE.WebGLRenderer({ preserveDrawingBuffer: true });
renderer.setSize(width, height);
renderer.setPixelRatio(1);
renderer.domElement.style.display = "none";
container.appendChild(renderer.domElement);
function animate() {
if (!video.readyState) return requestAnimationFrame(animate);
let debugCanvas = document.createElement("canvas");
debugCanvas.width = 1024;
debugCanvas.height = 256;
const debugContext = debugCanvas.getContext("2d", { willReadFrequently: true });
debugContext.drawImage(texture.image, 0, 0);
let d = debugContext.getImageData(0, 0, debugCanvas.width, 1);
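// row 0 of the metadata strip carries the camera count, pixel-encoded as a float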
numCameras = decodeUint32ToFloat(pixelArrayToUint32(d.data, debugCanvas.width, 0));
if (numCameras !== materialList.length) {
console.log("got new camera count: " + numCameras);
buildMaterialAndMeshList(numCameras);
}
// Reuse vectors
const virtualForward = new THREE.Vector3();
const mainForward = new THREE.Vector3();
// Update all the properties of each camera matrix
for (let i = 0; i < numCameras; i++) {
let d = debugContext.getImageData(256 * i, 1, debugCanvas.width, 32);
const c2wm_array = [0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,0,0];
const prjm_array = [0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,0,0];
for (let rowNr = 0; rowNr < 16; rowNr++) {
c2wm_array[rowNr] = decodeUint32ToFloat(pixelArrayToUint32(d.data, debugCanvas.width, rowNr));
prjm_array[rowNr] = decodeUint32ToFloat(pixelArrayToUint32(d.data, debugCanvas.width, rowNr + 16));
}
materialList[i].uniforms.c2wm.value.fromArray(c2wm_array);
materialList[i].uniforms.prjm.value.fromArray(prjm_array);
if (i==3) {
camera.matrixAutoUpdate = false;
const m = new THREE.Matrix4();
m.set(...c2wm_array);
camera.matrix.copy(m.transpose());
// ✅ Decompose to update .position, .quaternion, .scale
camera.matrix.decompose(camera.position, camera.quaternion, camera.scale);
const p = new THREE.Matrix4();
p.set(...prjm_array);
camera.projectionMatrix.copy(p.transpose());
camera.updateMatrixWorld(true);
}
if (i !== 3) {
// Get main camera's current forward direction
camera.getWorldDirection(mainForward);
// Get virtual camera's forward direction
virtualForward.set(0, 0, -1);
const rotMatrix = new THREE.Matrix4().extractRotation(materialList[i].uniforms.c2wm.value);
virtualForward.applyMatrix4(rotMatrix).normalize();
// Compute angle between them
const dot = THREE.MathUtils.clamp(virtualForward.dot(mainForward), -1, 1);
const angleRadians = Math.acos(dot);
const angleDegrees = THREE.MathUtils.radToDeg(angleRadians);
// console.log(`Camera ${i}: ${angleDegrees.toFixed(2)}° difference from three.js camera`);
}
}
renderer.render(scene, camera);
// ✅ CAPTURE RENDERED FRAME
const gl = renderer.getContext();
const pixels = new Uint8Array(width * height * 4);
gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
const renderedImageData = new ImageData(width, height);
// Flip vertically
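// (gl.readPixels returns rows bottom-up, since GL's origin is the bottom-left corner, while ImageData is top-down)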
for (let y = 0; y < height; y++) {
const rowStart = y * width * 4;
const destStart = (height - y - 1) * width * 4;
for (let i = 0; i < width * 4; i++) {
renderedImageData.data[destStart + i] = pixels[rowStart + i];
}
}
renderedContext.putImageData(renderedImageData, 0, 0);
// ✅ CAPTURE TRUTH FRAME (from video, camera i=3 region)
// Assuming each camera's video region is 256px wide
truthContext.drawImage(video, 256 * 3, 256 * 2, 256, 256, 0, 0, 256, 256);
// ✅ COMPARE IMAGES
const percentSimilarity = compareImages();
let orbitAngle = Math.atan2(camera.position.x, camera.position.z);
orbitAngle = (orbitAngle + 2 * Math.PI) % (2 * Math.PI);
const orbitAngleDegrees = THREE.MathUtils.radToDeg(orbitAngle); // → 0 to 360
updateChart(orbitAngleDegrees, percentSimilarity);
requestAnimationFrame(animate);
}
// --- CHART INITIALIZATION ---
const chartCtx = document.getElementById('fidelityChart').getContext('2d');
let fidelityChart = new Chart(chartCtx, {
type: 'scatter',
data: {
datasets: [
{
label: 'Reconstruction Fidelity',
data: [],
borderColor: 'rgba(75, 192, 192, 1)',
fill: false,
tension: 0.1,
pointRadius: 1
}
]
},
options: {
animation: false,
responsive: true,
scales: {
x: {
type: 'linear',
title: {
display: true,
text: 'Orbital Angle (degrees)'
},
ticks: {
stepSize: 30, // ← You can use 3, 6, 15, 30, etc.
callback: function(value) {
return value + '°'; // e.g., "0°", "6°", "90°"
}
},
min: 0,
max: 360,
grid: {
color: function(context) {
const value = context.tick.value;
// if (value === 120 || value === 240) {
// return 'rgba(0, 0, 0, 0.5)'; // thicker lines at key angles
// }
if (value === 90 || value === 210 || value === 330) {
return 'rgba(0, 0, 0, 0.3)'; // thicker lines at key angles
}
return 'rgba(0, 0, 0, 0.1)';
}
}
},
y: {
title: {
display: true,
text: 'Similarity (%)'
},
min: 0,
max: 100
}
},
plugins: {
title: {
display: true,
text: '3D Reconstruction vs. Ground Truth'
}
}
}
});
function compareImages() {
const img1 = renderedContext.getImageData(0, 0, width, height);
const img2 = truthContext.getImageData(0, 0, width, height);
const diff = renderedContext.createImageData(width, height);
const mismatchedPixels = pixelmatch(
img1.data,
img2.data,
diff.data,
width,
height,
{ threshold: 0.1 }
);
const totalPixels = width * height;
const percentDifference = (mismatchedPixels / totalPixels) * 100;
// ✅ Log to console
// console.log(`Frame comparison: ${percentDifference.toFixed(2)}% mismatch`);
return 100 - percentDifference; // return % similarity
}
function updateChart(angleDegrees, similarityPercent) {
fidelityChart.data.datasets[0].data.push({
x: angleDegrees,
y: similarityPercent
});
// Optional: limit data points
if (fidelityChart.data.datasets[0].data.length > 1000) {
fidelityChart.data.datasets[0].data.shift();
}
fidelityChart.update();
}
animate();
}
</script>
</body>
</html>

Assets/Website~/package-lock.json (generated new file, 6 lines)

@@ -0,0 +1,6 @@
{
"name": "Website~",
"lockfileVersion": 3,
"requires": true,
"packages": {}
}

@@ -0,0 +1 @@
{}

@@ -0,0 +1,338 @@
<!DOCTYPE html>
<html lang="en">
<head>
<title>Holoprojection browser endpoint</title>
<meta charset="utf-8" />
<meta
name="viewport"
content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0"
/>
</head>
<body>
<h2>Select a Video File</h2>
<input type="file" id="videoInput" accept="video/*" />
<button id="captureImages">Capture and download evaluation images</button>
<video
id="video"
loop
muted
crossorigin="anonymous"
playsinline
style="display: none"
>
<!-- <source
src="https://cdn.glitch.global/5bef24a3-b00f-440a-b7ad-33368d47b340/2022-07-12%2017-20-08.mp4?v=1737668838585"
type="video/mp4"
/> -->
<!-- <source
src="https://stream.vrcdn.live/live/ropebunny.live.mp4"
type="video/mp4"
/> -->
</video>
<div id="main-scene-container"></div>
<canvas id="debugCanvas" width="1024" height="256"></canvas>
<script type="importmap">
{
"imports": {
"three": "https://threejs.org/build/three.module.js",
"three/addons/": "https://threejs.org/examples/jsm/"
}
}
</script>
<script type="module">
import * as THREE from "three";
import { GUI } from "three/addons/libs/lil-gui.module.min.js";
import { OrbitControls } from 'three/addons/controls/OrbitControls.js';
let projection_vert_shader_source = await (await fetch('./projection.vert.glsl')).text();
let projection_frag_shader_source = await (await fetch('./projection.frag.glsl')).text();
let scene, camera, renderer;
let geometry;
let meshList = [];
let materialList = [];
let mouse, center;
let numCameras;
const cameraFloats = [];
init();
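// Reads one uint32 out of a pixel row: the 32 leftmost pixels of the row each
// carry one bit, most significant first; a pixel counts as a 1-bit when its
// red channel is bright (> 128).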
function pixelArrayToUint32(imageData, canvasWidth, rowNumber) {
let buffer = imageData;
let result = 0;
for (let i = 0; i < 32; i++) {
let thisBit = (buffer[(i*4) + (rowNumber*(canvasWidth*4))] > 128) & 1;
result |= (thisBit << (31-i));
}
return result;
}
// convert unsigned integer 32 to float
function decodeUint32ToFloat(theInteger) {
const buffer = new ArrayBuffer(4);
const view = new DataView(buffer);
view.setUint32(0, theInteger); // "at address 0, write theInteger to the stack as a 32 bit integer"
// console.log(view.getFloat32(0)); // "at address 0, read the stack as a Float32"
return view.getFloat32(0);
}
function init() {
const container = document.getElementById("main-scene-container");
camera = new THREE.PerspectiveCamera(
50,
window.innerWidth / window.innerHeight,
.01,
100
);
scene = new THREE.Scene();
center = new THREE.Vector3();
center.z = 0;
const video = document.getElementById("video");
const videoInput = document.getElementById("videoInput");
const captureEvaluationImageInput = document.getElementById("captureImages")
let currentVideoFilename = "video"; // Default fallback
videoInput.addEventListener("change", function () {
const file = this.files[0]; // Get the selected file
if (file) {
const videoURL = URL.createObjectURL(file); // Create blob URL
video.src = videoURL; // Set video source
video.style.display = "block"; // Show the video player
video.load(); // Load the video
video.play();
// Save the filename (without extension)
currentVideoFilename = file.name.replace(/\.[^/.]+$/, "");
}
});
captureEvaluationImageInput.addEventListener("click", async function () {
const timestamp = generateTimestamp();
const videoCanvas = document.createElement('canvas');
const videoContext = videoCanvas.getContext('2d');
videoCanvas.width = 256;
videoCanvas.height = 256;
videoContext.drawImage(video, video.videoWidth - 256, video.videoHeight - 256, 256, 256, 0, 0, 256, 256);
const videoDataURL = videoCanvas.toDataURL('image/png');
const videoFilename = `${currentVideoFilename}-${timestamp}-video.png`;
downloadImage(videoDataURL, videoFilename);
const rendererCanvas = await copyRendererToCanvas(renderer);
const rendererDataURL = rendererCanvas.toDataURL('image/png');
const rendererFilename = `${currentVideoFilename}-${timestamp}-renderer.png`;
downloadImage(rendererDataURL, rendererFilename);
});
function generateTimestamp() {
const now = new Date();
const year = now.getFullYear();
const month = String(now.getMonth() + 1).padStart(2, '0');
const day = String(now.getDate()).padStart(2, '0');
const hours = String(now.getHours()).padStart(2, '0');
const minutes = String(now.getMinutes()).padStart(2, '0');
const seconds = String(now.getSeconds()).padStart(2, '0');
return `${year}${month}${day}-${hours}${minutes}${seconds}`;
}
function downloadImage(dataURL, filename) {
const link = document.createElement('a');
link.href = dataURL;
link.download = filename;
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
}
function copyRendererToCanvas(renderer) {
return new Promise((resolve) => {
const gl = renderer.getContext();
const width = 256;
const height = 256;
const pixels = new Uint8Array(width * height * 4);
gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
// Flip image vertically
const imageData = new ImageData(width, height);
for (let y = 0; y < height; y++) {
const row = pixels.subarray(y * width * 4, (y + 1) * width * 4);
imageData.data.set(row, (height - y - 1) * width * 4);
} //vector1.angleTo vector2
const canvas = document.createElement('canvas');
canvas.width = width;
canvas.height = height;
const ctx = canvas.getContext('2d');
ctx.putImageData(imageData, 0, 0);
resolve(canvas);
});
}
const texture = new THREE.VideoTexture(video);
texture.minFilter = THREE.NearestFilter;
texture.magFilter = THREE.NearestFilter;
texture.generateMipmaps = false;
const captureImageWidth = 256,
captureImageHeight = 256;
const nearClipping = 0.1,
farClipping = 5,
pointSize = 3,
boxSize = 1;
// create an array of points, where every three elements correspond to one point (x, y, z)
const vertices = new Float32Array(captureImageWidth * captureImageHeight * 3);
for (
let i = 0, j = 0, l = vertices.length;
i < l;
i += 3, j++
) {
vertices[i] = j % captureImageWidth; // pixels from left
vertices[i + 1] = Math.floor(j / captureImageWidth); // pixels from bottom (vertical zero in shader land)
vertices[i + 2] = 0;
}
geometry = new THREE.BufferGeometry();
// treat the "position" property as a vertex made from three elements
geometry.setAttribute(
"position",
new THREE.BufferAttribute(vertices, 3)
);
function buildMaterialAndMeshList(numCameras) {
// first clean out the old meshes and materials (materials need an explicit dispose; meshes are simply removed from the scene)
for (let i = 0; i < materialList.length; i++) {
materialList[i].dispose();
scene.remove(meshList[i]);
}
materialList = [];
meshList = [];
// now reconstruct both lists
for (let i = 0; i < numCameras; i++) {
let material = new THREE.ShaderMaterial({
uniforms: {
map: { value: texture },
width: { value: captureImageWidth },
height: { value: captureImageHeight },
nearClipping: { value: nearClipping },
farClipping: { value: farClipping },
boxSize: { value: boxSize },
pointSize: { value: pointSize },
cameraIndex: { value: i },
numCameras: { value: numCameras },
c2wm: { value: new THREE.Matrix4() },
prjm: { value: new THREE.Matrix4() }
},
vertexShader: projection_vert_shader_source,
fragmentShader: projection_frag_shader_source,
blending: THREE.NormalBlending,
depthTest: true,
depthWrite: true,
transparent: true,
});
let mesh = new THREE.Points(geometry, material);
// add the mesh unless this is the SOURCE OF TRUTH sensor (index==3)
if (i != 3) {
scene.add(mesh);
}
materialList[i] = material;
meshList[i] = mesh;
}
}
function animate() {
//requestAnimationFrame(animate);
// controls.update();
let canvas = document.getElementById("debugCanvas");
let context = canvas.getContext('2d', { willReadFrequently: true } );
context.drawImage(texture.image, 0,0); //the image must be drawn to a canvas in order to read numCameras
let d = context.getImageData(0,0,canvas.width,1);
numCameras = decodeUint32ToFloat(pixelArrayToUint32(d.data, canvas.width, 0)) // obtains numCameras
// if the number of cameras changes, reconstruct the materialList and meshList
if (numCameras != materialList.length) {
console.log("got new camera count: " + numCameras);
buildMaterialAndMeshList(numCameras);
}
// update all the properties of each camera matrix
for(let i = 0; i < numCameras; i++) {
// this next line needs to obtain the region of the video texture with
// the appropriate pixel encoded floats for the camera matrix.
let d = context.getImageData((256*i),1,canvas.width,32); // rows 2 through 33: 16 rows of pixels for c2wm, then 16 for prjm
const c2wm_array = [0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,0,0];
const prjm_array = [0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,0,0];
for(let rowNr = 0; rowNr < 16; rowNr++) {
// cameraFloats[i][rowNr] = decodeUint32ToFloat(pixelArrayToUint32(d.data, c.width, rowNr))
c2wm_array[rowNr] = decodeUint32ToFloat(pixelArrayToUint32(d.data, canvas.width, rowNr))
prjm_array[rowNr] = decodeUint32ToFloat(pixelArrayToUint32(d.data, canvas.width, rowNr+16))
}
materialList[i].uniforms.c2wm.value.fromArray(c2wm_array);
materialList[i].uniforms.prjm.value.fromArray(prjm_array);
// if (i==0) document.prjm0 = prjm_array; //these two lines make the projection array visible in console, for debugging
// if (i==1) document.prjm1 = prjm_array;
// if (i==0) document.c2wm0 = c2wm_array;
// if (i==1) document.c2wm1 = c2wm_array;
if (i==3) {
camera.matrixAutoUpdate = false;
const m = new THREE.Matrix4();
m.set(...c2wm_array);
camera.matrix.copy(m.transpose());
const p = new THREE.Matrix4();
p.set(...prjm_array);
camera.projectionMatrix.copy(p.transpose());
camera.updateMatrixWorld(true);
}
// columns = document.prjm1; a = []; for (let row=0; row < 4; ++row) a.push([columns[0+row], columns[4+row], columns[8+row], columns[12+row]]); a
}
renderer.render(scene, camera);
}
renderer = new THREE.WebGLRenderer({preserveDrawingBuffer: true});
renderer.setPixelRatio(1);
renderer.setSize(256, 256);
renderer.setAnimationLoop(animate);
container.appendChild(renderer.domElement);
camera.aspect = 1;
camera.updateProjectionMatrix();
camera.position.set(-1, 1, 3);
}
</script>
</body>
</html>

@@ -1,14 +1,11 @@
 uniform sampler2D map;
 uniform float width;
 uniform float height;
 uniform float nearClipping, farClipping;
 uniform float pointSize;
 uniform float boxSize;
 uniform float cameraIndex;
 uniform float numCameras;
 uniform mat4 c2wm;
@@ -18,20 +15,10 @@ varying vec2 vUv;
 varying vec2 vUv1pxOffset;
 varying float paintfordiscard;
-mat4 unity_to_opengl(mat4 U) {
-return mat4(
-1., 0., 0., 0.,
-0., 1., 0., 0.,
-0., 0.,-1., 0.,
-0., 0., 0., 1.
-) * U;
-}
 void main() {
 vUv = vec2( position.x / width, position.y / height );
 vUv1pxOffset = vec2( 1.0 / width, 1.0 / height ) * 2.0;
 int skipctr = 0;
 int skipthreshold = 1;
@@ -49,34 +36,7 @@ void main() {
 paintfordiscard = float(skipctr > skipthreshold);
 vec4 color = texture2D( map, vUv*vec2(1.0/numCameras,(1.0/3.0))+vec2(cameraIndex/numCameras,(1.0/3.0)));
-// float depth = ( color.r + color.g + color.b ) / 3.0;
-int decodeDepth = 0;
-int rEncode = int(color.r*255.);
-int gEncode = int(color.g*255.);
-int bEncode = int(color.b*255.);
-// This loop decodes depth into a 24 bit value split across all three color channels.
-// The least significant digit is the last blue value.
-// The most significant digit is the first red value.
-for (int i = 0; i < 8; i++){
-decodeDepth <<= 1;
-decodeDepth |= ((rEncode >> (7-i)) & 1);
-decodeDepth <<= 1;
-decodeDepth |= ((gEncode >> (7-i)) & 1);
-decodeDepth <<= 1;
-decodeDepth |= ((bEncode >> (7-i)) & 1);
-}
-// decodeDepth = int(10000024);
-// decodeDepth = int(1<<23);
-// decodeDepth = int(color.r * 255.*255.);
-float depth = float(decodeDepth) / float((1<<24) - 1);
-// float depth = .5;
-// depth = color.r;
-// if (decodeDepth > 6) depth = .5;
+float depth = ( color.r + color.g + color.b ) / 3.0;
 vec4 pos = vec4(
 2.*(position.x / float(width)) - 1.,
@@ -86,7 +46,7 @@ void main() {
 );
 // First undo projection, then undo view, then below model is undone; flip the Y and Z vectors since WebGL is right handed
-vec4 pos2 = unity_to_opengl(c2wm) * inverse(prjm) * pos; // order matters here! Parentheses do not
+vec4 pos2 = c2wm * inverse(prjm) * pos; // order matters here! Parentheses do not
 pos2.xyz *= boxSize;
 gl_PointSize = pointSize;

@@ -0,0 +1,39 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

public class orbitAndViewParent : MonoBehaviour
{
    public float orbitSpeed = 20f;
    // public float setAngle = 90f;

    private Transform originPoint;
    private float angle;
    private float radius;
    private float height;

    void Start()
    {
        originPoint = transform.parent;

        // Calculate radius and height from initial position
        Vector3 localPos = transform.localPosition;
        radius = Mathf.Sqrt(localPos.x * localPos.x + localPos.z * localPos.z);
        height = localPos.y;

        // Calculate starting angle
        angle = Mathf.Atan2(localPos.z, localPos.x) * Mathf.Rad2Deg;
        // angle += setAngle;
    }

    void Update()
    {
        angle += orbitSpeed * Time.deltaTime;
        float x = Mathf.Cos(angle * Mathf.Deg2Rad) * radius;
        float z = Mathf.Sin(angle * Mathf.Deg2Rad) * radius;
        transform.localPosition = new Vector3(x, height, z);
        transform.LookAt(originPoint.position);
    }
}
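At angle = 0 this places the object at localPosition = (radius, height, 0); as angle sweeps through 360 degrees the object traces a horizontal circle of that radius around its parent, with LookAt keeping it aimed at the parent the whole way around.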

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: b68ba7b2a89f5b348ae578fa3b484c41
MonoImporter:
  externalObjects: {}
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant:

Assets/viewParent.cs (new file, 21 lines)

@@ -0,0 +1,21 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

public class viewParent : MonoBehaviour
{
    private Transform originPoint;

    // Start is called before the first frame update
    void Start()
    {
        originPoint = transform.parent;
    }

    // Update is called once per frame
    void Update()
    {
        transform.LookAt(originPoint.position);
    }
}

Assets/viewParent.cs.meta (new file, 11 lines)

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: fc8fa59f9cf92e84ea59ca1496ce044e
MonoImporter:
  externalObjects: {}
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant: