map camera matrix values to Three.js camera uniform

This commit is contained in:
buncccc 2025-04-06 11:43:46 +12:00
parent adcd73f1eb
commit 1f31073017
4 changed files with 124 additions and 65 deletions

View File

@ -103,7 +103,7 @@ public class RT3script : MonoBehaviour
rowOffset = encodeMatrix(cameraList[i].cameraToWorldMatrix, i, rowOffset);
rowOffset = encodeMatrix(cameraList[i].projectionMatrix, i, rowOffset);
encodeData(rowOffset, i, rowOffset);
// encodeData(rowOffset, i, rowOffset);
//encode into pixels transform matrix for this camera
outputImage.Apply();

8
Assets/Website.meta Normal file
View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: fce4a387813179444a907bff52783141
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -37,7 +37,8 @@
uniform float pointSize;
uniform float boxSize;
uniform int numCameras;
uniform int cameraIndex;
varying vec2 vUv;
@ -53,31 +54,31 @@
float z = depth * (farClipping-nearClipping) + nearClipping;
vec4 pos = vec4(
( position.x / width - 0.5 )*boxSize,
(float(cameraIndex) + position.x / width - 0.5 )*boxSize,
( position.y / height - 0.5 )*boxSize,
z*boxSize*0.5,
1.0);
gl_PointSize = pointSize;
gl_Position = projectionMatrix * modelViewMatrix * pos;
if (depth <.01) { // move this point out of the view box if the depth is nearly zero
gl_Position = vec4(-1., -1., -1., -1.);
}
}
</script>
<script id="fs" type="x-shader/x-fragment">
uniform sampler2D map;
uniform int numCameras;
uniform int cameraIndex;
varying vec2 vUv;
// Fragment shader entry point.
// NOTE(review): this is a diff hunk with stripped +/- markers — the
// numCameras branch and the unconditional sampling below are the old and
// new versions of the same logic interleaved; only one belongs in the
// real file. The UV offsets select a sub-region of the combined
// depth/color video atlas — confirm which offset is current.
void main() {
if (numCameras == 2) {
gl_FragColor = vec4( 1.0, 1.0, 1.0, 1.0);
}
else {
vec4 color = texture2D( map, vUv*vec2(0.5,0.333)+vec2(0.0,0.666) );
gl_FragColor = vec4( color.r, color.g, color.b, 1.0);
}
vec4 color = texture2D( map, vUv*vec2(0.5,0.333) + vec2(0.0, 0.0) );
// if (color.r < .2) { discard;}
gl_FragColor = vec4( color.r, color.g, color.b, 1.0);
}
</script>
@ -95,7 +96,9 @@
import { GUI } from "three/addons/libs/lil-gui.module.min.js";
let scene, camera, renderer;
let geometry, mesh, material;
let geometry;
let meshList = [];
let materialList = [];
let mouse, center;
let numCameras;
@ -156,21 +159,19 @@
texture.minFilter = THREE.NearestFilter;
texture.generateMipmaps = false;
const width = 640,
height = 480;
const nearClipping = 0,
const captureImageWidth = 256,
captureImageHeight = 256;
const nearClipping = 0.1,
farClipping = 5,
pointSize = 5,
boxSize = 500;
geometry = new THREE.BufferGeometry();
const vertices = new Float32Array(width * height * 3);
const vertices = new Float32Array(captureImageWidth * captureImageHeight * 3);
for (let i = 0, j = 0, l = vertices.length; i < l; i += 3, j++) {
vertices[i] = j % width;
vertices[i + 1] = Math.floor(j / width);
vertices[i] = j % captureImageWidth;
vertices[i + 1] = Math.floor(j / captureImageWidth);
}
geometry.setAttribute(
@ -178,63 +179,108 @@
new THREE.BufferAttribute(vertices, 3)
);
material = new THREE.ShaderMaterial({
uniforms: {
map: { value: texture },
width: { value: width },
height: { value: height },
nearClipping: { value: nearClipping },
farClipping: { value: farClipping },
boxSize: { value: boxSize },
pointSize: { value: pointSize },
numCameras: { value: numCameras },
},
vertexShader: document.getElementById("vs").textContent,
fragmentShader: document.getElementById("fs").textContent,
blending: THREE.NormalBlending,
depthTest: true,
depthWrite: true,
transparent: true,
});
mesh = new THREE.Points(geometry, material);
scene.add(mesh);
const gui = new GUI();
gui
.add(material.uniforms.nearClipping, "value", 0, 1, nearClipping)
.name("nearClipping");
gui
.add(material.uniforms.farClipping, "value", 1, 10, farClipping)
.name("farClipping");
gui
.add(material.uniforms.pointSize, "value", 1, 10, pointSize)
.name("pointSize");
gui
.add(material.uniforms.boxSize, "value", 1, 1000, boxSize)
.name("boxSize");
// const gui = new GUI();
// gui
// .add(material.uniforms.nearClipping, "value", 0, 1, nearClipping)
// .name("nearClipping");
// gui
// .add(material.uniforms.farClipping, "value", 1, 10, farClipping)
// .name("farClipping");
// gui
// .add(material.uniforms.pointSize, "value", 1, 10, pointSize)
// .name("pointSize");
// gui
// .add(material.uniforms.boxSize, "value", 1, 1000, boxSize)
// .name("boxSize");
video.play();
//
// Rebuild the per-camera material/mesh lists: one THREE.Points per capture
// camera, each tagged with its cameraIndex uniform plus a pair of flattened
// 4x4 matrix uniforms (filled in every frame from the pixel-encoded floats
// in the video texture).
// Reads texture, geometry, captureImageWidth/Height, near/farClipping,
// boxSize and pointSize from the enclosing scope. Same name and parameter
// as before — callers are unaffected.
function buildMaterialAndMeshList(numCameras) {
// First tear down the old meshes and materials.
// BUGFIX: THREE.Points (an Object3D) has no dispose() method — the old
// meshList[i].dispose() threw a TypeError — and materials are not scene
// children, so scene.remove(materialList[i]) was a no-op. Remove the mesh
// from the scene and dispose the material instead. The geometry is shared
// by every mesh, so it is deliberately NOT disposed here.
for (let i = 0; i < materialList.length; i++) {
scene.remove(meshList[i]);
materialList[i].dispose();
}
materialList = [];
meshList = [];
// Now reconstruct both lists, one entry per camera.
for (let i = 0; i < numCameras; i++) {
const material = new THREE.ShaderMaterial({
uniforms: {
map: { value: texture },
width: { value: captureImageWidth },
height: { value: captureImageHeight },
nearClipping: { value: nearClipping },
farClipping: { value: farClipping },
boxSize: { value: boxSize },
pointSize: { value: pointSize },
// Both shaders declare `uniform int numCameras;` — keep it supplied
// (the previous single-material setup passed it; it had been dropped).
numCameras: { value: numCameras },
cameraIndex: { value: i },
// Flattened 4x4 matrices; overwritten per-frame by animate().
captureCameraToWorldMatrix: { value: new Array(16).fill(0) },
captureCameraProjectionMatrix: { value: new Array(16).fill(0) },
},
vertexShader: document.getElementById("vs").textContent,
fragmentShader: document.getElementById("fs").textContent,
blending: THREE.NormalBlending,
depthTest: true,
depthWrite: true,
transparent: true,
});
const mesh = new THREE.Points(geometry, material);
scene.add(mesh);
materialList[i] = material;
meshList[i] = mesh;
}
}
// Per-frame loop: orbits the camera with the mouse, decodes the camera
// count and camera matrices from pixel-encoded floats in the video texture
// (via a debug canvas), then renders.
// NOTE(review): this span is a rendered diff hunk whose +/- markers were
// stripped — removed (old) and added (new) lines are interleaved, e.g.
// `let c` vs `let canvas`, and two `let d` reads in one scope. As printed
// it is not valid JavaScript; recover the real function from the repo.
function animate() {
camera.position.x = -mouse.x;
camera.position.y = mouse.y;
camera.lookAt(center);
// NOTE(review): old line — superseded by `canvas` on the next line.
let c = document.getElementById("debugCanvas");
let canvas = document.getElementById("debugCanvas");
let context = canvas.getContext("2d");
context.drawImage(texture.image, 0,0); //the image must be drawn to a canvas in order to read numCameras
// NOTE(review): the next four lines appear to be the removed
// single-material code path (they reference `c`, `cont` and the old
// global `material`).
let cont = c.getContext("2d");
cont.drawImage(texture.image, 0,0);
let d = cont.getImageData(0,0,c.width,c.height);
material.uniforms.numCameras.value = decodeUint32ToFloat(pixelArrayToUint32(d.data, c.width, 0)) // obtains numCameras
// NOTE(review): old loop — its closing brace was consumed by the diff.
for(let rowNr = 1; rowNr < 17; rowNr++) {
cameraFloats[rowNr-1] = decodeUint32ToFloat(pixelArrayToUint32(d.data, c.width, rowNr))
// Row 0 of the image carries the camera count as a pixel-encoded float.
let d = context.getImageData(0,0,canvas.width,1);
numCameras = decodeUint32ToFloat(pixelArrayToUint32(d.data, canvas.width, 0)) // obtains numCameras
// if the number of cameras changes, reconstruct the materialList and meshList
if (numCameras != materialList.length) {
console.log("got new camera count: " + numCameras);
buildMaterialAndMeshList(numCameras);
}
// update all the properties of each camera matrix
for(let i = 0; i < numCameras; i++) {
//skip drawing if this is the first camera, since we already did it to get numCameras above
if (i != 0) {
// NOTE(review): drawing at +256*i shifts the image RIGHT while
// getImageData below still reads x=0 — the intended offset is
// presumably -256*i to bring camera i's strip to x=0. Confirm.
context.drawImage(texture.image, 256*i,0);
}
// this next line needs to obtain the region of the video texture with
// the appropriate pixel encoded floats for the camera matrix.
let d = context.getImageData(0,1,canvas.width,16); //should get data from the second line to the 17th, or 16 rows of pixels
for(let rowNr = 0; rowNr < 16; rowNr++) {
// cameraFloats[i][rowNr] = decodeUint32ToFloat(pixelArrayToUint32(d.data, c.width, rowNr))
// NOTE(review): only captureCameraToWorldMatrix is written here;
// captureCameraProjectionMatrix is never filled within this view.
materialList[i].uniforms.captureCameraToWorldMatrix.value[rowNr] = decodeUint32ToFloat(pixelArrayToUint32(d.data, canvas.width, rowNr))
}
}
renderer.render(scene, camera);
}
renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
@ -245,8 +291,6 @@
document.addEventListener("mousemove", onDocumentMouseMove);
//
window.addEventListener("resize", onWindowResize);
}

View File

@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: 1006689567576dc4ba3e610b5f29b209
TextScriptImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant: