using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using UnityEngine;
using System.Runtime.InteropServices;
using System.IO;
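
// RT3script renders a color and a depth view for each camera in cameraList
// into one packed RGB atlas (color tiles on the bottom row, depth tiles above
// them, and a metadata strip on top that encodes the camera count plus each
// camera's cameraToWorldMatrix and projectionMatrix as rows of 32 black/white
// pixels), then pipes the raw frames into FFmpeg for H.264/RTMP streaming.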
public class RT3script : MonoBehaviour
{
    // public string outputRTMP = "rtmp://localhost/live/HoloPipeline";
    public string ingestRTMP = "rtmp://ingest.vrcdn.live/live";
    public string streamKey = "";
    public string mp4name = "my_movie_depth_ffmpeg.mp4";

    public Camera[] cameraList;
    private Camera[] depthCameraList;

    private RenderTexture[] colorImages, depthImages;
    public Material debugMaterial;

    public Texture2D outputImage;

    public int targetFrameRate = 60;
    int frameCount = 0;

    private Process ffmpegProcess;
    private string outputMP4;
    private int metadataHeight = 256;

    // Start is called before the first frame update
    void Start()
    {
        colorImages = new RenderTexture[cameraList.Length];
        depthImages = new RenderTexture[cameraList.Length];
        depthCameraList = new Camera[cameraList.Length];

        // Matrix4x4 testTranslate = Matrix4x4.Translate(new Vector3(0, 0, 2)); // (unused test code)
        // UnityEngine.Debug.Log(testTranslate.ToString());

        // colorImage = new RenderTexture(256,256,16,RenderTextureFormat.ARGB32);
        // colorImage2 = new RenderTexture(256,256,16,RenderTextureFormat.ARGB32);
        // depthImage = new RenderTexture(256,256,16,RenderTextureFormat.ARGB32);
        // depthImage2 = new RenderTexture(256,256,16,RenderTextureFormat.ARGB32);

        // One 256-wide column per camera: color tile at the bottom, depth tile
        // above it, metadata strip on top.
        outputImage = new Texture2D(256 * cameraList.Length, 512 + metadataHeight, TextureFormat.RGB24, false);
        outputImage.filterMode = FilterMode.Point;

        for (int i = 0; i < cameraList.Length; i++)
        {
            colorImages[i] = new RenderTexture(256, 256, 16, RenderTextureFormat.ARGB32);
            cameraList[i].targetTexture = colorImages[i];

            // Clone each color camera into a depth camera that renders the same
            // view through a depth-only replacement shader onto a black background.
            Camera depthCamera = new GameObject().AddComponent<Camera>();
            depthCamera.CopyFrom(cameraList[i]);
            depthCamera.clearFlags = CameraClearFlags.SolidColor;
            depthCamera.backgroundColor = Color.black;
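
            // Note: Shader.Find only resolves at runtime if the shader is
            // actually included in the build (e.g. referenced by a material or
            // listed under Always Included Shaders); otherwise it returns null
            // and the replacement render produces nothing.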
            depthCamera.SetReplacementShader(Shader.Find("Custom/DepthOnly"), "");

            depthCameraList[i] = depthCamera;
            depthImages[i] = new RenderTexture(256, 256, 16, RenderTextureFormat.ARGB32);
            depthCameraList[i].targetTexture = depthImages[i];
        }

        debugMaterial.mainTexture = outputImage;

        // Initialize FFmpeg process
        StartFFmpeg();
    }

    void Update()
    {
        // Keep each depth camera's view matrix locked to its color camera.
        for (int i = 0; i < cameraList.Length; i++)
        {
            depthCameraList[i].worldToCameraMatrix = cameraList[i].worldToCameraMatrix;
            // depthCameraList[i].projectionMatrix = cameraList[i].cameraToWorldMatrix;
        }
    }

    // LateUpdate is called once per frame, after every Update, so all camera
    // movement for this frame has already been applied.
    void LateUpdate()
    {
        // Pace the stream: only emit a frame when wall-clock time says one is
        // owed, so the pipe approximates targetFrameRate regardless of the
        // actual render rate.
        float expectedFrames = Time.timeSinceLevelLoad * targetFrameRate;
        if (frameCount < expectedFrames)
        {
            for (int i = 0; i < cameraList.Length; i++)
            {
                // Copy this camera's color and depth tiles into the atlas.
                RenderTexture.active = colorImages[i];
                outputImage.ReadPixels(new Rect(0, 0, 256, 256), 256 * i, 0);

                RenderTexture.active = depthImages[i];
                outputImage.ReadPixels(new Rect(0, 0, 256, 256), 256 * i, 256);

                // For the very first camera, encode the number of camera pairs before anything else.
                if (i == 0)
                {
                    encodeData(cameraList.Length, 0, 1);
                }

                // Encode this camera's matrices into the metadata strip.
                // var tr = cameraList[i].transform.localToWorldMatrix;
                int rowOffset = 2;
                // UnityEngine.Debug.Log("Encoding Camera "+i+":\n" +cameraList[i].cameraToWorldMatrix.ToString());

                rowOffset = encodeMatrix(cameraList[i].cameraToWorldMatrix, i, rowOffset);
                rowOffset = encodeMatrix(cameraList[i].projectionMatrix, i, rowOffset);

                // encodeData(rowOffset, i, rowOffset);
            }
            RenderTexture.active = null;

            // A single Apply() per frame is enough: it uploads the CPU-side
            // pixels to the GPU for the debug material, while the FFmpeg pipe
            // below reads the CPU-side buffer directly.
            outputImage.Apply();

            // Get the raw pixel data and write it to FFmpeg's input stream.
            byte[] frameBytes = outputImage.GetRawTextureData();
            ffmpegProcess.StandardInput.BaseStream.Write(frameBytes, 0, frameBytes.Length);
            ffmpegProcess.StandardInput.BaseStream.Flush();

            frameCount += 1;
        }
    }
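
    // Hedged aside: ReadPixels stalls the render thread while it copies the
    // GPU texture back to the CPU. On Unity versions that support it, the
    // UnityEngine.Rendering.AsyncGPUReadback API can do this copy without
    // blocking, at the cost of a few frames of latency, e.g.:
    //
    //   AsyncGPUReadback.Request(colorImages[i], 0, TextureFormat.RGB24,
    //       request => { /* copy request.GetData<byte>() into the atlas */ });
    //
    // This is illustrative only; the synchronous path above is what the
    // script actually uses.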

    // This encodes COLUMN-MAJOR: the inner loop (rowNr) walks down one column,
    // writing the column's first-row element, then its second-row element, etc.
    private int encodeMatrix(Matrix4x4 mat, int colOffset, int rowOffset)
    {
        // UnityEngine.Debug.Log("m23: "+mat.m23+"; mat[2,3]: "+mat[2,3]);

        for (int colNr = 0; colNr < 4; colNr++)
        {
            for (int rowNr = 0; rowNr < 4; rowNr++)
            {
                encodeData(mat[rowNr, colNr], colOffset, rowOffset); // to switch to ROW-MAJOR encoding, swap the two loop lines above (ALT+Up/Down)
                rowOffset++;
            }
        }
        return rowOffset;
    }
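
    // For reference: Unity's Matrix4x4 indexer is this[row, column], and Unity
    // stores matrices column-major, so the loop above also walks the matrix in
    // its in-memory order.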

    private void encodeData(float data, int colNumber, int rowNumber)
    {
        // Encode one float as a row of 32 black/white pixels in the metadata strip.
        var encodedTransform = new Color[32];
        uint floatbits = FloatToIntBits(data);

        for (int j = 0; j < 32; j++)
        {
            // Test bit (31 - j): the most significant bit lands in the leftmost
            // pixel, i.e. big-endian bit order within the row.
            if (((floatbits >> (31 - j)) & 1) == 1)
            {
                encodedTransform[j] = Color.white;
            }
            else
            {
                encodedTransform[j] = Color.black;
            }
        }

        // Rows count down from the top of the texture (rowNumber 1 = top row).
        outputImage.SetPixels(256 * colNumber, outputImage.height - rowNumber, 32, 1, encodedTransform);
    }
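
    // Hedged sketch (never called here): the C# inverse of encodeData, shown
    // for symmetry with the JavaScript decode below. It reads the 32 pixels
    // back into a uint and reinterprets the bits as a float.
    private float decodeData(Texture2D image, int colNumber, int rowNumber)
    {
        uint floatbits = 0;
        for (int j = 0; j < 32; j++)
        {
            // A white pixel means bit (31 - j) was set.
            Color c = image.GetPixel(256 * colNumber + j, image.height - rowNumber);
            if (c.r > 0.5f)
            {
                floatbits |= 1u << (31 - j);
            }
        }
        FloatAndUIntUnion f2i = default(FloatAndUIntUnion);
        f2i.UInt32Bits = floatbits;
        return f2i.FloatValue;
    }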

    // Inverse in JavaScript of this:
    // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/setUint32
    // // Create an ArrayBuffer with 32 bits (4 bytes)
    // const buffer = new ArrayBuffer(4);
    // const view = new DataView(buffer);
    // view.setUint32(0, 1057279852); // write a bit pattern produced by FloatToIntBits
    // console.log(view.getFloat32(0)); // read it back as the original float

    // Based on this: https://stackoverflow.com/a/16822144
    [StructLayout(LayoutKind.Explicit)]
    private struct FloatAndUIntUnion
    {
        // The memory layout of this struct looks like this,
        // but both fields refer to the same memory address:
        // UInt32Bits: IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
        // FloatValue: SEEEEEEEEFFFFFFFFFFFFFFFFFFFFFFF
        [FieldOffset(0)] public uint UInt32Bits;
        [FieldOffset(0)] public float FloatValue;
    }

    public static uint FloatToIntBits(float value)
    {
        FloatAndUIntUnion f2i = default(FloatAndUIntUnion);
        f2i.FloatValue = value; // write as float
        return f2i.UInt32Bits;  // read back as uint
    }
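
    // On runtimes with .NET Standard 2.1 or newer, BitConverter.SingleToInt32Bits
    // does the same reinterpretation without the explicit-layout union.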

    private void OnApplicationQuit()
    {
        // Stop the FFmpeg process when the application quits. Closing stdin
        // signals end-of-stream so FFmpeg can flush and exit on its own.
        if (ffmpegProcess != null && !ffmpegProcess.HasExited)
        {
            ffmpegProcess.StandardInput.Close();
            // ffmpegProcess.Kill();
            ffmpegProcess.WaitForExit();
            ffmpegProcess = null;
            UnityEngine.Debug.Log("FFmpeg process stopped (hopefully)");
        }
    }
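
    // Hedged note: if FFmpeg exits early (e.g. an unreachable ingest URL or a
    // bad stream key), the next stdin write in LateUpdate will throw an
    // IOException; checking ffmpegProcess.HasExited before writing is a cheap guard.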

    private void StartFFmpeg()
    {
        // Set up the FFmpeg process for RTMP streaming: raw RGB24 frames in on
        // stdin, flipped vertically (Unity textures are bottom-up), encoded
        // with low-latency x264, and sent out as FLV over RTMP.
        string ffmpegArgs = $"-y -f rawvideo -vcodec rawvideo -pix_fmt rgb24 -s {outputImage.width}x{outputImage.height} " +
            // string ffmpegArgs = $"-y -f rawvideo -vcodec rawvideo -pix_fmt rgbaf32le -s {outputImage.width}x{outputImage.height} " +
            $"-r {targetFrameRate} -i pipe:0 -vf vflip -c:v libx264 -preset ultrafast -tune zerolatency -pix_fmt yuv420p -f flv {ingestRTMP + "/" + streamKey}";

        ffmpegProcess = new Process();
        ffmpegProcess.StartInfo.FileName = "Assets/ffmpeg.exe";
        ffmpegProcess.StartInfo.Arguments = ffmpegArgs;
        ffmpegProcess.StartInfo.UseShellExecute = false;
        ffmpegProcess.StartInfo.RedirectStandardInput = true;
        ffmpegProcess.StartInfo.CreateNoWindow = true;
        ffmpegProcess.Start();

        // UnityEngine.Debug.Log("FFmpeg process started with arguments: " + ffmpegArgs);
    }
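
    // Hedged sketch: a local-recording variant of the arguments above, using
    // the otherwise-unused mp4name/outputMP4 fields. The input side is
    // identical; only the output switches from RTMP/FLV to an MP4 file:
    //
    //   outputMP4 = Path.Combine(Application.persistentDataPath, mp4name);
    //   string ffmpegArgs = $"-y -f rawvideo -vcodec rawvideo -pix_fmt rgb24 " +
    //       $"-s {outputImage.width}x{outputImage.height} -r {targetFrameRate} " +
    //       $"-i pipe:0 -vf vflip -c:v libx264 -preset ultrafast -pix_fmt yuv420p \"{outputMP4}\"";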
}