// RT3script.cs — renders color + depth tiles from multiple cameras into one
// atlas texture and streams the raw frames to an RTMP endpoint via ffmpeg.
using System.Collections;
|
|
using System.Collections.Generic;
|
|
using System.Diagnostics;
|
|
using UnityEngine;
|
|
|
|
using System.IO;
|
|
|
|
|
|
/// <summary>
/// Composites color and depth renders from <see cref="cameraList"/> into a single
/// atlas texture (one 256px row of color tiles, one row of depth tiles, plus a
/// metadata strip) and pipes each frame as raw RGB24 into an ffmpeg subprocess
/// that streams to an RTMP ingest endpoint.
/// </summary>
public class RT3script : MonoBehaviour
{
    // public string outputRTMP = "rtmp://localhost/live/HoloPipeline";
    public string ingestRTMP = "rtmp://ingest.vrcdn.live/live";
    public string streamKey = "";
    public string mp4name = "my_movie_depth_ffmpeg.mp4";

    public Camera[] cameraList;
    // One depth-only clone per entry in cameraList, created in Start().
    private Camera[] depthCameraList;

    private RenderTexture[] colorImages, depthImages;
    public Material debugMaterial;

    // CPU-side atlas that is filled via ReadPixels and piped to ffmpeg.
    public Texture2D outputImage;

    public int targetFrameRate = 60;
    int frameCount = 0;

    private Process ffmpegProcess;
    private int metadataHeight = 256;

    // Edge length, in pixels, of each square per-camera tile in the atlas.
    private const int TileSize = 256;
    // Number of pixels used to bit-encode the camera-pair count in the metadata.
    private const int CountBits = 8;

    /// <summary>
    /// Allocates the render targets and the atlas, spawns one depth-only camera
    /// per color camera, and launches the ffmpeg streaming process.
    /// </summary>
    void Start()
    {
        colorImages = new RenderTexture[cameraList.Length];
        depthImages = new RenderTexture[cameraList.Length];
        depthCameraList = new Camera[cameraList.Length];

        // Atlas layout: row 0..255 = color tiles, row 256..511 = depth tiles,
        // remaining metadataHeight rows reserved for encoded metadata.
        outputImage = new Texture2D(TileSize * cameraList.Length, 2 * TileSize + metadataHeight, TextureFormat.RGB24, false);
        // Point filtering so bit-encoded metadata pixels are not smeared.
        outputImage.filterMode = FilterMode.Point;

        for (int i = 0; i < cameraList.Length; i++)
        {
            colorImages[i] = new RenderTexture(TileSize, TileSize, 16, RenderTextureFormat.ARGB32);
            cameraList[i].targetTexture = colorImages[i];

            // Clone each color camera into a depth camera that renders the
            // scene with a depth-only replacement shader on a black background.
            Camera depthCamera = new GameObject("DepthCamera_" + i).AddComponent<Camera>();
            depthCamera.CopyFrom(cameraList[i]);
            depthCamera.clearFlags = CameraClearFlags.SolidColor;
            depthCamera.backgroundColor = Color.black;
            depthCamera.SetReplacementShader(Shader.Find("Custom/DepthOnly"), "");

            depthCameraList[i] = depthCamera;
            depthImages[i] = new RenderTexture(TileSize, TileSize, 16, RenderTextureFormat.ARGB32);
            depthCameraList[i].targetTexture = depthImages[i];
        }

        // Show the live atlas on the debug material for in-editor inspection.
        debugMaterial.mainTexture = outputImage;

        // Initialize FFmpeg process
        StartFFmpeg();
    }

    /// <summary>
    /// Once per rendered frame, copies every camera's color and depth tile into
    /// the atlas, encodes the metadata strip, and writes the raw frame to
    /// ffmpeg's stdin — paced so roughly targetFrameRate frames/sec are sent.
    /// </summary>
    void LateUpdate()
    {
        // Pace output: only emit a frame while we are behind the wall-clock
        // schedule of (elapsed time * targetFrameRate) frames.
        float expectedFrames = Time.timeSinceLevelLoad * targetFrameRate;
        if (frameCount >= expectedFrames)
        {
            return;
        }

        // Guard: ffmpeg may have failed to start or died mid-stream; writing to
        // its stdin then would throw every frame.
        if (ffmpegProcess == null || ffmpegProcess.HasExited)
        {
            return;
        }

        for (int i = 0; i < cameraList.Length; i++)
        {
            // Copy this camera's color tile into the top row of the atlas.
            RenderTexture.active = colorImages[i];
            outputImage.ReadPixels(new Rect(0, 0, TileSize, TileSize), TileSize * i, 0);

            // Copy the matching depth tile into the second row.
            RenderTexture.active = depthImages[i];
            outputImage.ReadPixels(new Rect(0, 0, TileSize, TileSize), TileSize * i, TileSize);

            // For the very first camera, encode the number of camera pairs
            // before anything else: bit j of the count maps to pixel j
            // (white = 1, black = 0).
            if (i == 0)
            {
                var colors = new Color[CountBits];
                for (int j = 0; j < CountBits; j++)
                {
                    // Bitwise check of the length value at bit j.
                    colors[j] = (((cameraList.Length >> j) & 1) == 1) ? Color.white : Color.black;
                }
                // BUG FIX: the block width must equal colors.Length (8) — the
                // original passed cameraList.Length, which makes SetPixels
                // throw whenever the camera count is not exactly 8.
                outputImage.SetPixels(0, 0, CountBits, 1, colors);
            }

            // TODO: encode this camera's transform matrix into the metadata strip.
        }
        RenderTexture.active = null;

        // A single Apply() per frame is enough: it uploads the CPU-side edits
        // to the GPU so the debug material shows the current atlas.
        // GetRawTextureData() below reads the CPU copy and does not need it.
        outputImage.Apply();

        // Get the raw pixel data and write it to FFmpeg's input stream.
        byte[] frameBytes = outputImage.GetRawTextureData();
        ffmpegProcess.StandardInput.BaseStream.Write(frameBytes, 0, frameBytes.Length);
        ffmpegProcess.StandardInput.BaseStream.Flush();

        frameCount += 1;
    }

    /// <summary>
    /// Releases the render textures and destroys the depth-camera GameObjects
    /// spawned in Start() — the original left these leaking.
    /// </summary>
    void OnDestroy()
    {
        if (colorImages != null)
        {
            foreach (var rt in colorImages)
            {
                if (rt != null) rt.Release();
            }
        }
        if (depthImages != null)
        {
            foreach (var rt in depthImages)
            {
                if (rt != null) rt.Release();
            }
        }
        if (depthCameraList != null)
        {
            foreach (var cam in depthCameraList)
            {
                if (cam != null) Destroy(cam.gameObject);
            }
        }
    }

    /// <summary>
    /// Shuts ffmpeg down cleanly when the application quits: closing stdin lets
    /// ffmpeg flush and finalize the stream before exiting on its own.
    /// </summary>
    private void OnApplicationQuit()
    {
        // Stop FFmpeg process when the application quits
        if (ffmpegProcess != null && !ffmpegProcess.HasExited)
        {
            ffmpegProcess.StandardInput.Close();
            // ffmpegProcess.Kill();
            ffmpegProcess.WaitForExit();
            ffmpegProcess.Dispose();
            ffmpegProcess = null;
            UnityEngine.Debug.Log("FFmpeg process stopped (hopefully) ");
        }
    }

    /// <summary>
    /// Launches ffmpeg configured to read raw RGB24 frames at the atlas
    /// resolution from stdin and push a low-latency x264 FLV stream to the
    /// RTMP ingest URL + stream key.
    /// </summary>
    private void StartFFmpeg()
    {
        // Setup FFmpeg process with arguments for RTMP streaming.
        string ffmpegArgs = $"-y -f rawvideo -vcodec rawvideo -pix_fmt rgb24 -s {outputImage.width}x{outputImage.height} " +
                            $"-r {targetFrameRate} -i pipe:0 -c:v libx264 -preset ultrafast -tune zerolatency -pix_fmt yuv420p -f flv {ingestRTMP + "/" + streamKey}";

        ffmpegProcess = new Process();
        // NOTE(review): assumes a bundled ffmpeg.exe in Assets — Windows-only
        // and editor-relative path; confirm for standalone builds.
        ffmpegProcess.StartInfo.FileName = "Assets/ffmpeg.exe";
        ffmpegProcess.StartInfo.Arguments = ffmpegArgs;
        // Required for stdin redirection.
        ffmpegProcess.StartInfo.UseShellExecute = false;
        ffmpegProcess.StartInfo.RedirectStandardInput = true;
        ffmpegProcess.StartInfo.CreateNoWindow = true;
        ffmpegProcess.Start();

        // UnityEngine.Debug.Log("FFmpeg process started with arguments: " + ffmpegArgs);
    }
}
|