
本文介绍了如何使用NatCorder创建视频文件,这是一个Unity资产。
NatCorder
NatCorder是一种资产,用于在Unity中创建mp4等视频文件,如果你未安装,请先从站内下载。
它可以轻松地从游戏摄像机、渲染纹理和像素数据创建视频文件,录音也是可能的。
除了PC,它还支持iOS和Android等移动设备,除了mp4,还可以导出为动画GIF或JPEG序列。
记录游戏相机
首先,作为一种常见的用法,尝试按原样记录游戏相机。
记录按以下步骤进行。
生成MP4Recorder(用于mp4输出),创建CameraInput以开始录制,最后使用MP4Recorder.FinishWriting()完成录制并保存。执行它的代码如下:
using System;
using System.Threading.Tasks;
using UnityEngine;
using NatSuite.Recorders;
using NatSuite.Recorders.Inputs;
using NatSuite.Recorders.Clocks;
/// <summary>
/// Records a game camera's rendered output into an MP4 file via NatCorder.
/// Create, call <see cref="StartRecording"/>, then <see cref="EndRecordingAsync"/>
/// to finish writing and obtain the output file path.
/// </summary>
public class GameCameraRecorder : IDisposable
{
    private readonly Camera _camera;
    private readonly IMediaRecorder _recorder;
    private CameraInput _cameraInput;

    // NOTE(review): the triple-n spelling "IsRunnning" is part of the public
    // interface (external callers read it), so it is deliberately kept.
    public bool IsRunnning { get; private set; }
    public bool IsDisposed { get; private set; }

    /// <summary>
    /// Creates a recorder for <paramref name="camera"/> producing a video of the
    /// given pixel dimensions.
    /// </summary>
    /// <param name="camera">Camera whose frames will be recorded.</param>
    /// <param name="width">Output video width in pixels.</param>
    /// <param name="height">Output video height in pixels.</param>
    public GameCameraRecorder(Camera camera, int width, int height)
    {
        _camera = camera;
        // Frame rate is fixed at 30 here, which is sufficient for video output.
        var frameRate = 30;
        _recorder = new MP4Recorder(width, height, frameRate);
    }

    /// <summary>
    /// Starts recording. Throws <see cref="InvalidOperationException"/> if already
    /// running, or <see cref="ObjectDisposedException"/> if disposed.
    /// </summary>
    public void StartRecording()
    {
        if (IsRunnning)
        {
            throw new InvalidOperationException();
        }
        if (IsDisposed)
        {
            throw new ObjectDisposedException(GetType().Name);
        }
        // Creating a CameraInput starts recording: it commits every rendered
        // frame of _camera to the recorder until it is disposed.
        _cameraInput = new CameraInput(_recorder, new RealtimeClock(), _camera);
        IsRunnning = true;
    }

    /// <summary>
    /// Stops recording, finalizes the MP4 file, and returns its path.
    /// The path cannot be chosen; move the file afterwards with System.IO if needed.
    /// </summary>
    public async Task<string> EndRecordingAsync()
    {
        if (!IsRunnning)
        {
            throw new InvalidOperationException();
        }
        if (IsDisposed)
        {
            throw new ObjectDisposedException(GetType().Name);
        }
        // Dispose CameraInput before finishing so no more frames reach the recorder.
        _cameraInput.Dispose();
        _cameraInput = null;
        // BUGFIX: reset the running flag before awaiting; previously a second call
        // passed the guard and threw NullReferenceException on the nulled input.
        IsRunnning = false;
        return await _recorder.FinishWriting();
    }

    /// <summary>
    /// Releases the recording input. Safe to call multiple times.
    /// </summary>
    public void Dispose()
    {
        // BUGFIX: Dispose must be idempotent per .NET IDisposable guidelines;
        // the original threw ObjectDisposedException on a repeated call.
        if (IsDisposed)
        {
            return;
        }
        _cameraInput?.Dispose();
        _cameraInput = null;
        IsDisposed = true;
    }
}
像这样使用这个类:
using UnityEngine;
/// <summary>
/// Minimal OnGUI driver for <see cref="GameCameraRecorder"/>: a Start button that
/// begins recording and an End button that finishes it and logs the output path.
/// </summary>
public class Example : MonoBehaviour
{
    [SerializeField] private Camera _camera;
    private GameCameraRecorder _recorder;

    public void Start()
    {
        // Record at full screen resolution.
        _recorder = new GameCameraRecorder(_camera, Screen.width, Screen.height);
    }

    private void OnGUI()
    {
        var width = Screen.width;
        var height = width * 0.1f;
        var buttonRect = new Rect(0, 0, width, height);
        if (GUI.Button(buttonRect, "Start") && !_recorder.IsRunnning && !_recorder.IsDisposed)
        {
            _recorder.StartRecording();
            Debug.Log("Start");
        }
        buttonRect.y += height + 8;
        if (GUI.Button(buttonRect, "End") && _recorder.IsRunnning && !_recorder.IsDisposed)
        {
            EndRecordingAndLog();
        }
    }

    // BUGFIX: the original fire-and-forget ContinueWith ran its continuation on a
    // thread-pool thread and silently swallowed any exception from the faulted
    // task. Awaiting instead resumes on Unity's synchronization context and lets
    // failures surface in the Unity console. async void is acceptable here
    // because this is effectively a UI event handler.
    private async void EndRecordingAndLog()
    {
        var path = await _recorder.EndRecordingAsync();
        Debug.Log($"End: {path}");
    }

    private void OnDestroy()
    {
        _recorder.Dispose();
    }
}
按开始按钮开始录制,按结束按钮结束,将mp4输出到日志输出路径就成功了。
屏幕空间设置为Overlay无法录制UI
请注意,Camera Input无法记录Screen Space – Overlay的Canvas,这是由于Unity的限制。
叠加模式不通过普通渲染管道,因此不会注入普通相机渲染中。可以通过使用屏幕空间 – 相机UI并将其设置为从特定相机渲染(设置为正交模式)来解决。
如果想录制UI,需要改变Canvas的RenderMode 。
有声录音
会尝试用声音录音,创建一个AudioInput以包含音频。
using System;
using System.Threading.Tasks;
using UnityEngine;
using NatSuite.Recorders;
using NatSuite.Recorders.Inputs;
using NatSuite.Recorders.Clocks;
/// <summary>
/// Records a game camera's rendered output, together with the audio heard by the
/// camera's AudioListener, into an MP4 file via NatCorder.
/// </summary>
public class GameCameraRecorder : IDisposable
{
    private readonly Camera _camera;
    private readonly IMediaRecorder _recorder;
    private readonly AudioListener _audioListener;
    private CameraInput _cameraInput;
    private AudioInput _audioInput;

    // NOTE(review): the triple-n spelling "IsRunnning" is part of the public
    // interface (external callers read it), so it is deliberately kept.
    public bool IsRunnning { get; private set; }
    public bool IsDisposed { get; private set; }

    /// <summary>
    /// Creates a recorder for <paramref name="camera"/> producing audio-enabled
    /// video of the given pixel dimensions.
    /// </summary>
    /// <param name="camera">Camera whose frames (and AudioListener) are recorded.</param>
    /// <param name="width">Output video width in pixels.</param>
    /// <param name="height">Output video height in pixels.</param>
    public GameCameraRecorder(Camera camera, int width, int height)
    {
        _camera = camera;
        var frameRate = 30;
        // Sample rate and channel count must be specified for audio recording.
        var sampleRate = AudioSettings.outputSampleRate;
        var channelCount = (int)AudioSettings.speakerMode;
        _recorder = new MP4Recorder(width, height, frameRate, sampleRate, channelCount);
        // Audio is captured from the AudioListener attached to the camera.
        _audioListener = _camera.GetComponent<AudioListener>();
    }

    /// <summary>
    /// Starts recording video and audio. Throws if already running or disposed.
    /// </summary>
    public void StartRecording()
    {
        if (IsRunnning)
        {
            throw new InvalidOperationException();
        }
        if (IsDisposed)
        {
            throw new ObjectDisposedException(GetType().Name);
        }
        var clock = new RealtimeClock();
        _cameraInput = new CameraInput(_recorder, clock, _camera);
        // The AudioInput must share the CameraInput's clock so audio stays in sync.
        _audioInput = new AudioInput(_recorder, clock, _audioListener);
        IsRunnning = true;
    }

    /// <summary>
    /// Stops recording, finalizes the MP4 file, and returns its path.
    /// </summary>
    public async Task<string> EndRecordingAsync()
    {
        if (!IsRunnning)
        {
            throw new InvalidOperationException();
        }
        if (IsDisposed)
        {
            throw new ObjectDisposedException(GetType().Name);
        }
        // Dispose both inputs first so no more data reaches the recorder.
        _cameraInput.Dispose();
        _cameraInput = null;
        _audioInput.Dispose();
        _audioInput = null;
        // BUGFIX: reset the running flag before awaiting; previously a second call
        // passed the guard and threw NullReferenceException on the nulled inputs.
        IsRunnning = false;
        return await _recorder.FinishWriting();
    }

    /// <summary>
    /// Releases the recording inputs. Safe to call multiple times.
    /// </summary>
    public void Dispose()
    {
        // BUGFIX: Dispose must be idempotent per .NET IDisposable guidelines;
        // the original threw ObjectDisposedException on a repeated call.
        if (IsDisposed)
        {
            return;
        }
        _cameraInput?.Dispose();
        _cameraInput = null;
        _audioInput?.Dispose();
        _audioInput = null;
        IsDisposed = true;
    }
}
用法与上一节相同。
在iOS或Android设备上保存到相机胶卷
接下来,将通过这种方式生成的视频文件保存到智能手机的相机胶卷中,要保存到相机胶卷,请使用NatShare的免费资产,从github下载。
NatShare是一种在SNS上共享媒体的资产,但这次将只使用保存到相机胶卷的功能,要使用它,只需在导入上述资产后添加以下代码即可。
using System;
using System.Threading.Tasks;
using UnityEngine;
using NatSuite.Recorders;
using NatSuite.Recorders.Inputs;
using NatSuite.Recorders.Clocks;
using NatSuite.Sharing;
/// <summary>
/// Records a game camera (with audio) into an MP4 file via NatCorder, then saves
/// the finished file to the device camera roll via NatShare.
/// </summary>
public class GameCameraRecorder : IDisposable
{
    private readonly Camera _camera;
    private readonly IMediaRecorder _recorder;
    private readonly AudioListener _audioListener;
    private CameraInput _cameraInput;
    private AudioInput _audioInput;

    // NOTE(review): the triple-n spelling "IsRunnning" is part of the public
    // interface (external callers read it), so it is deliberately kept.
    public bool IsRunnning { get; private set; }
    public bool IsDisposed { get; private set; }

    /// <summary>
    /// Creates a recorder for <paramref name="camera"/> producing audio-enabled
    /// video of the given pixel dimensions.
    /// </summary>
    /// <param name="camera">Camera whose frames (and AudioListener) are recorded.</param>
    /// <param name="width">Output video width in pixels.</param>
    /// <param name="height">Output video height in pixels.</param>
    public GameCameraRecorder(Camera camera, int width, int height)
    {
        _camera = camera;
        var frameRate = 30;
        // Sample rate and channel count must be specified for audio recording.
        var sampleRate = AudioSettings.outputSampleRate;
        var channelCount = (int)AudioSettings.speakerMode;
        _recorder = new MP4Recorder(width, height, frameRate, sampleRate, channelCount);
        _audioListener = _camera.GetComponent<AudioListener>();
    }

    /// <summary>
    /// Starts recording video and audio. Throws if already running or disposed.
    /// </summary>
    public void StartRecording()
    {
        if (IsRunnning)
        {
            throw new InvalidOperationException();
        }
        if (IsDisposed)
        {
            throw new ObjectDisposedException(GetType().Name);
        }
        var clock = new RealtimeClock();
        _cameraInput = new CameraInput(_recorder, clock, _camera);
        // Share one clock between both inputs so audio stays in sync with video.
        _audioInput = new AudioInput(_recorder, clock, _audioListener);
        IsRunnning = true;
    }

    /// <summary>
    /// Stops recording, finalizes the MP4 file, saves it to the camera roll via
    /// NatShare's SavePayload, and returns the file path.
    /// </summary>
    public async Task<string> EndRecordingAsync()
    {
        if (!IsRunnning)
        {
            throw new InvalidOperationException();
        }
        if (IsDisposed)
        {
            throw new ObjectDisposedException(GetType().Name);
        }
        // Dispose both inputs first so no more data reaches the recorder.
        _cameraInput.Dispose();
        _cameraInput = null;
        _audioInput.Dispose();
        _audioInput = null;
        // BUGFIX: reset the running flag before awaiting; previously a second call
        // passed the guard and threw NullReferenceException on the nulled inputs.
        IsRunnning = false;
        var path = await _recorder.FinishWriting();
        // Save the finished video to the device camera roll.
        var payload = new SavePayload();
        payload.AddMedia(path);
        await payload.Commit();
        return path;
    }

    /// <summary>
    /// Releases the recording inputs. Safe to call multiple times.
    /// </summary>
    public void Dispose()
    {
        // BUGFIX: Dispose must be idempotent per .NET IDisposable guidelines;
        // the original threw ObjectDisposedException on a repeated call.
        if (IsDisposed)
        {
            return;
        }
        _cameraInput?.Dispose();
        _cameraInput = null;
        _audioInput?.Dispose();
        _audioInput = null;
        IsDisposed = true;
    }
}
安卓设置
Android(安卓)需要从Project Settings将Minimum API Level设置为Android Lollipop(API Level 22)或更高。

iOS设置
对于iOS,需要将以下两项添加到info.plist中。
- NSPhotoLibraryUsageDescription
- NSPhotoLibraryAddUsageDescription
要使用Unity脚本对其进行设置,请执行以下操作:
using System.IO;
#if UNITY_IOS
using UnityEditor;
using UnityEditor.Build;
using UnityEditor.Build.Reporting;
using UnityEditor.iOS.Xcode;
/// <summary>
/// Post-build step that writes the photo-library usage descriptions (required for
/// saving to the camera roll) into the generated Xcode project's Info.plist.
/// </summary>
public class iOSBuildPostProcess : IPostprocessBuildWithReport
{
    public int callbackOrder => 0;

    public void OnPostprocessBuild(BuildReport report)
    {
        // Only iOS builds produce an Xcode project with an Info.plist.
        if (report.summary.platform != BuildTarget.iOS)
        {
            return;
        }

        var plistPath = Path.Combine(report.summary.outputPath, "Info.plist");
        var document = new PlistDocument();
        document.ReadFromFile(plistPath);

        // Both keys must be present or iOS rejects photo-library access.
        var root = document.root;
        root.SetString("NSPhotoLibraryUsageDescription", "If you want to display the explanation in the dialog asking for permission, enter it here");
        root.SetString("NSPhotoLibraryAddUsageDescription", "If you want to display the explanation in the dialog asking for permission, enter it here");

        document.WriteToFile(plistPath);
    }
}
#endif
离线录音
作为一个稍微应用的用法,可以执行离线渲染以从每一帧的渲染结果(纹理)数组创建电影,要执行离线渲染,请在不使用CameraInput的情况下按如下方式调用Recorder.CommitFrame()。
using System.Linq;
using NatSuite.Recorders;
using NatSuite.Recorders.Clocks;
using UnityEngine;
/// <summary>
/// Offline-rendering example: builds an MP4 from a fixed array of textures by
/// committing pixel data directly, without a CameraInput.
/// </summary>
public class Example : MonoBehaviour
{
    public async void Start()
    {
        const int width = 128;
        const int height = 128;
        const int frameRate = 30;
        // FixedIntervalClock advances by one frame interval per timestamp read,
        // which is what offline (non-realtime) rendering needs.
        var clock = new FixedIntervalClock(frameRate);
        var recorder = new MP4Recorder(width, height, frameRate);
        // Solid-magenta pixel buffer reused for every frame.
        var colors = Enumerable.Range(0, width * height)
            .Select(_ => (Color32) Color.magenta)
            .ToArray();
        // Prepare one texture per frame of the output video.
        var frames = Enumerable.Range(0, 100)
            .Select(x =>
            {
                // *Pay attention to format: RGBA32 matches the committed pixel layout.
                var texture = new Texture2D(width, height, TextureFormat.RGBA32, false, false);
                texture.SetPixels32(colors);
                return texture;
            })
            .ToArray();
        foreach (var frame in frames)
        {
            // Every read of FixedIntervalClock.timestamp advances the elapsed
            // time by one frame interval.
            recorder.CommitFrame(frame.GetPixels32(), clock.timestamp);
            // BUGFIX: Texture2D holds native memory that the GC does not reclaim;
            // the original leaked all 100 textures. Destroy each one as soon as
            // its pixels have been committed.
            Destroy(frame);
        }
        var path = await recorder.FinishWriting();
        Debug.Log(path);
    }
}
最好在另一个线程中进行提交,这种情况下,GetPixels32()由于是Unity方法,只能从主线程调用,所以改成预先将帧信息保留为Color32数组。(根据NatCorder的官方文档,目前GetPixels32()是在单独的线程中调用,但是这样会报错)
// Pre-compute every frame's pixels as Color32[] on the main thread, because
// Texture2D methods such as GetPixels32() may only be called from Unity's
// main thread.
var frames = Enumerable.Range(0, 100)
.Select(x =>
{
var texture = new Texture2D(width, height, TextureFormat.RGBA32, false, false);
texture.SetPixels32(colors);
// Call GetPixels32() at this point, while still on the main thread
return texture.GetPixels32();
})
.ToArray();
// Offload the (expensive) frame commits and finalization to a worker thread;
// CommitFrame accepts plain Color32[] so no Unity API is touched here.
var path = await Task.Run(() =>
{
foreach (var frame in frames)
{
recorder.CommitFrame(frame, clock.timestamp);
}
return recorder.FinishWriting();
});
Debug.Log(path);
包括声音在内的离线录音稍微复杂一些。
关于性能
调整分辨率
一般来说,视频所需的分辨率低于相机的分辨率。以相机的分辨率录制会增加录制的处理负荷,因此以视频所需的分辨率录制似乎更好。
视频分辨率可以在Recorder constructor。
_recorder = new MP4Recorder(width, height, frameRate, sampleRate, channelCount);
降低帧率
视频所需的帧率为30,因此即使游戏以60FPS运行,提交给视频的帧率也应该调整为30。
在CameraInput的frameSkip字段中设置一个值将按该帧数跳过提交间隔。
例如,如果想将帧提交数减半,则将其设置为 1,如果想要1/3,则将其设置为2。
// frameSkip = 1 skips every other frame, halving the commit rate
// (e.g. a 60FPS game commits 30 frames per second to the video).
_cameraInput = new CameraInput(_recorder, clock, _camera)
{
frameSkip = 1
};
多线程
提交帧的过程是线程安全的,此外,由于此处理非常昂贵,因此应尽可能将帧提交处理移至单独的线程。
…
以上是3D天堂网关于使用NatCorder创建视频文件的全部内容,如果你有任何反馈,请随时在本页面下方留言。