This article points out that when playing video with the new Unity VideoPlayer and VideoClip API, you can "retrieve the texture for each frame if desired".

What is the proper way to get the current frame as a Texture2D?

EDIT:

After the answer I did this, but it is not working:
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Video;

public class AverageColorFromTexture : MonoBehaviour {

    public VideoClip videoToPlay;
    public Light lSource;

    private Color targetColor;
    private VideoPlayer videoPlayer;
    private VideoSource videoSource;
    private Renderer rend;
    private Texture tex;
    private AudioSource audioSource;

    void Start()
    {
        Application.runInBackground = true;
        StartCoroutine(playVideo());
    }

    IEnumerator playVideo()
    {
        rend = GetComponent<Renderer>();

        videoPlayer = gameObject.AddComponent<VideoPlayer>();
        audioSource = gameObject.AddComponent<AudioSource>();

        //Disable Play on Awake for both Video and Audio
        videoPlayer.playOnAwake = false;
        audioSource.playOnAwake = false;

        videoPlayer.source = VideoSource.VideoClip;
        videoPlayer.audioOutputMode = VideoAudioOutputMode.AudioSource;
        videoPlayer.EnableAudioTrack(0, true);
        videoPlayer.SetTargetAudioSource(0, audioSource);

        //Set video To Play then prepare Audio to prevent Buffering
        videoPlayer.clip = videoToPlay;
        videoPlayer.Prepare();

        //Wait until video is prepared
        while (!videoPlayer.isPrepared)
        {
            Debug.Log("Preparing Video");
            yield return null;
        }
        Debug.Log("Done Preparing Video");

        //Assign the Texture from Video to Material texture
        tex = videoPlayer.texture;
        rend.material.mainTexture = tex;

        //Enable new frame Event
        videoPlayer.sendFrameReadyEvents = true;
        //Subscribe to the new frame Event
        videoPlayer.frameReady += OnNewFrame;

        //Play Video
        videoPlayer.Play();
        //Play Sound
        audioSource.Play();

        Debug.Log("Playing Video");
        while (videoPlayer.isPlaying)
        {
            Debug.LogWarning("Video Time: " + Mathf.FloorToInt((float)videoPlayer.time));
            yield return null;
        }
        Debug.Log("Done Playing Video");
    }

    void OnNewFrame(VideoPlayer source, long frameIdx)
    {
        Texture2D videoFrame = (Texture2D)source.texture;
        targetColor = CalculateAverageColorFromTexture(videoFrame);
        lSource.color = targetColor;
    }

    Color32 CalculateAverageColorFromTexture(Texture2D tex)
    {
        Color32[] texColors = tex.GetPixels32();
        int total = texColors.Length;

        float r = 0;
        float g = 0;
        float b = 0;

        for (int i = 0; i < total; i++)
        {
            r += texColors[i].r;
            g += texColors[i].g;
            b += texColors[i].b;
        }

        return new Color32((byte)(r / total), (byte)(g / total), (byte)(b / total), 0);
    }
}
You can do this properly in three steps:

1. Enable the new-frame event by setting VideoPlayer.sendFrameReadyEvents to true.

2. Subscribe to the VideoPlayer.frameReady event.

3. The function you assign to the frameReady event is called whenever a new frame is available. Access the video frame from the VideoPlayer that is passed in as a parameter by converting its VideoPlayer.texture to a Texture2D.

That's it.
In code:

Before videoPlayer.Play(), add these:
//Enable new frame Event
videoPlayer.sendFrameReadyEvents = true;
//Subscribe to the new frame Event
videoPlayer.frameReady += OnNewFrame;
And this is your OnNewFrame function signature:
void OnNewFrame(VideoPlayer source, long frameIdx)
{
    Texture2D videoFrame = (Texture2D)source.texture;
    //Do anything with the videoFrame Texture.
}
It is worth noting that enabling this event is costly. Make sure you actually need every frame before doing this.
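If you only need the data occasionally, one way to keep that cost down is to skip most frames inside the callback. A minimal sketch, assuming the body of the callback otherwise stays as shown above; the interval of 10 frames is an arbitrary, illustrative value:

// Illustrative only: process every 10th frame to reduce per-frame work.
private const long frameInterval = 10;

void OnNewFrame(VideoPlayer source, long frameIdx)
{
    if (frameIdx % frameInterval != 0)
        return; // ignore this frame

    // ...process the frame here, as shown above...
}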
EDIT:
Both Texture2D videoFrame = (Texture2D)source.texture; and Texture2D videoFrame = source.texture as Texture2D; failed.
I put Debug.Log(source.texture); inside the OnNewFrame function and got:
TempBuffer 294 320x240(UnityEngine.RenderTexture)
So it looks like the VideoPlayer.texture property returns a RenderTexture, not a Texture2D we can read pixels from directly.

We have to convert the RenderTexture to a Texture2D:
void Start()
{
    videoFrame = new Texture2D(2, 2);
    ...
}

//Initialize in the Start function
Texture2D videoFrame;

void OnNewFrame(VideoPlayer source, long frameIdx)
{
    RenderTexture renderTexture = source.texture as RenderTexture;

    if (videoFrame.width != renderTexture.width || videoFrame.height != renderTexture.height)
    {
        videoFrame.Resize(renderTexture.width, renderTexture.height);
    }

    RenderTexture.active = renderTexture;
    videoFrame.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0);
    videoFrame.Apply();
    RenderTexture.active = null;

    targetColor = CalculateAverageColorFromTexture(videoFrame);
    lSource.color = targetColor;
}
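ReadPixels is a synchronous GPU-to-CPU readback of the whole frame, which is the expensive part of this callback. One possible optimization, not part of the original answer, is to let the GPU shrink the frame into a small temporary RenderTexture first and average that instead; a sketch, assuming the same videoFrame, targetColor, lSource and CalculateAverageColorFromTexture members as above, and an arbitrary 16x16 target size:

// Hypothetical variant: downscale the video frame on the GPU before reading it back,
// so far fewer pixels have to be copied and averaged on the CPU.
void OnNewFrame(VideoPlayer source, long frameIdx)
{
    RenderTexture full = source.texture as RenderTexture;

    // Let the GPU downscale the frame into a small temporary render target.
    RenderTexture small = RenderTexture.GetTemporary(16, 16, 0);
    Graphics.Blit(full, small);

    if (videoFrame.width != small.width || videoFrame.height != small.height)
    {
        videoFrame.Resize(small.width, small.height);
    }

    RenderTexture.active = small;
    videoFrame.ReadPixels(new Rect(0, 0, small.width, small.height), 0, 0);
    videoFrame.Apply();
    RenderTexture.active = null;

    RenderTexture.ReleaseTemporary(small);

    targetColor = CalculateAverageColorFromTexture(videoFrame);
    lSource.color = targetColor;
}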
The complete code from your question:
using System.Collections;
using UnityEngine;
using UnityEngine.Video;

public class AverageColorFromTexture : MonoBehaviour
{
    public VideoClip videoToPlay;
    public Light lSource;

    private Color targetColor;
    private VideoPlayer videoPlayer;
    private VideoSource videoSource;
    private Renderer rend;
    private Texture tex;
    private AudioSource audioSource;

    void Start()
    {
        videoFrame = new Texture2D(2, 2);

        Application.runInBackground = true;
        StartCoroutine(playVideo());
    }

    IEnumerator playVideo()
    {
        rend = GetComponent<Renderer>();

        videoPlayer = gameObject.AddComponent<VideoPlayer>();
        audioSource = gameObject.AddComponent<AudioSource>();

        //Disable Play on Awake for both Video and Audio
        videoPlayer.playOnAwake = false;
        audioSource.playOnAwake = false;

        videoPlayer.source = VideoSource.VideoClip;
        videoPlayer.audioOutputMode = VideoAudioOutputMode.AudioSource;
        videoPlayer.EnableAudioTrack(0, true);
        videoPlayer.SetTargetAudioSource(0, audioSource);

        //Set video To Play then prepare Audio to prevent Buffering
        videoPlayer.clip = videoToPlay;
        videoPlayer.Prepare();

        //Wait until video is prepared
        while (!videoPlayer.isPrepared)
        {
            Debug.Log("Preparing Video");
            yield return null;
        }
        Debug.Log("Done Preparing Video");

        //Assign the Texture from Video to Material texture
        tex = videoPlayer.texture;
        rend.material.mainTexture = tex;

        //Enable new frame Event
        videoPlayer.sendFrameReadyEvents = true;
        //Subscribe to the new frame Event
        videoPlayer.frameReady += OnNewFrame;

        //Play Video
        videoPlayer.Play();
        //Play Sound
        audioSource.Play();

        Debug.Log("Playing Video");
        while (videoPlayer.isPlaying)
        {
            Debug.LogWarning("Video Time: " + Mathf.FloorToInt((float)videoPlayer.time));
            yield return null;
        }
        Debug.Log("Done Playing Video");
    }

    //Initialize in the Start function
    Texture2D videoFrame;

    void OnNewFrame(VideoPlayer source, long frameIdx)
    {
        RenderTexture renderTexture = source.texture as RenderTexture;

        if (videoFrame.width != renderTexture.width || videoFrame.height != renderTexture.height)
        {
            videoFrame.Resize(renderTexture.width, renderTexture.height);
        }

        RenderTexture.active = renderTexture;
        videoFrame.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0);
        videoFrame.Apply();
        RenderTexture.active = null;

        targetColor = CalculateAverageColorFromTexture(videoFrame);
        lSource.color = targetColor;
    }

    Color32 CalculateAverageColorFromTexture(Texture2D tex)
    {
        Color32[] texColors = tex.GetPixels32();
        int total = texColors.Length;

        float r = 0;
        float g = 0;
        float b = 0;

        for (int i = 0; i < total; i++)
        {
            r += texColors[i].r;
            g += texColors[i].g;
            b += texColors[i].b;
        }

        return new Color32((byte)(r / total), (byte)(g / total), (byte)(b / total), 0);
    }
}
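For completeness, the script expects to sit on a GameObject that has a Renderer (the video texture is assigned to its material), with videoToPlay and lSource filled in via the Inspector. If you prefer to wire it up from code, a hypothetical bootstrap could look like this; the object names and the Resources path are assumptions, not part of the original answer:

using UnityEngine;
using UnityEngine.Video;

// Hypothetical setup script: creates a quad as the video screen and a point light,
// then wires both into AverageColorFromTexture from code instead of the Inspector.
public class AverageColorSetup : MonoBehaviour
{
    void Start()
    {
        // A quad supplies the Renderer that AverageColorFromTexture fetches in playVideo().
        GameObject screen = GameObject.CreatePrimitive(PrimitiveType.Quad);

        // The light whose colour will follow the video's average colour.
        Light videoLight = new GameObject("VideoLight").AddComponent<Light>();
        videoLight.type = LightType.Point;

        AverageColorFromTexture avg = screen.AddComponent<AverageColorFromTexture>();
        avg.lSource = videoLight;
        // Assumes a VideoClip asset at Assets/Resources/MyClip.
        avg.videoToPlay = Resources.Load<VideoClip>("MyClip");
    }
}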