For the paper I need an illustration of how a panoramic video is viewed, similar to the following effect:
Create two Spheres and give them different display modes: a "see-through" sphere that shows the entire panoramic image semi-transparently, and an "opaque" sphere that shows only the image region falling inside the camera's view frustum.
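The steps below implement this frustum test manually against the far-plane corner coordinates. As a minimal standalone sketch of the same idea (not part of the project scripts that follow), a world-space point can also be tested with Unity's built-in GeometryUtility; the helper name IsInsideFrustum is illustrative.

using UnityEngine;

public static class FrustumTest
{
    // Returns true if a world-space point lies inside the camera's view frustum.
    // CalculateFrustumPlanes returns the six frustum planes with inward-facing
    // normals, so a point is inside when it is on the positive side of all of them.
    public static bool IsInsideFrustum(Camera cam, Vector3 worldPoint)
    {
        Plane[] planes = GeometryUtility.CalculateFrustumPlanes(cam);
        foreach (Plane plane in planes)
        {
            if (plane.GetDistanceToPoint(worldPoint) < 0f)
                return false; // behind at least one frustum plane
        }
        return true;
    }
}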
① In the Unity scene, add two Spheres with scale 10.51 at position (0, 0, 0), one named foveated and the other named margin. ② Create two Unlit Shaders. The first, named foveated (its shader path in the code is "Unlit/yuvvideo"), is written as follows:
Shader "Unlit/yuvvideo"
{
    Properties
    {
        _RGBTex ("Texture", 2D) = "white" {}
    }
    SubShader
    {
        Tags { "RenderType" = "Opaque" }
        LOD 100
        Cull Off

        Pass
        {
            // Do not write to the depth buffer, so other objects are not occluded
            //ZWrite Off
            // Use alpha blending
            //Blend SrcAlpha SrcAlpha

            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            // make fog work
            #pragma multi_compile_fog

            #include "UnityCG.cginc"

            struct appdata
            {
                float4 vertex : POSITION;
                float2 uv : TEXCOORD0;
            };

            struct v2f
            {
                float2 uv : TEXCOORD0;
                UNITY_FOG_COORDS(1)
                float4 vertex : SV_POSITION;
            };

            sampler2D _RGBTex;
            float4 _RGBTex_ST;

            v2f vert (appdata v)
            {
                v2f o;
                o.vertex = UnityObjectToClipPos(v.vertex);
                o.uv = TRANSFORM_TEX(v.uv, _RGBTex);
                UNITY_TRANSFER_FOG(o, o.vertex);
                return o;
            }

            fixed4 frag (v2f i) : SV_Target
            {
                // sample the texture
                fixed4 col = tex2D(_RGBTex, i.uv);
                // apply fog
                UNITY_APPLY_FOG(i.fogCoord, col);
                return col;
            }
            ENDCG
        }
    }
}

The second shader, named margin, is written as follows:
Shader "Unlit/margin"
{
    Properties
    {
        _MainTex ("Texture", 2D) = "white" {}
    }
    SubShader
    {
        Tags { "RenderType" = "Transparent" }
        LOD 100

        Pass
        {
            // Do not write to the depth buffer, so other objects are not occluded
            ZWrite Off
            // Use alpha blending
            Blend SrcAlpha SrcAlpha

            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            // make fog work
            #pragma multi_compile_fog

            #include "UnityCG.cginc"

            struct appdata
            {
                float4 vertex : POSITION;
                float2 uv : TEXCOORD0;
            };

            struct v2f
            {
                float2 uv : TEXCOORD0;
                UNITY_FOG_COORDS(1)
                float4 vertex : SV_POSITION;
            };

            sampler2D _MainTex;
            float4 _MainTex_ST;

            v2f vert (appdata v)
            {
                v2f o;
                o.vertex = UnityObjectToClipPos(v.vertex);
                o.uv = TRANSFORM_TEX(v.uv, _MainTex);
                UNITY_TRANSFER_FOG(o, o.vertex);
                return o;
            }

            fixed4 frag (v2f i) : SV_Target
            {
                // sample the texture
                fixed4 col = tex2D(_MainTex, i.uv);
                // apply fog
                UNITY_APPLY_FOG(i.fogCoord, col);
                return col;
            }
            ENDCG
        }
    }
}

③ Create two materials: one named foveated, using the foveated shader; the other named margin, using the margin shader. Assign the two materials to the foveated and margin spheres respectively, and set the Render Queue of both materials to Transparent.
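If you prefer to do step ③ from code rather than in the Inspector, the sketch below sets up the same materials. The class name SetupPanoramaMaterials and the use of GameObject.Find are illustrative assumptions; the shader paths and sphere names come from steps ① and ②.

using UnityEngine;
using UnityEngine.Rendering;

public class SetupPanoramaMaterials : MonoBehaviour
{
    void Start()
    {
        // The two spheres created in step 1 (names assumed from this tutorial).
        var foveated = GameObject.Find("foveated").GetComponent<Renderer>();
        var margin = GameObject.Find("margin").GetComponent<Renderer>();

        // Materials built from the two unlit shaders written in step 2.
        var foveatedMat = new Material(Shader.Find("Unlit/yuvvideo"));
        var marginMat = new Material(Shader.Find("Unlit/margin"));

        // Both materials render in the transparent queue, matching step 3.
        foveatedMat.renderQueue = (int)RenderQueue.Transparent; // 3000
        marginMat.renderQueue = (int)RenderQueue.Transparent;

        foveated.sharedMaterial = foveatedMat;
        margin.sharedMaterial = marginMat;
    }
}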
④ Write the script GimosCameraRect.cs as follows:
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

namespace Draw
{
    public class GimosCameraRect : MonoBehaviour
    {
        public Vector3[] farVecArr;
        public Vector3[] nearVecArr;

        private Camera m_MainCamera;
        private Camera MainCamera
        {
            get
            {
                if (m_MainCamera == null)
                {
                    m_MainCamera = Camera.main;
                }
                return m_MainCamera;
            }
        }

        // Returns the four frustum corner points at distance dis from the camera.
        Vector3[] GetCameraVector(float dis)
        {
            Vector3[] vectorArr = new Vector3[4];
            float aspect = MainCamera.aspect;
            float height = Mathf.Tan(MainCamera.fieldOfView / 2 * Mathf.Deg2Rad) * dis;
            float width = height * aspect;
            Vector3 forward = transform.position + transform.forward * dis;
            Vector3 right = transform.right * width;
            Vector3 top = transform.up * height;
            vectorArr[0] = forward + top - right;   // top_left
            vectorArr[1] = forward + top + right;   // top_right
            vectorArr[2] = forward - top + right;   // bottom_right
            vectorArr[3] = forward - top - right;   // bottom_left
            return vectorArr;
        }

        int segmentNum = 100;
        float r = 5.255f;   // radius of the sphere in the scene

        public void Update()
        {
            MainCamera.farClipPlane = 3.214f;
            MainCamera.nearClipPlane = 0.01f;
            farVecArr = GetCameraVector(MainCamera.farClipPlane);
            nearVecArr = GetCameraVector(MainCamera.nearClipPlane);

            // Near-plane rectangle
            Debug.DrawLine(nearVecArr[0], nearVecArr[1], Color.yellow);
            Debug.DrawLine(nearVecArr[1], nearVecArr[2], Color.yellow);
            Debug.DrawLine(nearVecArr[2], nearVecArr[3], Color.yellow);
            Debug.DrawLine(nearVecArr[3], nearVecArr[0], Color.yellow);

            // Frustum edges from near plane to far plane
            Debug.DrawLine(nearVecArr[0], farVecArr[0], Color.black);
            Debug.DrawLine(nearVecArr[1], farVecArr[1], Color.black);
            Debug.DrawLine(nearVecArr[2], farVecArr[2], Color.black);
            Debug.DrawLine(nearVecArr[3], farVecArr[3], Color.black);

            Debug.Log("farVecArr[0]:" + farVecArr[0]);
            Debug.Log("farVecArr[1]:" + farVecArr[1]);
            Debug.Log("farVecArr[2]:" + farVecArr[2]);
            Debug.Log("farVecArr[3]:" + farVecArr[3]);

            float length;
            float segmentLength;
            Vector3 origin = new Vector3(0, 0, 0);

            // Edge 0-1 (top edge of the far plane), projected onto the sphere of radius r
            float y = farVecArr[0].y;
            Vector3[] path01 = new Vector3[segmentNum + 1];
            path01[0] = farVecArr[0];
            length = farVecArr[1].x - farVecArr[0].x;
            segmentLength = length / segmentNum;
            for (int i = 1; i <= segmentNum; i++)
            {
                float x = i * segmentLength + farVecArr[0].x;
                float z = Mathf.Sqrt(r * r - x * x - y * y);
                path01[i] = new Vector3(x, y, z);
                Debug.DrawLine(path01[i - 1], path01[i], Color.yellow);
                Debug.DrawLine(origin, path01[i], Color.yellow);
            }
            Debug.DrawLine(path01[segmentNum], farVecArr[1], Color.yellow);

            // Edge 2-3 (bottom edge)
            y = farVecArr[2].y;
            Vector3[] path23 = new Vector3[segmentNum + 1];
            path23[0] = farVecArr[2];   // start at corner 2
            length = farVecArr[3].x - farVecArr[2].x;
            segmentLength = length / segmentNum;
            for (int i = 1; i <= segmentNum; i++)
            {
                float x = i * segmentLength + farVecArr[2].x;
                float z = Mathf.Sqrt(r * r - x * x - y * y);
                path23[i] = new Vector3(x, y, z);
                Debug.DrawLine(path23[i - 1], path23[i], Color.yellow);
                Debug.DrawLine(origin, path23[i], Color.yellow);
            }
            Debug.DrawLine(path23[segmentNum], farVecArr[3], Color.yellow);

            // Edge 1-2 (right edge)
            float x_12 = farVecArr[1].x;
            Vector3[] path12 = new Vector3[segmentNum + 1];
            path12[0] = farVecArr[2];
            length = farVecArr[1].y - farVecArr[2].y;
            segmentLength = length / segmentNum;
            for (int i = 1; i <= segmentNum; i++)
            {
                float y_12 = i * segmentLength + farVecArr[2].y;
                float z = Mathf.Sqrt(r * r - x_12 * x_12 - y_12 * y_12);
                path12[i] = new Vector3(x_12, y_12, z);
                Debug.DrawLine(path12[i - 1], path12[i], Color.yellow);
                Debug.DrawLine(origin, path12[i], Color.yellow);
            }
            Debug.DrawLine(path12[segmentNum], farVecArr[1], Color.yellow);

            // Edge 3-0 (left edge)
            float x_30 = farVecArr[3].x;
            Vector3[] path30 = new Vector3[segmentNum + 1];
            path30[0] = farVecArr[3];
            length = farVecArr[0].y - farVecArr[3].y;
            segmentLength = length / segmentNum;
            for (int i = 1; i <= segmentNum; i++)
            {
                float y_30 = i * segmentLength + farVecArr[3].y;
                float z = Mathf.Sqrt(r * r - x_30 * x_30 - y_30 * y_30);
                path30[i] = new Vector3(x_30, y_30, z);
                Debug.DrawLine(path30[i - 1], path30[i], Color.yellow);
                Debug.DrawLine(origin, path30[i], Color.yellow);
            }
            Debug.DrawLine(path30[segmentNum], farVecArr[0], Color.yellow);
        }
    }
}

Attach this script to the Camera.
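As a quick sanity check on GetCameraVector, here is the far-plane corner calculation worked out by hand. The 60° vertical field of view and 16:9 aspect ratio are assumed Unity defaults, not values set by the script above.

using UnityEngine;

public static class FrustumCornerExample
{
    // Worked example of GetCameraVector for the far clip plane used above
    // (dis = 3.214), assuming fieldOfView = 60 and aspect = 16/9.
    public static void Run()
    {
        float dis = 3.214f;
        float fov = 60f;
        float aspect = 16f / 9f;

        float height = Mathf.Tan(fov / 2f * Mathf.Deg2Rad) * dis;  // ≈ 1.856
        float width = height * aspect;                              // ≈ 3.299

        // With the camera at the origin looking along +z, the top-right
        // far-plane corner is:
        Vector3 topRight = new Vector3(width, height, dis);         // ≈ (3.30, 1.86, 3.21)

        // The corner lies inside the sphere of radius r = 5.255, so the
        // Sqrt(r*r - x*x - y*y) used when projecting the frustum edges onto
        // the sphere stays real.
        Debug.Log(topRight.magnitude);                               // ≈ 4.97 < 5.255
    }
}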
⑤ Write the DrawYUV.cs script as follows:
using System.Collections;
using System.Collections.Generic;
using System.IO;
using UnityEngine;
using UnityEngine.UI;
using System;

namespace Draw
{
    public class DrawYUV : MonoBehaviour
    {
        private GimosCameraRect DataThread = null;
        float r = 5.255f;                    // radius of the sphere in the scene
        private Vector3[] vectorArry;
        public int frameCount;
        private int frameNow = 0;
        byte[] file;                         // raw YUV bytes (maximum value 255)
        public Renderer target;
        private int w = 1906;
        private int h = 964;
        private float M_PI = 3.14159f;
        private float M_PI_2 = 1.57079f;
        // Original textures
        public Texture2D t = null;           // whole frame, rendered faded/semi-transparent
        public Texture2D ts = null;          // the part shown solid (inside the frustum)
        private float x_pos, y_pos;          // longitude/latitude of the current pixel (gaze direction)
        private float Nx, Ny, Nz;
        private byte Y00, Y01, Y10, Y11;     // Y components of a 2x2 block
        private float R00, G00, B00;
        private float R01, G01, B01;
        private float R10, G10, B10;
        private float R11, G11, B11;
        int i;
        int mY;                              // vertically flipped y value

        // Planar formats store the Y, U and V planes contiguously; packed formats interleave them.
        // This script expects planar YUV420.
        void Start()
        {
            // Returns the first active loaded object of the given type
            DataThread = FindObjectOfType<GimosCameraRect>();
            if (DataThread == null) return;
            // Read all bytes at once
            file = File.ReadAllBytes("D:/unity3D_Demo/circle/Assets/大象.yuv");
            frameCount = GetFrameCount(file, w, h);   // image width and height
            // Print the total number of frames
            print("Frame Count : " + frameCount);
        }

        void Update()
        {
            if (frameNow > frameCount - 1) return;
            vectorArry = DataThread.farVecArr;
            GetTexture(file, w, h, vectorArry);       // image width and height
            frameNow++;
            ts.Apply();
            target.sharedMaterial.SetTexture("_RGBTex", ts);
            t.Apply();
            target.sharedMaterial.SetTexture("_MainTex", t);
            print("Frame : " + (frameNow) + " (" + (int)(frameNow / (float)(frameCount) * 100) + "%)");
        }

        int GetFrameCount(byte[] file, int width, int height)
        {
            return file.Length / ((width * height) * 3 / 2);
        }

        Color32 temp;

        void GetTexture(byte[] file, int width, int height, Vector3[] vectorArry)
        {
            if (t != null) Destroy(t);
            if (ts != null) Destroy(ts);
            t = new Texture2D(width, height, TextureFormat.RGBA32, false);
            ts = new Texture2D(width, height, TextureFormat.RGBA32, false);

            int Ysize = t.width * t.height;
            int UorVsize = t.width * t.height / 4;
            byte U = 0;
            byte V = 0;
            int k = 0;
            // Byte offset of the current frame
            int offset = frameNow * ((width * height) * 3 / 2);

            for (int y = 0; y < t.height; y += 2)
            {
                for (int x = 0; x < t.width; x += 2)
                {
                    U = file[offset + Ysize - 1 + k++];             // note the -1
                    V = file[offset + Ysize - 1 + k + UorVsize];    // note the -1
                    i = y * t.width + x;
                    mY = t.height - 1 - y;
                    Y00 = file[offset + i];
                    Y01 = file[offset + i + t.width];
                    Y10 = file[offset + i + 1];
                    Y11 = file[offset + i + t.width + 1];

                    // YUV -> RGB conversion for the 2x2 block
                    R00 = (Y00 + 1.4075f * (V - 128)) / 255f;
                    G00 = (Y00 - 0.3455f * (U - 128) - 0.7169f * (V - 128)) / 255f;
                    B00 = (Y00 + 1.779f * (U - 128)) / 255f;
                    R01 = (Y01 + 1.4075f * (V - 128)) / 255f;
                    G01 = (Y01 - 0.3455f * (U - 128) - 0.7169f * (V - 128)) / 255f;
                    B01 = (Y01 + 1.779f * (U - 128)) / 255f;
                    R10 = (Y10 + 1.4075f * (V - 128)) / 255f;
                    G10 = (Y10 - 0.3455f * (U - 128) - 0.7169f * (V - 128)) / 255f;
                    B10 = (Y10 + 1.779f * (U - 128)) / 255f;
                    R11 = (Y11 + 1.4075f * (V - 128)) / 255f;
                    G11 = (Y11 - 0.3455f * (U - 128) - 0.7169f * (V - 128)) / 255f;
                    B11 = (Y11 + 1.779f * (U - 128)) / 255f;

                    // The whole frame is written with alpha 0.4 (semi-transparent)
                    t.SetPixel(x, mY, new Color(R00, G00, B00, 0.4f));
                    t.SetPixel(x, mY - 1, new Color(R01, G01, B01, 0.4f));
                    t.SetPixel(x + 1, mY, new Color(R10, G10, B10, 0.4f));
                    t.SetPixel(x + 1, mY - 1, new Color(R11, G11, B11, 0.4f));
                }
            }

            for (int y = 0; y < t.height; y++)
            {
                for (int x = 0; x < t.width; x++)
                {
                    mY = t.height - 1 - y;
                    // Equirectangular pixel -> longitude/latitude (note the origin is at the bottom-left)
                    x_pos = x / (float)w * (2 * M_PI) - M_PI;
                    y_pos = mY / (float)h * (M_PI) - M_PI_2;
                    // Longitude/latitude -> point on the sphere (must be scaled by r)
                    Nx = (float)(Math.Cos(y_pos) * Math.Cos(x_pos) * r);
                    Nz = -1.0f * (float)(Math.Cos(y_pos) * Math.Sin(x_pos) * r);
                    Ny = (float)Math.Sin(y_pos) * r;
                    // Keep only the pixels whose sphere point falls inside the far-plane rectangle
                    if (Nx >= vectorArry[0].x && Nx <= vectorArry[1].x &&
                        Ny <= vectorArry[0].y && Ny >= vectorArry[2].y &&
                        Nz <= -vectorArry[0].z)
                    {
                        temp = t.GetPixel(x, mY);
                        temp.a = 1;
                        ts.SetPixel(x, mY, temp);
                    }
                }
            }
        }
    }
}

Attach this script to the foveated sphere (adjust the image path, width, and height to your own file) and set its Target to foveated. Attach the same script to the margin sphere and set its Target to margin.
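For reference, the mapping used in the second loop of GetTexture can be isolated into a small helper. The function name and parameters below are illustrative; the axis conventions mirror the script above.

using UnityEngine;

public static class EquirectMapping
{
    // Maps a pixel (x, y) of a width x height equirectangular image to the
    // point on a sphere of radius r, using the same conventions as DrawYUV:
    // y is flipped (origin at the bottom-left), longitude spans [-PI, PI],
    // latitude spans [-PI/2, PI/2], and z is negated.
    public static Vector3 PixelToSphere(int x, int y, int width, int height, float r)
    {
        int mY = height - 1 - y;
        float lon = x / (float)width * (2f * Mathf.PI) - Mathf.PI;
        float lat = mY / (float)height * Mathf.PI - Mathf.PI / 2f;

        float nx = Mathf.Cos(lat) * Mathf.Cos(lon) * r;
        float nz = -Mathf.Cos(lat) * Mathf.Sin(lon) * r;
        float ny = Mathf.Sin(lat) * r;
        return new Vector3(nx, ny, nz);
    }
}

The frustum check in DrawYUV then simply compares this sphere point against the far-plane corner coordinates published by GimosCameraRect, and raises the pixel's alpha to 1 where the point is inside.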
Run the scene; the resulting effect is shown below:
https://download.csdn.net/download/qq_41452267/12811163