最近和深圳老同事聊天,感觉都老惨了,本身都是做游戏出身,结果游戏公司版号、资本问题搞得各种花式降薪裁员,玩的比较好的一个同事找工作千难万难最后面进了一个VR公司,做医疗手术类型VR程序。功能中要做一个透视镜效果,他搞不定,让我帮忙解决,因为刚好用commandbuffer来做就很简单,所以记录一下。
要求功能就是有一个检测仪,扫描人体皮肤表面可以观察到内部骨骼的x光渲染画面,话说有这种仪器吗?
不过这种效果不算复杂,我们以前也都学习过,无非就是几种效果原理的叠加组合而已:
1.网格边缘光
2.采样纹理
3.裁剪纹理
首先我们给一个模型网格做一个透视遮挡边缘光的效果:
Shader "XRay/XRayColorShader"
{
    Properties
    {
        _MainColor("X Ray Color",Color) = (1,1,1,1)
        _MainOffset("X Ray Light Offset",Range(-1,1)) = 0
    }
    SubShader
    {
        Tags { "RenderType"="Transparent" "Queue"="Transparent" }
        LOD 100
        Pass
        {
            // ZTest Greater: only pixels hidden behind normally-rendered geometry
            // pass the depth test, which is what makes the mesh read as "seen
            // through" an occluder. ZWrite Off so this pass never pollutes depth.
            Blend SrcAlpha OneMinusSrcAlpha
            ZTest Greater ZWrite Off
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            #include "UnityCG.cginc"

            struct appdata
            {
                float4 vertex : POSITION;
                float4 normal : NORMAL;
            };

            struct v2f
            {
                float4 vertex : SV_POSITION;
                float3 viewDirWS : TEXCOORD0;  // world-space vertex-to-camera direction
                float3 normalWS : TEXCOORD1;   // world-space surface normal
            };

            float4 _MainColor;
            float _MainOffset;

            v2f vert (appdata v)
            {
                v2f o;
                o.vertex = UnityObjectToClipPos(v.vertex);
                o.viewDirWS = normalize(WorldSpaceViewDir(v.vertex));
                o.normalWS = normalize(UnityObjectToWorldNormal(v.normal.xyz));
                return o;
            }

            fixed4 frag (v2f i) : SV_Target
            {
                // Fresnel-style rim: alpha grows as the surface turns away from
                // the viewer (dot -> 0). _MainOffset shifts the falloff so the
                // rim can be widened or narrowed from the material.
                float rim = 1.0 - saturate(dot(i.viewDirWS, i.normalWS) + _MainOffset);
                return fixed4(_MainColor.rgb, rim);
            }
            ENDCG
        }
    }
}
效果如下:
然后我们通过commandbuffer将效果提取到“屏幕”上,如下:
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Rendering;
/// <summary>
/// Draws the configured renderers with the X-ray material into a temporary
/// screen-sized RenderTexture via a CommandBuffer, then blits that texture
/// to the screen in OnRenderImage.
/// </summary>
public class CmdBufferXRayCamera : MonoBehaviour
{
    // Objects whose MeshRenderer is drawn into the X-ray buffer.
    public GameObject[] cmdObjects;
    // Material using the XRay/XRayColorShader rim-light shader.
    public Material xrayMat;

    private RenderTexture objectRT = null;
    private CommandBuffer cmdBuffer = null;

    private void OnEnable()
    {
        // Screen-sized target the X-ray pass renders into (no depth needed).
        objectRT = RenderTexture.GetTemporary(Screen.width, Screen.height, 0, RenderTextureFormat.ARGB32);
        cmdBuffer = new CommandBuffer { name = "XRay Objects" };
        cmdBuffer.SetRenderTarget(objectRT);
        cmdBuffer.ClearRenderTarget(true, true, Color.black);
        for (int i = 0; i < cmdObjects.Length; i++)
        {
            GameObject go = cmdObjects[i];
            if (go == null)
            {
                continue;
            }
            // BUG FIX: the original called go.GetComponent() without a type
            // argument, which does not compile.
            MeshRenderer render = go.GetComponent<MeshRenderer>();
            if (render != null)
            {
                cmdBuffer.DrawRenderer(render, xrayMat);
            }
        }
        // NOTE(review): the original also registered this buffer on
        // Camera.main (AfterImageEffects) while OnRenderImage executes it
        // manually as well, making it run twice per frame. The redundant
        // camera registration was removed; manual execution alone produces
        // the same objectRT contents.
    }

    private void OnDisable()
    {
        if (objectRT != null)
        {
            RenderTexture.ReleaseTemporary(objectRT);
            objectRT = null;
        }
        if (cmdBuffer != null)
        {
            // Release the native buffer instead of just clearing it.
            cmdBuffer.Release();
            cmdBuffer = null;
        }
    }

    private void OnRenderImage(RenderTexture source, RenderTexture destination)
    {
        // Re-record target is static, so executing the pre-built buffer each
        // frame refreshes objectRT with the current object transforms.
        if (cmdBuffer != null)
        {
            Graphics.ExecuteCommandBuffer(cmdBuffer);
        }
        Graphics.Blit(objectRT, destination);
    }
}
效果如下:
最后做一个屏幕上的透视镜圆环采样叠加功能:
Shader "XRay/LensCircleShader"
{
    Properties
    {
        _SrcTex ("Source Tex", 2D) = "white" {}
        _XRayTex("X Ray Tex",2D) = "white" {}
        _UV("Lens UV Saturate",vector) = (0,0,0,0)
        _LensColor("Lens Outside Color",Color) = (1,1,1,1)
        _Radius("Lens Inside Radius",Range(0,1)) = 0.1
        _Width("Lens Outside Width",Range(0,1)) = 0.05
        _Aspect("Screen Height / Width",float) = 1
    }
    SubShader
    {
        Tags { "RenderType"="Opaque" }
        LOD 100
        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            #include "UnityCG.cginc"

            struct appdata
            {
                float4 vertex : POSITION;
                float2 uv : TEXCOORD0;
            };

            struct v2f
            {
                float2 uv : TEXCOORD0;
                float4 vertex : SV_POSITION;
            };

            sampler2D _SrcTex;
            float4 _SrcTex_ST;
            sampler2D _XRayTex;
            vector _UV;        // xy = lens center in screen UV space
            float4 _LensColor; // ring (frame) color
            float _Radius;     // inner radius of the lens, in UV units
            float _Width;      // thickness of the ring band
            float _Aspect;     // screen height / width, corrects circle distortion

            v2f vert (appdata v)
            {
                v2f o;
                o.vertex = UnityObjectToClipPos(v.vertex);
                o.uv = TRANSFORM_TEX(v.uv, _SrcTex);
                return o;
            }

            fixed4 frag (v2f i) : SV_Target
            {
                fixed4 scol = tex2D(_SrcTex, i.uv);
                fixed4 xcol = tex2D(_XRayTex, i.uv);
                // Squared distance from the lens center, with y scaled by the
                // aspect ratio so the lens stays a circle on screen. Working
                // with squared lengths avoids a sqrt.
                float2 p2c = i.uv - _UV.xy;
                p2c.y *= _Aspect;
                float p2clen2 = dot(p2c, p2c);
                float radius2 = _Radius * _Radius;
                float outradius2 = (_Radius + _Width) * (_Radius + _Width);
                if (p2clen2 > outradius2)
                {
                    // Outside the lens entirely: show only the scene color.
                    xcol = fixed4(0,0,0,0);
                }
                // BUG FIX: original read "else if(p2clen2radius2)" — the '>'
                // comparison operator was missing, so the shader did not compile.
                else if (p2clen2 > radius2)
                {
                    // Inside the ring band: draw the lens frame color.
                    return _LensColor;
                }
                // Inside the lens: scene + X-ray overlay, clamped to [0,1].
                return saturate(scol + xcol);
            }
            ENDCG
        }
    }
}
shader功能也简单,获取两张纹理,然后通过圆环InsideRadius和OutsideRadius进行裁剪叠加,c#代码控制一下功能性:
using System.Collections;
using System.Collections.Generic;
using System.Security.Cryptography;
using UnityEngine;
using UnityEngine.Rendering;
/// <summary>
/// Renders the X-ray objects into a temporary RenderTexture via a
/// CommandBuffer, then composites scene + X-ray through the lens-circle
/// material in OnRenderImage. Holding the left mouse button moves the lens.
/// </summary>
public class CmdBufferXRayCamera : MonoBehaviour
{
    // Objects whose MeshRenderer is drawn into the X-ray buffer.
    public GameObject[] cmdObjects;
    // Material using XRay/XRayColorShader (rim light).
    public Material xrayMat;
    // Material using XRay/LensCircleShader (final composite).
    public Material lensMat;

    private RenderTexture objectRT = null;
    private CommandBuffer cmdBuffer = null;

    void Start()
    {
        // The lens shader scales the y distance by height/width so the lens
        // stays circular on non-square screens.
        lensMat.SetFloat("_Aspect", (float)Screen.height / (float)Screen.width);
    }

    private void OnEnable()
    {
        // Screen-sized target the X-ray pass renders into (no depth needed).
        objectRT = RenderTexture.GetTemporary(Screen.width, Screen.height, 0, RenderTextureFormat.ARGB32);
        cmdBuffer = new CommandBuffer { name = "XRay Objects" };
        cmdBuffer.SetRenderTarget(objectRT);
        cmdBuffer.ClearRenderTarget(true, true, Color.black);
        for (int i = 0; i < cmdObjects.Length; i++)
        {
            GameObject go = cmdObjects[i];
            if (go == null)
            {
                continue;
            }
            // BUG FIX: the original called go.GetComponent() without a type
            // argument, which does not compile.
            MeshRenderer render = go.GetComponent<MeshRenderer>();
            if (render != null)
            {
                cmdBuffer.DrawRenderer(render, xrayMat);
            }
        }
        // NOTE(review): the original also registered this buffer on
        // Camera.main (AfterForwardOpaque) while OnRenderImage executes it
        // manually as well, so it ran twice per frame. The redundant camera
        // registration was removed; manual execution alone fills objectRT.
    }

    private void OnDisable()
    {
        if (objectRT != null)
        {
            RenderTexture.ReleaseTemporary(objectRT);
            objectRT = null;
        }
        if (cmdBuffer != null)
        {
            // Release the native buffer instead of just clearing it.
            cmdBuffer.Release();
            cmdBuffer = null;
        }
    }

    private void OnRenderImage(RenderTexture source, RenderTexture destination)
    {
        // Refresh the X-ray layer with current object transforms, then let the
        // lens material combine the camera image with it.
        if (cmdBuffer != null)
        {
            Graphics.ExecuteCommandBuffer(cmdBuffer);
        }
        lensMat.SetTexture("_SrcTex", source);
        lensMat.SetTexture("_XRayTex", objectRT);
        Graphics.Blit(null, destination, lensMat);
    }

    private void Update()
    {
        // While the left button is held, the lens center follows the cursor
        // (converted from pixels to 0..1 screen UV for the shader).
        if (Input.GetMouseButton(0))
        {
            Vector2 mpos = Input.mousePosition;
            lensMat.SetVector("_UV", new Vector4(mpos.x / Screen.width, mpos.y / Screen.height));
        }
    }
}
效果如下:
so,因为以前也学过这些原理,所以只简单记录一下。