mirror of https://github.com/AcChosen/VR-Stage-Lighting.git synced 2024-11-27 17:00:51 +01:00

Fix line endings

This commit is contained in:
fundale 2024-11-03 18:53:13 -06:00
parent f25e054233
commit 38c05c6639
6 changed files with 1975 additions and 1975 deletions

View File

@ -1,204 +1,204 @@
Shader "VRSL/AudioLink/Other/Discoball"
{
Properties
{
[Header(Audio Section)]
[Toggle]_EnableAudioLink("Enable Audio Link", Float) = 0
_Band("Band", Float) = 0
_BandMultiplier("Band Multiplier", Range(1, 15)) = 1
_Delay("Delay", Float) = 0
_NumBands("Num Bands", Float) = 4
_AudioSpectrum("AudioSpectrum", 2D) = "black" {}
_GlobalIntensity("Global Intensity", Range(0,1)) = 1
_FinalIntensity("Final Intensity", Range(0,1)) = 1
[HDR]_Emission ("Color", Color) = (1.0, 1.0, 1.0, .2)
_Cube ("Projection Map", Cube) = "" {}
[Toggle] _UseWorldNorm("Use World Normal vs View Normal", Float) = 0
_RotationSpeed ("Rotation Speed", Range (0,180)) = 0
_Multiplier("Brightness Multiplier", Range(0, 10)) = 1
_UniversalIntensity ("Universal Intensity", Range (0,1)) = 1
[Toggle] _EnableThemeColorSampling ("Enable Theme Color Sampling", Int) = 0
_ThemeColorTarget ("Choose Theme Color", Int) = 0
[Toggle] _EnableColorTextureSample ("Enable Color Texture Sampling", Int) = 0
_SamplingTexture ("Texture To Sample From for Color", 2D) = "white" {}
_TextureColorSampleX ("X coordinate to sample the texture from", Range(0,1)) = 0.5
_TextureColorSampleY ("Y coordinate to sample the texture from", Range(0,1)) = 0.5
_RenderTextureMultiplier("Render Texture Multiplier", Range(1,10)) = 1
[Toggle] _EnableColorChord ("Enable Color Chord Tinting", Int) = 0
[Toggle]_UseTraditionalSampling("Use Traditional Texture Sampling", Int) = 0
[Enum(Transparent,1,AlphaToCoverage,2)] _RenderMode ("Render Mode", Int) = 1
[Enum(Off,0,On,1)] _ZWrite ("Z Write", Int) = 0
[Enum(Off,0,On,1)] _AlphaToCoverage ("Alpha To Coverage", Int) = 0
[Enum(Off,0,One,1)] _BlendDst ("Destination Blend mode", Float) = 1
[Enum(UnityEngine.Rendering.BlendOp)] _BlendOp ("Blend Operation", Float) = 0
_ClippingThreshold ("Clipping Threshold", Range (0,1)) = 0.5
_GlobalIntensityBlend("Global Intensity Blend", Range(0,1)) = 1
}
SubShader
{
Tags{ "Queue" = "Transparent+1" "ForceNoShadowCasting"="True" "IgnoreProjector"="True" "RenderType" = "Transparent" }
Offset -1, -5
Stencil
{
Ref 142
Comp NotEqual
Pass Keep
}
Pass
{
AlphaToMask [_AlphaToCoverage]
Cull Front
Ztest Greater
ZWrite Off
Blend DstColor [_BlendDst]
Lighting Off
SeparateSpecular Off
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
uniform samplerCUBE _Cube;
#define VRSL_AUDIOLINK
#pragma multi_compile_local _ _ALPHATEST_ON
float4 _Cube_ST;
float _RotationSpeed;
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
float3 texcoord : TEXCOORD1;
UNITY_VERTEX_INPUT_INSTANCE_ID
};
struct v2f
{
float4 vertex : SV_POSITION;
float2 uv : TEXCOORD0;
float3 ray : TEXCOORD2;
float4 screenPos : TEXCOORD4;
float4 worldDirection : TEXCOORD5;
float4 worldPos : TEXCOORD6;
float4 outColor : TEXCOORD7;
UNITY_VERTEX_OUTPUT_STEREO
};
#include "Packages/com.acchosen.vr-stage-lighting/Runtime/Shaders/Shared/VRSL-Defines.cginc"
half _Multiplier;
#include "../Shared/VRSL-AudioLink-Functions.cginc"
float4 Rotation(float4 vertPos)
{
//CALCULATE BASE ROTATION. MORE FUN MATH. THIS IS FOR PAN.
float angleY = radians(_Time.y * _RotationSpeed);
float c = cos(angleY);
float s = sin(angleY);
float4x4 rotateYMatrix = float4x4(c, 0, s, 0,
0, 1, 0, 0,
-s, 0, c, 0,
0, 0, 0, 1);
return mul(rotateYMatrix, vertPos);
}
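//Per-vertex factor that, together with the raw depth sample, lets linear eye depth be reconstructed
//even for oblique (mirror) projection matrices; it is packed into worldDirection.w in the vertex shader.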
inline float4 CalculateFrustumCorrection()
{
float x1 = -UNITY_MATRIX_P._31/(UNITY_MATRIX_P._11*UNITY_MATRIX_P._34);
float x2 = -UNITY_MATRIX_P._32/(UNITY_MATRIX_P._22*UNITY_MATRIX_P._34);
return float4(x1, x2, 0, UNITY_MATRIX_P._33/UNITY_MATRIX_P._34 + x1*UNITY_MATRIX_P._13 + x2*UNITY_MATRIX_P._23);
}
//CREDIT TO DJ LUKIS FOR MIRROR DEPTH CORRECTION
inline float CorrectedLinearEyeDepth(float z, float B)
{
return 1.0 / (z/UNITY_MATRIX_P._34 + B);
}
v2f vert(appdata v)
{
v2f o;
UNITY_SETUP_INSTANCE_ID(v);
UNITY_INITIALIZE_OUTPUT(v2f, o); //DON'T INITIALIZE OR IT WILL BREAK PROJECTION
UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o);
//UNITY_TRANSFER_INSTANCE_ID(v, o);
o.vertex = UnityObjectToClipPos(v.vertex);
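//build a view-space ray toward this vertex, flipped to match the depth reconstruction in the fragment shader;
//if TEXCOORD1 already supplies a ray (z != 0), use that instead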
o.ray = UnityObjectToViewPos(v.vertex).xyz;
o.ray = o.ray.xyz * float3(-1,-1,1);
o.ray = lerp(o.ray, v.texcoord, v.texcoord.z != 0);
o.worldPos = mul(unity_ObjectToWorld, v.vertex);
o.screenPos = ComputeScreenPos(o.vertex);
o.worldDirection.xyz = o.worldPos.xyz - _WorldSpaceCameraPos;
// pack correction factor into direction w component to save space
o.worldDirection.w = dot(o.vertex, CalculateFrustumCorrection());
o.outColor = getEmissionColor();
return o;
}
#define IF(a, b, c) lerp(b, c, step((fixed) (a), 0));
fixed4 frag(v2f i) : SV_Target
{
UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(i);
float globalintensity = getGlobalIntensity();
float finalintensity = getFinalIntensity();
if(globalintensity <= 0.05 || finalintensity <= 0.05)
{
return half4(0,0,0,0);
}
#if _ALPHATEST_ON
float2 pos = i.screenPos.xy / i.screenPos.w;
pos *= _ScreenParams.xy;
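//4x4 ordered-dither (Bayer) threshold table (values k/17); the pixel's position inside its 4x4 screen cell
//selects which threshold the alpha clip compares against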
float DITHER_THRESHOLDS[16] =
{
1.0 / 17.0, 9.0 / 17.0, 3.0 / 17.0, 11.0 / 17.0,
13.0 / 17.0, 5.0 / 17.0, 15.0 / 17.0, 7.0 / 17.0,
4.0 / 17.0, 12.0 / 17.0, 2.0 / 17.0, 10.0 / 17.0,
16.0 / 17.0, 8.0 / 17.0, 14.0 / 17.0, 6.0 / 17.0
};
int index = (int)((uint(pos.x) % 4) * 4 + uint(pos.y) % 4);
#endif
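//reconstruct the scene position behind this fragment: correct the raw depth sample (mirror-safe),
//march along the interpolated view ray, then bring the point back into this fixture's object space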
float4 depthdirect = i.worldDirection * (1.0f / i.vertex.w);
float sceneZ = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, i.screenPos.xy / i.screenPos.w);
#if UNITY_REVERSED_Z
if (sceneZ == 0)
#else
sceneZ = lerp(UNITY_NEAR_CLIP_VALUE, 1, sceneZ);
if (sceneZ == 1)
#endif
return half4(0,0,0,0);
float depth = CorrectedLinearEyeDepth(sceneZ, depthdirect.w);
i.ray = i.ray * (_ProjectionParams.z / i.ray.z);
depth = Linear01Depth((1.0 - (depth * _ZBufferParams.w)) / (depth * _ZBufferParams.z));
float3 wpos = mul(unity_CameraToWorld, float4(i.ray * depth, 1)).xyz;
float UVscale = pow(abs(distance( mul(unity_ObjectToWorld, float4(0.0,0.0,0.0,1.0) ).xyz,wpos)),-1);
float3 projPos = (mul(unity_WorldToObject,float4(wpos, 1)));
if(0.0 < abs(projPos.x) < 0.1)
{
return half4(0,0,0,0);
}
projPos = Rotation(float4(projPos, 0)).xyz;
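//sample the cubemap along the rotated object-space direction, then attenuate by the inverse distance
//from the fixture (UVscale) and by the emission color, audio level and intensity controls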
float4 col = (texCUBE (_Cube, projPos));
col = col *(i.outColor * (4*UVscale));
col = (col * _Multiplier) * GetAudioReactAmplitude();
col = ((col * globalintensity) * finalintensity);
col = col * _UniversalIntensity;
#ifdef _ALPHATEST_ON
clip(col.a - DITHER_THRESHOLDS[index]);
clip((((col.r + col.g + col.b)/3) * (_ClippingThreshold)) - DITHER_THRESHOLDS[index]);
return col;
#else
return col;
#endif
}
ENDCG
}//end color pass
}
CustomEditor "VRSLInspector"
Shader "VRSL/AudioLink/Other/Discoball"
{
Properties
{
[Header(Audio Section)]
[Toggle]_EnableAudioLink("Enable Audio Link", Float) = 0
_Band("Band", Float) = 0
_BandMultiplier("Band Multiplier", Range(1, 15)) = 1
_Delay("Delay", Float) = 0
_NumBands("Num Bands", Float) = 4
_AudioSpectrum("AudioSpectrum", 2D) = "black" {}
_GlobalIntensity("Global Intensity", Range(0,1)) = 1
_FinalIntensity("Final Intensity", Range(0,1)) = 1
[HDR]_Emission ("Color", Color) = (1.0, 1.0, 1.0, .2)
_Cube ("Projection Map", Cube) = "" {}
[Toggle] _UseWorldNorm("Use World Normal vs View Normal", Float) = 0
_RotationSpeed ("Rotation Speed", Range (0,180)) = 0
_Multiplier("Brightness Multiplier", Range(0, 10)) = 1
_UniversalIntensity ("Universal Intensity", Range (0,1)) = 1
[Toggle] _EnableThemeColorSampling ("Enable Theme Color Sampling", Int) = 0
_ThemeColorTarget ("Choose Theme Color", Int) = 0
[Toggle] _EnableColorTextureSample ("Enable Color Texture Sampling", Int) = 0
_SamplingTexture ("Texture To Sample From for Color", 2D) = "white" {}
_TextureColorSampleX ("X coordinate to sample the texture from", Range(0,1)) = 0.5
_TextureColorSampleY ("Y coordinate to sample the texture from", Range(0,1)) = 0.5
_RenderTextureMultiplier("Render Texture Multiplier", Range(1,10)) = 1
[Toggle] _EnableColorChord ("Enable Color Chord Tinting", Int) = 0
[Toggle]_UseTraditionalSampling("Use Traditional Texture Sampling", Int) = 0
[Enum(Transparent,1,AlphaToCoverage,2)] _RenderMode ("Render Mode", Int) = 1
[Enum(Off,0,On,1)] _ZWrite ("Z Write", Int) = 0
[Enum(Off,0,On,1)] _AlphaToCoverage ("Alpha To Coverage", Int) = 0
[Enum(Off,0,One,1)] _BlendDst ("Destination Blend mode", Float) = 1
[Enum(UnityEngine.Rendering.BlendOp)] _BlendOp ("Blend Operation", Float) = 0
_ClippingThreshold ("Clipping Threshold", Range (0,1)) = 0.5
_GlobalIntensityBlend("Global Intensity Blend", Range(0,1)) = 1
}
SubShader
{
Tags{ "Queue" = "Transparent+1" "ForceNoShadowCasting"="True" "IgnoreProjector"="True" "RenderType" = "Transparent" }
Offset -1, -5
Stencil
{
Ref 142
Comp NotEqual
Pass Keep
}
Pass
{
AlphaToMask [_AlphaToCoverage]
Cull Front
Ztest Greater
ZWrite Off
Blend DstColor [_BlendDst]
Lighting Off
SeparateSpecular Off
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
uniform samplerCUBE _Cube;
#define VRSL_AUDIOLINK
#pragma multi_compile_local _ _ALPHATEST_ON
float4 _Cube_ST;
float _RotationSpeed;
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
float3 texcoord : TEXCOORD1;
UNITY_VERTEX_INPUT_INSTANCE_ID
};
struct v2f
{
float4 vertex : SV_POSITION;
float2 uv : TEXCOORD0;
float3 ray : TEXCOORD2;
float4 screenPos : TEXCOORD4;
float4 worldDirection : TEXCOORD5;
float4 worldPos : TEXCOORD6;
float4 outColor : TEXCOORD7;
UNITY_VERTEX_OUTPUT_STEREO
};
#include "Packages/com.acchosen.vr-stage-lighting/Runtime/Shaders/Shared/VRSL-Defines.cginc"
half _Multiplier;
#include "../Shared/VRSL-AudioLink-Functions.cginc"
float4 Rotation(float4 vertPos)
{
//CALCULATE BASE ROTATION. MORE FUN MATH. THIS IS FOR PAN.
float angleY = radians(_Time.y * _RotationSpeed);
float c = cos(angleY);
float s = sin(angleY);
float4x4 rotateYMatrix = float4x4(c, 0, s, 0,
0, 1, 0, 0,
-s, 0, c, 0,
0, 0, 0, 1);
return mul(rotateYMatrix, vertPos);
}
inline float4 CalculateFrustumCorrection()
{
float x1 = -UNITY_MATRIX_P._31/(UNITY_MATRIX_P._11*UNITY_MATRIX_P._34);
float x2 = -UNITY_MATRIX_P._32/(UNITY_MATRIX_P._22*UNITY_MATRIX_P._34);
return float4(x1, x2, 0, UNITY_MATRIX_P._33/UNITY_MATRIX_P._34 + x1*UNITY_MATRIX_P._13 + x2*UNITY_MATRIX_P._23);
}
//CREDIT TO DJ LUKIS FOR MIRROR DEPTH CORRECTION
inline float CorrectedLinearEyeDepth(float z, float B)
{
return 1.0 / (z/UNITY_MATRIX_P._34 + B);
}
v2f vert(appdata v)
{
v2f o;
UNITY_SETUP_INSTANCE_ID(v);
UNITY_INITIALIZE_OUTPUT(v2f, o); //DON'T INITIALIZE OR IT WILL BREAK PROJECTION
UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o);
//UNITY_TRANSFER_INSTANCE_ID(v, o);
o.vertex = UnityObjectToClipPos(v.vertex);
o.ray = UnityObjectToViewPos(v.vertex).xyz;
o.ray = o.ray.xyz * float3(-1,-1,1);
o.ray = lerp(o.ray, v.texcoord, v.texcoord.z != 0);
o.worldPos = mul(unity_ObjectToWorld, v.vertex);
o.screenPos = ComputeScreenPos(o.vertex);
o.worldDirection.xyz = o.worldPos.xyz - _WorldSpaceCameraPos;
// pack correction factor into direction w component to save space
o.worldDirection.w = dot(o.vertex, CalculateFrustumCorrection());
o.outColor = getEmissionColor();
return o;
}
#define IF(a, b, c) lerp(b, c, step((fixed) (a), 0));
fixed4 frag(v2f i) : SV_Target
{
UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(i);
float globalintensity = getGlobalIntensity();
float finalintensity = getFinalIntensity();
if(globalintensity <= 0.05 || finalintensity <= 0.05)
{
return half4(0,0,0,0);
}
#if _ALPHATEST_ON
float2 pos = i.screenPos.xy / i.screenPos.w;
pos *= _ScreenParams.xy;
float DITHER_THRESHOLDS[16] =
{
1.0 / 17.0, 9.0 / 17.0, 3.0 / 17.0, 11.0 / 17.0,
13.0 / 17.0, 5.0 / 17.0, 15.0 / 17.0, 7.0 / 17.0,
4.0 / 17.0, 12.0 / 17.0, 2.0 / 17.0, 10.0 / 17.0,
16.0 / 17.0, 8.0 / 17.0, 14.0 / 17.0, 6.0 / 17.0
};
int index = (int)((uint(pos.x) % 4) * 4 + uint(pos.y) % 4);
#endif
float4 depthdirect = i.worldDirection * (1.0f / i.vertex.w);
float sceneZ = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, i.screenPos.xy / i.screenPos.w);
#if UNITY_REVERSED_Z
if (sceneZ == 0)
#else
sceneZ = lerp(UNITY_NEAR_CLIP_VALUE, 1, sceneZ);
if (sceneZ == 1)
#endif
return half4(0,0,0,0);
float depth = CorrectedLinearEyeDepth(sceneZ, depthdirect.w);
i.ray = i.ray * (_ProjectionParams.z / i.ray.z);
depth = Linear01Depth((1.0 - (depth * _ZBufferParams.w)) / (depth * _ZBufferParams.z));
float3 wpos = mul(unity_CameraToWorld, float4(i.ray * depth, 1)).xyz;
float UVscale = pow(abs(distance( mul(unity_ObjectToWorld, float4(0.0,0.0,0.0,1.0) ).xyz,wpos)),-1);
float3 projPos = (mul(unity_WorldToObject,float4(wpos, 1)));
if(0.0 < abs(projPos.x) < 0.1)
{
return half4(0,0,0,0);
}
projPos = Rotation(float4(projPos, 0)).xyz;
float4 col = (texCUBE (_Cube, projPos));
col = col *(i.outColor * (4*UVscale));
col = (col * _Multiplier) * GetAudioReactAmplitude();
col = ((col * globalintensity) * finalintensity);
col = col * _UniversalIntensity;
#ifdef _ALPHATEST_ON
clip(col.a - DITHER_THRESHOLDS[index]);
clip((((col.r + col.g + col.b)/3) * (_ClippingThreshold)) - DITHER_THRESHOLDS[index]);
return col;
#else
return col;
#endif
}
ENDCG
}//end color pass
}
CustomEditor "VRSLInspector"
}

View File

@ -1,393 +1,393 @@
Shader "VRSL/AudioLink/Standard Static/Lens Flare"
{
//Modified from https://github.com/ColinLeung-NiloCat/UnityURP-BillboardLensFlareShader by ColinLeung-NiloCat from URP to SRP
Properties
{
[Header(Audio Section)]
[Toggle]_EnableAudioLink("Enable Audio Link", Float) = 0
_Band("Band", Float) = 0
_BandMultiplier("Band Multiplier", Range(1, 15)) = 1
_Delay("Delay", Float) = 0
_NumBands("Num Bands", Float) = 4
_AudioSpectrum("AudioSpectrum", 2D) = "black" {}
[Toggle] _EnableColorChord ("Enable Color Chord Tinting", Int) = 0
_FinalIntensity("Final Intensity", Range(0,1)) = 1
_GlobalIntensity("Global Intensity", Range(0,1)) = 1
_GlobalIntensityBlend("Global Intensity Blend", Range(0,1)) = 1
_UniversalIntensity ("Universal Intensity", Range (0,1)) = 1
[HDR]_Emission("Light Color Tint", Color) = (1,1,1,1)
_MainTex ("Texture", 2D) = "white" {}
_FadeAmt ("Fade Strength", Range(0,1)) = 1
_ColorSat ("Color Saturation Strength", Range(0,1)) = 1
_ScaleFactor ("Scale Factor", Range(0,0.01)) = 1
_ReferenceDistance("Reference Distance", Float) = 2
_UVScale ("UV Scale Test", Range(0.001,2)) = 1
_FixtureMaxIntensity ("Maximum Light Intensity",Range (0,15)) = 1
_CurveMod ("Light Intensity Curve Modifier", Range (-3,8)) = 5.0
_FixutreIntensityMultiplier ("Intensity Multiplier (For Bloom Scaling)", Range(1,5)) = 1
_RenderTextureMultiplier("Render Texture Multiplier", Range(1,10)) = 1
_RemoveTextureArtifact("RemoveTextureArtifact", Range(0,0.1)) = 0
[Toggle]_UseTraditionalSampling("Use Traditional Texture Sampling", Int) = 0
[Toggle] _EnableColorTextureSample ("Enable Color Texture Sampling", Int) = 0
_SamplingTexture ("Texture To Sample From for Color", 2D) = "white" {}
_TextureColorSampleX ("X coordinate to sample the texture from", Range(0,1)) = 0.5
_TextureColorSampleY ("Y coordinate to sample the texture from", Range(0,1)) = 0.5
[Header(PreMultiply Alpha. Turn it ON only if your texture has correct alpha)]
[Toggle]_UsePreMultiplyAlpha("UsePreMultiplyAlpha (recommend _BaseMap's alpha = 'From Gray Scale')", Float) = 0
[Header(Depth Occlusion)]
_LightSourceViewSpaceRadius("LightSourceViewSpaceRadius", range(0,1)) = 0.05
_DepthOcclusionTestZBias("DepthOcclusionTestZBias", range(-1,1)) = -0.001
[Header(If camera too close Auto fadeout)]
_StartFadeinDistanceWorldUnit("StartFadeinDistanceWorldUnit",Float) = 0.05
_EndFadeinDistanceWorldUnit("EndFadeinDistanceWorldUnit", Float) = 0.5
[Header(Optional Flicker animation)]
[Toggle]_ShouldDoFlicker("ShouldDoFlicker", Float) = 1
_FlickerAnimSpeed("FlickerAnimSpeed", Float) = 5
_FlickResultIntensityLowestPoint("FlickResultIntensityLowestPoint", range(0,1)) = 0.5
[Toggle]_UseDepthLight("Toggle The Requirement of the depth light to function.", Int) = 1
[Toggle] _EnableThemeColorSampling ("Enable Theme Color Sampling", Int) = 0
_ThemeColorTarget ("Choose Theme Color", Int) = 0
[Enum(Transparent,1,AlphaToCoverage,2)] _RenderMode ("Render Mode", Int) = 1
[Enum(Off,0,On,1)] _ZWrite ("Z Write", Int) = 0
[Enum(Off,0,On,1)] _AlphaToCoverage ("Alpha To Coverage", Int) = 0
[Enum(Off,0,One,1)] _BlendDst ("Destination Blend mode", Float) = 1
[Enum(UnityEngine.Rendering.BlendOp)] _BlendOp ("Blend Operation", Float) = 0
_ClippingThreshold ("Clipping Threshold", Range (0,1)) = 0.5
_AlphaProjectionIntensity ("Alpha Projection Intensity", Range (0,1)) = 0.5
}
SubShader
{
Tags { "RenderType"="Transparent" "Queue" = "Transparent+200" }
LOD 100
Pass
{
AlphaToMask [_AlphaToCoverage]
Zwrite Off
ZTest Off
Blend One [_BlendDst]
Cull Back
Lighting Off
Tags{ "LightMode" = "Always" }
Stencil
{
Ref 142
Comp NotEqual
Pass Keep
}
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
// make fog work
#pragma multi_compile_local _ _USE_DEPTH_LIGHT
#pragma multi_compile_local _ _ALPHATEST_ON
#pragma multi_compile_fog
#pragma multi_compile_instancing
#define VRSL_AUDIOLINK
#define VRSL_FLARE
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
half4 color : COLOR;
UNITY_VERTEX_INPUT_INSTANCE_ID
};
struct v2f
{
float2 uv : TEXCOORD0;
UNITY_FOG_COORDS(1)
float4 vertex : SV_POSITION;
float4 screenPos : TEXCOORD6;
float4 worldDirection : TEXCOORD2;
float4 vertexWorldPos : TEXCOORD3;
half4 color : TEXCOORD4;
float maskX : TEXCOORD5;
UNITY_VERTEX_INPUT_INSTANCE_ID // will turn into this in non OpenGL / non PSSL -> uint instanceID : SV_InstanceID;
UNITY_VERTEX_OUTPUT_STEREO
};
#define COUNT 8 //you can change this to any number (e.g. 1~32); the lower, the faster. Keeping it a compile-time constant enables many compiler optimizations
// sampler2D _MainTex;
//float4 _MainTex_ST;
//half4 _Emission;
half _ColorSat, _ScaleFactor, _ReferenceDistance, _UVScale;
float _LightSourceViewSpaceRadius;
float _DepthOcclusionTestZBias;
float _StartFadeinDistanceWorldUnit;
float _EndFadeinDistanceWorldUnit;
float _UsePreMultiplyAlpha;
// float _FlickerAnimSpeed;
float _FlickResultIntensityLowestPoint;
//float _ShouldDoFlicker;
half _RemoveTextureArtifact, _CurveMod;
#include "Packages/com.acchosen.vr-stage-lighting/Runtime/Shaders/Shared/VRSL-Defines.cginc"
#include "../Shared/VRSL-AudioLink-Functions.cginc"
float4x4 GetWorldToViewMatrix()
{
return UNITY_MATRIX_V;
}
float4x4 GetObjectToWorldMatrix()
{
return UNITY_MATRIX_M;
}
float3 TransformWorldToView(float3 positionWS)
{
return mul(GetWorldToViewMatrix(), float4(positionWS, 1.0)).xyz;
}
float3 TransformObjectToWorld(float3 vertex)
{
return mul(GetObjectToWorldMatrix(), float4(vertex, 1.0)).xyz;
}
inline float4 CalculateFrustumCorrection()
{
float x1 = -UNITY_MATRIX_P._31/(UNITY_MATRIX_P._11*UNITY_MATRIX_P._34);
float x2 = -UNITY_MATRIX_P._32/(UNITY_MATRIX_P._22*UNITY_MATRIX_P._34);
return float4(x1, x2, 0, UNITY_MATRIX_P._33/UNITY_MATRIX_P._34 + x1*UNITY_MATRIX_P._13 + x2*UNITY_MATRIX_P._23);
}
//CREDIT TO DJ LUKIS FOR MIRROR DEPTH CORRECTION
inline float CorrectedLinearEyeDepth(float z, float B)
{
return 1.0 / (z/UNITY_MATRIX_P._34 + B);
}
float3 RGB2HSV(float3 c)
{
float4 K = float4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
float4 p = lerp(float4(c.bg, K.wz), float4(c.gb, K.xy), step(c.b, c.g));
float4 q = lerp(float4(p.xyw, c.r), float4(c.r, p.yzx), step(p.x, c.r));
float d = q.x - min(q.w, q.y);
float e = 1.0e-10;
return float3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);
}
float3 hsb2rgb( float3 c ){
float3 rgb = clamp( abs(fmod(c.x*6.0+float3(0.0,4.0,2.0),6)-3.0)-1.0, 0, 1);
rgb = rgb*rgb*(3.0-2.0*rgb);
return c.z * lerp( float3(1,1,1), rgb, c.y);
}
// float4 getEmissionColor()
// {
// return UNITY_ACCESS_INSTANCED_PROP(Props,_Emission);
// }
// float getGlobalIntensity()
// {
// return UNITY_ACCESS_INSTANCED_PROP(Props, _GlobalIntensity);
// }
// float getFinalIntensity()
// {
// return UNITY_ACCESS_INSTANCED_PROP(Props, _FinalIntensity);
// }
v2f vert (appdata v)
{
v2f o;
UNITY_SETUP_INSTANCE_ID(v); //Insert
UNITY_INITIALIZE_OUTPUT(v2f, o); //Insert
UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o); //Insert
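//fetch the fixture color and pre-scale it by the audio amplitude and the global/final/universal intensities;
//if the result is effectively black, collapse the quad so nothing is rasterized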
half4 e = getEmissionColor();
e = clamp(e, half4(0,0,0,1), half4(_FixtureMaxIntensity*2,_FixtureMaxIntensity*2,_FixtureMaxIntensity*2,1));
#ifdef _ALPHATEST_ON
e*= (_FixutreIntensityMultiplier*0.25);
#else
e*= _FixutreIntensityMultiplier;
#endif
e*= GetAudioReactAmplitude();
e = float4(((e.rgb * _FixtureMaxIntensity) * getGlobalIntensity()) * getFinalIntensity(), e.w);
e*= _UniversalIntensity;
float3 eHSV = RGB2HSV(e.rgb);
if(eHSV.z <= 0.01)
{
v.vertex = float4(0,0,0,0);
o.vertex = UnityObjectToClipPos(v.vertex);
return o;
}
o.uv = TRANSFORM_TEX(v.uv, _MainTex);
o.color = v.color * e;
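//billboard construction: project the object pivot into view space and rebuild the quad's corners there,
//scaled by the object's lossy world scale, so the flare always faces the camera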
float3 quadPivotPosOS = float3(0,0,0);
float3 quadPivotPosWS = TransformObjectToWorld(quadPivotPosOS);
float3 quadPivotPosVS = TransformWorldToView(quadPivotPosWS);
//get transform.lossyScale using:
//https://forum.unity.com/threads/can-i-get-the-scale-in-the-transform-of-the-object-i-attach-a-shader-to-if-so-how.418345/
float2 scaleXY_WS = float2(
length(float3(GetObjectToWorldMatrix()[0].x, GetObjectToWorldMatrix()[1].x, GetObjectToWorldMatrix()[2].x)), // scale x axis
length(float3(GetObjectToWorldMatrix()[0].y, GetObjectToWorldMatrix()[1].y, GetObjectToWorldMatrix()[2].y)) // scale y axis
);
float3 posVS = quadPivotPosVS + float3(v.vertex.xy * scaleXY_WS,0);//reconstruct the quad's 4 corner points in view space
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//complete SV_POSITION's view space to HClip space transformation
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
o.vertex = mul(UNITY_MATRIX_P,float4(posVS,1));
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//do a smooth visibility test with a brute-force for-loop: (COUNT*2+1)^2 samples inside a 2D grid area in view space
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
float visibilityTestPassedCount = 0;
float linearEyeDepthOfFlarePivot = -quadPivotPosVS.z;//view space's forward is pointing to -Z, but we want +Z, so negate it
float testLoopSingleAxisWidth = COUNT*2+1;
float totalTestCount = testLoopSingleAxisWidth * testLoopSingleAxisWidth;
float divider = 1.0 / totalTestCount;
float maxSingleAxisOffset = _LightSourceViewSpaceRadius / testLoopSingleAxisWidth;
//Test for n*n grid in view space, where quad pivot is grid's center.
//For each iteration,
//if that test point passed the scene depth occlusion test, we add 1 to visibilityTestPassedCount
#if _USE_DEPTH_LIGHT
for(int x = -COUNT; x <= COUNT; x++)
{
for(int y = -COUNT; y <= COUNT ; y++)
{
float3 testPosVS = quadPivotPosVS;
testPosVS.xy += float2(x,y) * maxSingleAxisOffset;//add 2D test grid offset, in const view space unit
float4 PivotPosCS = mul(UNITY_MATRIX_P,float4(testPosVS,1));
float4 PivotScreenPos = ComputeScreenPos(PivotPosCS);
float2 screenUV = PivotScreenPos.xy/PivotScreenPos.w;
//if screenUV is out of bounds, treat the sample as occluded, since there is no valid depth texture data to compare against
if(screenUV.x > 1 || screenUV.x < 0 || screenUV.y > 1 || screenUV.y < 0)
continue; //exit means occluded
//tex2D() is unavailable in the vertex shader (no UV derivatives exist before rasterization), so we sample with tex2Dlod() at mip 0 instead
float4 ssd = SAMPLE_DEPTH_TEXTURE_LOD(_CameraDepthTexture, float4(screenUV, 0.0, 0.0));//(uv.x,uv.y,0,mipLevel)
float sampledSceneDepth = ssd.x;
#if !UNITY_REVERSED_Z
sampledSceneDepth = lerp(UNITY_NEAR_CLIP_VALUE, 1, sampledSceneDepth);
#endif
float linearEyeDepthFromSceneDepthTexture = LinearEyeDepth(sampledSceneDepth);
float linearEyeDepthFromSelfALU = PivotPosCS.w; //clip space .w is view space z, = linear eye depth
//do the actual depth comparison test
//+1 means flare test point is visible in screen space
//+0 means flare test point blocked by other objects in screen space, not visible
visibilityTestPassedCount += linearEyeDepthFromSelfALU + _DepthOcclusionTestZBias < linearEyeDepthFromSceneDepthTexture ? 1 : 0;
}
}
float visibilityResult01 = visibilityTestPassedCount * divider;//0~100% visibility result
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//if the camera is too close to the flare, smoothly fade it out so it does not block the view too much (mainly useful for first-person games)
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
visibilityResult01 *= smoothstep(_StartFadeinDistanceWorldUnit,_EndFadeinDistanceWorldUnit,linearEyeDepthOfFlarePivot);
// if(_ShouldDoFlicker)
// {
// float flickerMul = 0;
// //TODO: expose more control to noise? (send me an issue in GitHub, if anyone need this)
// flickerMul += saturate(sin(_Time.y * _FlickerAnimSpeed * 1.0000)) * (1-_FlickResultIntensityLowestPoint) + _FlickResultIntensityLowestPoint;
// flickerMul += saturate(sin(_Time.y * _FlickerAnimSpeed * 0.6437)) * (1-_FlickResultIntensityLowestPoint) + _FlickResultIntensityLowestPoint;
// visibilityResult01 *= saturate(flickerMul/2);
// }
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//apply all combinations(visibilityResult01) to vertex color
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
o.color.a *= visibilityResult01;
o.vertex = visibilityResult01 < divider ? 0 : o.vertex;
// }
#endif
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//premultiply alpha to rgb after alpha's calculation is done
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
o.color.rgb *= o.color.a;
//o.color.a = _UsePreMultiplyAlpha? o.color.a : 0;
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//pure optimization:
//if flare is invisible or nearly invisible,
//invalidate this vertex (and all connected vertices).
//This 100% early exit at clipping stage will prevent any rasterization & fragment shader cost at all
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
float3 hsvFC = RGB2HSV(o.color.xyz);
hsvFC.y = 0.0;
float4 e2 = float4(1,1,1,o.color.w) * hsvFC.z;
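//radial masks from the quad's UV center: maskX fades the flare toward the edges (_FadeAmt),
//satMask desaturates the core by blending toward the value-only color e2 (_ColorSat)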
o.maskX = lerp(1, 0, pow(distance(half2(0.5, 0.5), o.uv), _FadeAmt));
float satMask = lerp(1, 0, pow(distance(half2(0.5, 0.5), o.uv), _ColorSat));
o.color = lerp(o.color, e2, satMask);
#if _ALPHATEST_ON
o.screenPos = ComputeScreenPos(o.vertex);
#endif
// UNITY_TRANSFER_FOG(o,o.vertex);
return o;
}
fixed4 frag (v2f i) : SV_Target
{
#if _ALPHATEST_ON
float2 pos = i.screenPos.xy / i.screenPos.w;
pos *= _ScreenParams.xy;
float DITHER_THRESHOLDS[16] =
{
1.0 / 17.0, 9.0 / 17.0, 3.0 / 17.0, 11.0 / 17.0,
13.0 / 17.0, 5.0 / 17.0, 15.0 / 17.0, 7.0 / 17.0,
4.0 / 17.0, 12.0 / 17.0, 2.0 / 17.0, 10.0 / 17.0,
16.0 / 17.0, 8.0 / 17.0, 14.0 / 17.0, 6.0 / 17.0
};
int index = (uint(pos.x) % 4) * 4 + uint(pos.y) % 4;
float4 col = saturate(tex2D(_MainTex, i.uv ));
// col *= i.maskX;
//clip((col.a) - DITHER_THRESHOLDS[index]);
// apply fog
UNITY_APPLY_FOG(i.fogCoord, col);
clip((((col.r + col.g + col.b)/3) * (_ClippingThreshold * 10)) - DITHER_THRESHOLDS[index]);
return col * i.color;
#else
fixed4 col = saturate(tex2D(_MainTex, i.uv )-_RemoveTextureArtifact) * i.color;
// apply fog
UNITY_APPLY_FOG(i.fogCoord, col);
col *= i.maskX;
return col;
#endif
// fixed4 col = saturate(tex2D(_MainTex, i.uv )-_RemoveTextureArtifact) * i.color;
// UNITY_APPLY_FOG(i.fogCoord, col);
// col *= i.maskX;
// return col;
}
ENDCG
}
}
CustomEditor "VRSLInspector"
}
Shader "VRSL/AudioLink/Standard Static/Lens Flare"
{
//Modified from https://github.com/ColinLeung-NiloCat/UnityURP-BillboardLensFlareShader by ColinLeung-NiloCat from URP to SRP
Properties
{
[Header(Audio Section)]
[Toggle]_EnableAudioLink("Enable Audio Link", Float) = 0
_Band("Band", Float) = 0
_BandMultiplier("Band Multiplier", Range(1, 15)) = 1
_Delay("Delay", Float) = 0
_NumBands("Num Bands", Float) = 4
_AudioSpectrum("AudioSpectrum", 2D) = "black" {}
[Toggle] _EnableColorChord ("Enable Color Chord Tinting", Int) = 0
_FinalIntensity("Final Intensity", Range(0,1)) = 1
_GlobalIntensity("Global Intensity", Range(0,1)) = 1
_GlobalIntensityBlend("Global Intensity Blend", Range(0,1)) = 1
_UniversalIntensity ("Universal Intensity", Range (0,1)) = 1
[HDR]_Emission("Light Color Tint", Color) = (1,1,1,1)
_MainTex ("Texture", 2D) = "white" {}
_FadeAmt ("Fade Strength", Range(0,1)) = 1
_ColorSat ("Color Saturtation Strength", Range(0,1)) = 1
_ScaleFactor ("Scale Factor", Range(0,0.01)) = 1
_ReferenceDistance("Reference Distance", Float) = 2
_UVScale ("UV Scale Test", Range(0.001,2)) = 1
_FixtureMaxIntensity ("Maximum Light Intensity",Range (0,15)) = 1
_CurveMod ("Light Intensity Curve Modifier", Range (-3,8)) = 5.0
_FixutreIntensityMultiplier ("Intensity Multipler (For Bloom Scaling)", Range(1,5)) = 1
_RenderTextureMultiplier("Render Texture Multiplier", Range(1,10)) = 1
_RemoveTextureArtifact("RemoveTextureArtifact", Range(0,0.1)) = 0
[Toggle]_UseTraditionalSampling("Use Traditional Texture Sampling", Int) = 0
[Toggle] _EnableColorTextureSample ("Enable Color Texture Sampling", Int) = 0
_SamplingTexture ("Texture To Sample From for Color", 2D) = "white" {}
_TextureColorSampleX ("X coordinate to sample the texture from", Range(0,1)) = 0.5
_TextureColorSampleY ("Y coordinate to sample the texture from", Range(0,1)) = 0.5
[Header(PreMultiply Alpha. Turn it ON only if your texture has correct alpha)]
[Toggle]_UsePreMultiplyAlpha("UsePreMultiplyAlpha (recommend _BaseMap's alpha = 'From Gray Scale')", Float) = 0
[Header(Depth Occlusion)]
_LightSourceViewSpaceRadius("LightSourceViewSpaceRadius", range(0,1)) = 0.05
_DepthOcclusionTestZBias("DepthOcclusionTestZBias", range(-1,1)) = -0.001
[Header(If camera too close Auto fadeout)]
_StartFadeinDistanceWorldUnit("StartFadeinDistanceWorldUnit",Float) = 0.05
_EndFadeinDistanceWorldUnit("EndFadeinDistanceWorldUnit", Float) = 0.5
[Header(Optional Flicker animation)]
[Toggle]_ShouldDoFlicker("ShouldDoFlicker", FLoat) = 1
_FlickerAnimSpeed("FlickerAnimSpeed", Float) = 5
_FlickResultIntensityLowestPoint("FlickResultIntensityLowestPoint", range(0,1)) = 0.5
[Toggle]_UseDepthLight("Toggle The Requirement of the depth light to function.", Int) = 1
[Toggle] _EnableThemeColorSampling ("Enable Theme Color Sampling", Int) = 0
_ThemeColorTarget ("Choose Theme Color", Int) = 0
[Enum(Transparent,1,AlphaToCoverage,2)] _RenderMode ("Render Mode", Int) = 1
[Enum(Off,0,On,1)] _ZWrite ("Z Write", Int) = 0
[Enum(Off,0,On,1)] _AlphaToCoverage ("Alpha To Coverage", Int) = 0
[Enum(Off,0,One,1)] _BlendDst ("Destination Blend mode", Float) = 1
[Enum(UnityEngine.Rendering.BlendOp)] _BlendOp ("Blend Operation", Float) = 0
_ClippingThreshold ("Clipping Threshold", Range (0,1)) = 0.5
_AlphaProjectionIntensity ("Alpha Projection Intesnity", Range (0,1)) = 0.5
}
SubShader
{
Tags { "RenderType"="Transparent" "Queue" = "Transparent+200" }
LOD 100
Pass
{
AlphaToMask [_AlphaToCoverage]
Zwrite Off
ZTest Off
Blend One [_BlendDst]
Cull Back
Lighting Off
Tags{ "LightMode" = "Always" }
Stencil
{
Ref 142
Comp NotEqual
Pass Keep
}
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
// make fog work
#pragma multi_compile_local _ _USE_DEPTH_LIGHT
#pragma multi_compile_local _ _ALPHATEST_ON
#pragma multi_compile_fog
#pragma multi_compile_instancing
#define VRSL_AUDIOLINK
#define VRSL_FLARE
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
half4 color : COLOR;
UNITY_VERTEX_INPUT_INSTANCE_ID
};
struct v2f
{
float2 uv : TEXCOORD0;
UNITY_FOG_COORDS(1)
float4 vertex : SV_POSITION;
float4 screenPos : TEXCOORD6;
float4 worldDirection : TEXCOORD2;
float4 vertexWorldPos : TEXCOORD3;
half4 color : TEXCOORD4;
float maskX : TEXCOORD5;
UNITY_VERTEX_INPUT_INSTANCE_ID // will turn into this in non OpenGL / non PSSL -> uint instanceID : SV_InstanceID;
UNITY_VERTEX_OUTPUT_STEREO
};
#define COUNT 8 //you can edit to any number(e.g. 1~32), the lower the faster. Keeping this number a const can enable many compiler optimizations
// sampler2D _MainTex;
//float4 _MainTex_ST;
//half4 _Emission;
half _ColorSat, _ScaleFactor, _ReferenceDistance, _UVScale;
float _LightSourceViewSpaceRadius;
float _DepthOcclusionTestZBias;
float _StartFadeinDistanceWorldUnit;
float _EndFadeinDistanceWorldUnit;
float _UsePreMultiplyAlpha;
// float _FlickerAnimSpeed;
float _FlickResultIntensityLowestPoint;
//float _ShouldDoFlicker;
half _RemoveTextureArtifact, _CurveMod;
#include "Packages/com.acchosen.vr-stage-lighting/Runtime/Shaders/Shared/VRSL-Defines.cginc"
#include "../Shared/VRSL-AudioLink-Functions.cginc"
float4x4 GetWorldToViewMatrix()
{
return UNITY_MATRIX_V;
}
float4x4 GetObjectToWorldMatrix()
{
return UNITY_MATRIX_M;
}
float3 TransformWorldToView(float3 positionWS)
{
return mul(GetWorldToViewMatrix(), float4(positionWS, 1.0)).xyz;
}
float3 TransformObjectToWorld(float3 vertex)
{
return mul(GetObjectToWorldMatrix(), float4(vertex, 1.0)).xyz;
}
inline float4 CalculateFrustumCorrection()
{
float x1 = -UNITY_MATRIX_P._31/(UNITY_MATRIX_P._11*UNITY_MATRIX_P._34);
float x2 = -UNITY_MATRIX_P._32/(UNITY_MATRIX_P._22*UNITY_MATRIX_P._34);
return float4(x1, x2, 0, UNITY_MATRIX_P._33/UNITY_MATRIX_P._34 + x1*UNITY_MATRIX_P._13 + x2*UNITY_MATRIX_P._23);
}
//CREDIT TO DJ LUKIS FOR MIRROR DEPTH CORRECTION
inline float CorrectedLinearEyeDepth(float z, float B)
{
return 1.0 / (z/UNITY_MATRIX_P._34 + B);
}
float3 RGB2HSV(float3 c)
{
float4 K = float4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
float4 p = lerp(float4(c.bg, K.wz), float4(c.gb, K.xy), step(c.b, c.g));
float4 q = lerp(float4(p.xyw, c.r), float4(c.r, p.yzx), step(p.x, c.r));
float d = q.x - min(q.w, q.y);
float e = 1.0e-10;
return float3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);
}
float3 hsb2rgb( float3 c ){
float3 rgb = clamp( abs(fmod(c.x*6.0+float3(0.0,4.0,2.0),6)-3.0)-1.0, 0, 1);
rgb = rgb*rgb*(3.0-2.0*rgb);
return c.z * lerp( float3(1,1,1), rgb, c.y);
}
// float4 getEmissionColor()
// {
// return UNITY_ACCESS_INSTANCED_PROP(Props,_Emission);
// }
// float getGlobalIntensity()
// {
// return UNITY_ACCESS_INSTANCED_PROP(Props, _GlobalIntensity);
// }
// float getFinalIntensity()
// {
// return UNITY_ACCESS_INSTANCED_PROP(Props, _FinalIntensity);
// }
v2f vert (appdata v)
{
v2f o;
UNITY_SETUP_INSTANCE_ID(v); //Insert
UNITY_INITIALIZE_OUTPUT(v2f, o); //Insert
UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o); //Insert
half4 e = getEmissionColor();
e = clamp(e, half4(0,0,0,1), half4(_FixtureMaxIntensity*2,_FixtureMaxIntensity*2,_FixtureMaxIntensity*2,1));
#ifdef _ALPHATEST_ON
e*= (_FixutreIntensityMultiplier*0.25);
#else
e*= _FixutreIntensityMultiplier;
#endif
e*= GetAudioReactAmplitude();
e = float4(((e.rgb * _FixtureMaxIntensity) * getGlobalIntensity()) * getFinalIntensity(), e.w);
e*= _UniversalIntensity;
float3 eHSV = RGB2HSV(e.rgb);
if(eHSV.z <= 0.01)
{
v.vertex = float4(0,0,0,0);
o.vertex = UnityObjectToClipPos(v.vertex);
return o;
}
o.uv = TRANSFORM_TEX(v.uv, _MainTex);
o.color = v.color * e;
float3 quadPivotPosOS = float3(0,0,0);
float3 quadPivotPosWS = TransformObjectToWorld(quadPivotPosOS);
float3 quadPivotPosVS = TransformWorldToView(quadPivotPosWS);
//get transform.lossyScale using:
//https://forum.unity.com/threads/can-i-get-the-scale-in-the-transform-of-the-object-i-attach-a-shader-to-if-so-how.418345/
float2 scaleXY_WS = float2(
length(float3(GetObjectToWorldMatrix()[0].x, GetObjectToWorldMatrix()[1].x, GetObjectToWorldMatrix()[2].x)), // scale x axis
length(float3(GetObjectToWorldMatrix()[0].y, GetObjectToWorldMatrix()[1].y, GetObjectToWorldMatrix()[2].y)) // scale y axis
);
float3 posVS = quadPivotPosVS + float3(v.vertex.xy * scaleXY_WS,0);//recontruct quad 4 points in view space
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//complete SV_POSITION's view space to HClip space transformation
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
o.vertex = mul(UNITY_MATRIX_P,float4(posVS,1));
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//do smooth visibility test using brute force forloop (COUNT*2+1)^2 times inside a view space 2D grid area
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
float visibilityTestPassedCount = 0;
float linearEyeDepthOfFlarePivot = -quadPivotPosVS.z;//view space's forward is pointing to -Z, but we want +Z, so negate it
float testLoopSingleAxisWidth = COUNT*2+1;
float totalTestCount = testLoopSingleAxisWidth * testLoopSingleAxisWidth;
float divider = 1.0 / totalTestCount;
float maxSingleAxisOffset = _LightSourceViewSpaceRadius / testLoopSingleAxisWidth;
//Test for n*n grid in view space, where quad pivot is grid's center.
//For each iteration,
//if that test point passed the scene depth occlusion test, we add 1 to visibilityTestPassedCount
#if _USE_DEPTH_LIGHT
for(int x = -COUNT; x <= COUNT; x++)
{
for(int y = -COUNT; y <= COUNT ; y++)
{
float3 testPosVS = quadPivotPosVS;
testPosVS.xy += float2(x,y) * maxSingleAxisOffset;//add 2D test grid offset, in const view space unit
float4 PivotPosCS = mul(UNITY_MATRIX_P,float4(testPosVS,1));
float4 PivotScreenPos = ComputeScreenPos(PivotPosCS);
float2 screenUV = PivotScreenPos.xy/PivotScreenPos.w;
//if screenUV out of bound, treat it as occluded, because no correct depth texture data can be used to compare
if(screenUV.x > 1 || screenUV.x < 0 || screenUV.y > 1 || screenUV.y < 0)
continue; //exit means occluded
//we don't have tex2D() in vertex shader, because rasterization is not done by GPU, so we use tex2Dlod() with mip0 instead
float4 ssd = SAMPLE_DEPTH_TEXTURE_LOD(_CameraDepthTexture, float4(screenUV, 0.0, 0.0));//(uv.x,uv.y,0,mipLevel)
float sampledSceneDepth = ssd.x;
#if !UNITY_REVERSED_Z
sampledSceneDepth = lerp(UNITY_NEAR_CLIP_VALUE, 1, sampledSceneDepth);
#endif
float linearEyeDepthFromSceneDepthTexture = LinearEyeDepth(sampledSceneDepth);
float linearEyeDepthFromSelfALU = PivotPosCS.w; //clip space .w is view space z, = linear eye depth
//do the actual depth comparision test
//+1 means flare test point is visible in screen space
//+0 means flare test point blocked by other objects in screen space, not visible
visibilityTestPassedCount += linearEyeDepthFromSelfALU + _DepthOcclusionTestZBias < linearEyeDepthFromSceneDepthTexture ? 1 : 0;
}
}
float visibilityResult01 = visibilityTestPassedCount * divider;//0~100% visiblility result
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//if camera too close to flare , smooth fade out to prevent flare blocking camera too much (usually for fps games)
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
visibilityResult01 *= smoothstep(_StartFadeinDistanceWorldUnit,_EndFadeinDistanceWorldUnit,linearEyeDepthOfFlarePivot);
// if(_ShouldDoFlicker)
// {
// float flickerMul = 0;
// //TODO: expose more control to noise? (send me an issue in GitHub, if anyone need this)
// flickerMul += saturate(sin(_Time.y * _FlickerAnimSpeed * 1.0000)) * (1-_FlickResultIntensityLowestPoint) + _FlickResultIntensityLowestPoint;
// flickerMul += saturate(sin(_Time.y * _FlickerAnimSpeed * 0.6437)) * (1-_FlickResultIntensityLowestPoint) + _FlickResultIntensityLowestPoint;
// visibilityResult01 *= saturate(flickerMul/2);
// }
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//apply all combinations(visibilityResult01) to vertex color
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
o.color.a *= visibilityResult01;
o.vertex = visibilityResult01 < divider ? 0 : o.vertex;
// }
#endif
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//premultiply alpha to rgb after alpha's calculation is done
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
o.color.rgb *= o.color.a;
//o.color.a = _UsePreMultiplyAlpha? o.color.a : 0;
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//pure optimization:
//if flare is invisible or nearly invisible,
//invalid this vertex (and all connected vertices).
//This 100% early exit at clipping stage will prevent any rasterization & fragment shader cost at all
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
float3 hsvFC = RGB2HSV(o.color.xyz);
hsvFC.y = 0.0;
float4 e2 = float4(1,1,1,o.color.w) * hsvFC.z;
o.maskX = lerp(1, 0, pow(distance(half2(0.5, 0.5), o.uv), _FadeAmt));
float satMask = lerp(1, 0, pow(distance(half2(0.5, 0.5), o.uv), _ColorSat));
o.color = lerp(o.color, e2, satMask);
#if _ALPHATEST_ON
o.screenPos = ComputeScreenPos(o.vertex);
#endif
// UNITY_TRANSFER_FOG(o,o.vertex);
return o;
}
fixed4 frag (v2f i) : SV_Target
{
#if _ALPHATEST_ON
float2 pos = i.screenPos.xy / i.screenPos.w;
pos *= _ScreenParams.xy;
float DITHER_THRESHOLDS[16] =
{
1.0 / 17.0, 9.0 / 17.0, 3.0 / 17.0, 11.0 / 17.0,
13.0 / 17.0, 5.0 / 17.0, 15.0 / 17.0, 7.0 / 17.0,
4.0 / 17.0, 12.0 / 17.0, 2.0 / 17.0, 10.0 / 17.0,
16.0 / 17.0, 8.0 / 17.0, 14.0 / 17.0, 6.0 / 17.0
};
int index = (uint(pos.x) % 4) * 4 + uint(pos.y) % 4;
float4 col = saturate(tex2D(_MainTex, i.uv ));
// col *= i.maskX;
//clip((col.a) - DITHER_THRESHOLDS[index]);
// apply fog
UNITY_APPLY_FOG(i.fogCoord, col);
clip((((col.r + col.g + col.b)/3) * (_ClippingThreshold * 10)) - DITHER_THRESHOLDS[index]);
return col * i.color;
#else
fixed4 col = saturate(tex2D(_MainTex, i.uv )-_RemoveTextureArtifact) * i.color;
// apply fog
UNITY_APPLY_FOG(i.fogCoord, col);
col *= i.maskX;
return col;
#endif
// fixed4 col = saturate(tex2D(_MainTex, i.uv )-_RemoveTextureArtifact) * i.color;
// UNITY_APPLY_FOG(i.fogCoord, col);
// col *= i.maskX;
// return col;
}
ENDCG
}
}
CustomEditor "VRSLInspector"
}

View File

@ -1,265 +1,265 @@
Shader "VRSL/AudioLink/Standard Static/Projection"
{
Properties
{
//[Header (INSTANCED PROPERTIES)]
[Enum(UnityEngine.Rendering.BlendMode)] _BlendSrc ("Source Blend mode", Float) = 2
//[Enum(UnityEngine.Rendering.BlendMode)] _BlendDst ("Destination Blend mode", Float) = 1
[Enum(UnityEngine.Rendering.BlendOp)] _BlendOp ("Blend Operation", Float) = 0
// _BlockLengthX("OSC Block Base Distance X", Float) = 0.019231
// _BlockLengthY("OSC Block Base Distance Y", Float) = 0
[Header(Audio Section)]
[Toggle]_EnableAudioLink("Enable Audio Link", Float) = 0
[Toggle] _EnableColorChord ("Enable Color Chord Tinting", Int) = 0
_Band("Band", Float) = 0
_BandMultiplier("Band Multiplier", Range(1, 15)) = 1
_Delay("Delay", Float) = 0
_NumBands("Num Bands", Float) = 4
_AudioSpectrum("AudioSpectrum", 2D) = "black" {}
//[Header(LIGHTING CONTROLS)]
_GlobalIntensity("Global Intensity", Range(0,1)) = 1
_GlobalIntensityBlend("Global Intensity Blend", Range(0,1)) = 1
_FinalIntensity("Final Intensity", Range(0,1)) = 1
_UniversalIntensity ("Universal Intensity", Range (0,1)) = 1
[HDR]_Emission("Light Color Tint", Color) = (1,1,1,1)
_FixtureMaxIntensity ("Maximum Light Intensity",Range (0,6)) = 1
//[NoScaleOffset] _SceneAlbedo ("Scene Albedo Render Texture", 2D) = "white" {}
_RenderTextureMultiplier("Render Texture Multiplier", Range(1,10)) = 1
[Toggle]_UseTraditionalSampling("Use Traditional Texture Sampling", Int) = 0
//Color Texture Sampling Properties
[Toggle] _EnableColorTextureSample ("Enable Color Texture Sampling", Int) = 0
_SamplingTexture ("Texture To Sample From for Color", 2D) = "white" {}
_TextureColorSampleX ("X coordinate to sample the texture from", Range(0,1)) = 0.5
_TextureColorSampleY ("Y coordinate to sample the texture from", Range(0,1)) = 0.5
//[Header(PROJECTION SETTINGS)]
[NoScaleOffset] _ProjectionMainTex ("Projection Texture GOBO 1", 2D) = "white"{}
_ProjectionMaxIntensity ("Maximum Projection Intensity", Range (0,50)) = 1
_XOffset ("Projection Offset X", Range(-6, 6)) = 0
_YOffset ("Projection Offset Y", Range(-6, 6)) = 0
_ConeWidth("Specular Strength or whatever", Range(0,5)) = 0
_ProjectionRange ("Projection Drawing Range", Range(0,10)) = 0
_ProjectionRangeOrigin ("Projection Drawing Range Scale Origin", Float) = (0, -0.07535, 0.12387, 0)
// [Space(12)]
_ProjectionDistanceFallOff("Attenuation Constant", Range(0,1)) = 1
_ProjectionUVMod ("Projection UV Scale Modifier ", Range(0,1)) = 0
_Fade ("Light Range", Range(0, 25)) = 1
_FeatherOffset ("Attenuation Quadratic", Range(0,1)) = 1
//[Space(12)]
[Toggle] _UseWorldNorm("Use World Normal vs View Normal", Float) = 0
_ModX ("Projection UV X Stretch", Range(-2, 2)) = 1
_ModY ("Projection UV Y Stretch", Range(-2, 2)) = 1
_ProjectionRotation("Projection UV Rotation", Range(-180, 180)) = 0
[Toggle] _EnableSpin("Enable Auto Spinning", Float) = 0
_SpinSpeed ("Auto Spin Speed", Range(0, 10)) = 0
//[Space(8)]
_RedMultiplier ("Red Channel Multiplier", Range(1, 5)) = 1
_GreenMultiplier ("Green Channel Multiplier", Range(1, 5)) = 1
_BlueMultiplier ("Blue Channel Multiplier", Range(1,5)) = 1
// [Header(MAIN)]
// [Enum(Unity Default, 0, Non Linear, 1)]_LightProbeMethod("Light Probe Sampling", Int) = 0
// [Enum(UVs, 0, Triplanar World, 1, Triplanar Object, 2)]_TextureSampleMode("Texture Mode", Int) = 0
// _TriplanarFalloff("Triplanar Blend", Range(0.5,1)) = 1
// _MainTex("Main Texture", 2D) = "white" {}
// _Color("Color", Color) = (1,1,1,1)
// [Space(16)]
// [Header(NORMALS)]
// _BumpMap("Normal Map", 2D) = "bump" {}
// _BumpScale("Normal Scale", Range(-1,1)) = 1
// [Space(16)]
// [Header(METALLIC)]
// _MetallicGlossMap("Metallic Map", 2D) = "white" {}
// _Metallic("Metallic", Range(0,1)) = 0
// _Glossiness("Smoothness", Range(0,1)) = 0
// [Space(16)]
// [Header(LIGHTMAPPING HACKS)]
// _SpecularLMOcclusion("Specular Occlusion", Range(0,1)) = 0
// _SpecLMOcclusionAdjust("Spec Occlusion Sensitivity", Range(0,1)) = 0.2
// _LMStrength("Lightmap Strength", Range(0,1)) = 1
// _RTLMStrength("Realtime Lightmap Strength", Range(0,1)) = 1
[Toggle] _EnableThemeColorSampling ("Enable Theme Color Sampling", Int) = 0
_ThemeColorTarget ("Choose Theme Color", Int) = 0
[Enum(Transparent,1,AlphaToCoverage,2)] _RenderMode ("Render Mode", Int) = 1
[Enum(Off,0,On,1)] _ZWrite ("Z Write", Int) = 0
[Enum(Off,0,On,1)] _AlphaToCoverage ("Alpha To Coverage", Int) = 0
[Enum(Off,0,One,1)] _BlendDst ("Destination Blend mode", Float) = 1
[Enum(UnityEngine.Rendering.BlendOp)] _BlendOp ("Blend Operation", Float) = 0
_ClippingThreshold ("Clipping Threshold", Range (0,1)) = 0.5
_AlphaProjectionIntensity ("Alpha Projection Intensity", Range (0,1)) = 0.5
[Enum(13CH,0,5CH,1)] _ChannelMode ("Channel Mode", Int) = 0
[Enum(Off,0,On,1)] _MultiSampleDepth ("Multi Sample Depth", Int) = 1
}
SubShader
{
//UNITY_REQUIRE_ADVANCED_BLEND(all_equations)
Tags{ "Queue" = "Transparent+1" "IgnoreProjector"="True" "RenderType" = "Transparent" }
Pass
{
Tags{ "ForceNoShadowCasting"="True" "IgnoreProjector"="True" "LightMode" = "Always"}
Cull Front
Ztest GEqual
ZWrite Off
Blend [_BlendSrc] [_BlendDst]
BlendOp [_BlendOp]
Lighting Off
//SeparateSpecular Off
Stencil
{
Ref 142
Comp NotEqual
}
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#pragma multi_compile_fog
#pragma multi_compile_instancing
#pragma multi_compile_local _ _ALPHATEST_ON
#pragma shader_feature_local _MULTISAMPLEDEPTH
#define PROJECTION_YES
#define VRSL_AUDIOLINK
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
float3 texcoord : TEXCOORD1;
float4 color : COLOR;
float3 normal : TEXCOORD3;
float3 tangent : TANGENT;
float4 projectionorigin : TEXCOORD2;
UNITY_VERTEX_INPUT_INSTANCE_ID
};
struct v2f
{
float4 pos : SV_POSITION;
float2 uv : TEXCOORD0;
float3 ray : TEXCOORD2;
float4 screenPos : TEXCOORD4;
float4 color : COLOR;
float3 normal : TEXCOORD3;
float4 projectionorigin : TEXCOORD5;
float4 worldDirection : TEXCOORD6;
float4 worldPos : TEXCOORD7;
float4 emissionColor : TEXCOORD8;
float3 audioGlobalFinalIntensity: TEXCOORD1;
UNITY_VERTEX_INPUT_INSTANCE_ID
UNITY_VERTEX_OUTPUT_STEREO
};
#include "Packages/com.acchosen.vr-stage-lighting/Runtime/Shaders/Shared/VRSL-Defines.cginc"
#include "../Shared/VRSL-AudioLink-Functions.cginc"
#include "Packages/com.acchosen.vr-stage-lighting/Runtime/Shaders/StaticLights/VRSL-StaticLight-ProjectionFrag.cginc"
#define IF(a, b, c) lerp(b, c, step((fixed) (a), 0));
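//Scale the projection mesh around _ProjectionRangeOrigin by 'scalar'; vertices whose green vertex
//color is zero keep their original position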
float4 CalculateProjectionScaleRange(appdata v, float4 input, float scalar)
{
float4 oldinput = input;
float4x4 scaleMatrix = float4x4(scalar, 0, 0, 0,
0, scalar, 0, 0,
0, 0, scalar, 0,
0, 0, 0, 1.0);
float4 newOrigin = input.w * _ProjectionRangeOrigin;
input.xyz = input.xyz - newOrigin;
//Do stretch
float4 newProjectionScale = mul(scaleMatrix, input);
input.xyz = newProjectionScale;
input.xyz = input.xyz + newOrigin;
input.xyz = IF(v.color.g != 0, input.xyz, oldinput);
return input;
}
inline float4 CalculateFrustumCorrection()
{
float x1 = -UNITY_MATRIX_P._31/(UNITY_MATRIX_P._11*UNITY_MATRIX_P._34);
float x2 = -UNITY_MATRIX_P._32/(UNITY_MATRIX_P._22*UNITY_MATRIX_P._34);
return float4(x1, x2, 0, UNITY_MATRIX_P._33/UNITY_MATRIX_P._34 + x1*UNITY_MATRIX_P._13 + x2*UNITY_MATRIX_P._23);
}
//VERTEX SHADER
v2f vert (appdata v)
{
v2f o;
UNITY_INITIALIZE_OUTPUT(v2f, o);
UNITY_SETUP_INSTANCE_ID(v);
UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o);
//
UNITY_TRANSFER_INSTANCE_ID(v, o);
o.audioGlobalFinalIntensity.x = GetAudioReactAmplitude();
o.audioGlobalFinalIntensity.y = getGlobalIntensity();
o.audioGlobalFinalIntensity.z = getFinalIntensity();
o.emissionColor = getEmissionColor();
v.vertex = CalculateProjectionScaleRange(v, v.vertex, _ProjectionRange);
o.projectionorigin = CalculateProjectionScaleRange(v, _ProjectionRangeOrigin, _ProjectionRange);
//move verts to clip space
o.pos = UnityObjectToClipPos(v.vertex);
//get screen space position of verts
o.screenPos = ComputeScreenPos(o.pos);
//Putting in the vertex position before the transformation seems to somewhat move the projection correctly, but is still incorrect...?
o.ray = UnityObjectToViewPos(v.vertex).xyz;
//invert z axis so that it projects from camera properly
o.ray *= float3(1,1,-1);
//save the vertex color in case the rotation calculation needs to be performed in the fragment shader
o.color = v.color;
o.worldPos = mul(unity_ObjectToWorld, v.vertex);
//For Mirror Depth Correction
o.worldDirection.xyz = o.worldPos.xyz - _WorldSpaceCameraPos;
// pack correction factor into direction w component to save space
o.worldDirection.w = dot(o.pos, CalculateFrustumCorrection());
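//collapse the mesh to a point when the audio, global or final intensity (or the emission color itself)
//is effectively zero, so the projection pass costs nothing while the fixture is dark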
if(o.audioGlobalFinalIntensity.x <= 0.005 || o.audioGlobalFinalIntensity.y <= 0.005 || o.audioGlobalFinalIntensity.z <= 0.005 || all(o.emissionColor.xyz <= float3(0.005, 0.005, 0.005)))
{
v.vertex = float4(0,0,0,0);
o.pos = UnityObjectToClipPos(v.vertex);
}
return o;
}
fixed4 frag (v2f i) : SV_Target
{
//UNITY_SETUP_INSTANCE_ID(i);
return ProjectionFrag(i);
}
ENDCG
}
}
CustomEditor "VRSLInspector"
}
Shader "VRSL/AudioLink/Standard Static/Projection"
{
Properties
{
//[Header (INSTANCED PROPERITES)]
[Enum(UnityEngine.Rendering.BlendMode)] _BlendSrc ("Source Blend mode", Float) = 2
//[Enum(UnityEngine.Rendering.BlendMode)] _BlendDst ("Destination Blend mode", Float) = 1
[Enum(UnityEngine.Rendering.BlendOp)] _BlendOp ("Blend Operation", Float) = 0
// _BlockLengthX("OSC Block Base Distance X", Float) = 0.019231
// _BlockLengthY("OSC Block Base Distance Y", Float) = 0
[Header(Audio Section)]
[Toggle]_EnableAudioLink("Enable Audio Link", Float) = 0
[Toggle] _EnableColorChord ("Enable Color Chord Tinting", Int) = 0
_Band("Band", Float) = 0
_BandMultiplier("Band Multiplier", Range(1, 15)) = 1
_Delay("Delay", Float) = 0
_NumBands("Num Bands", Float) = 4
_AudioSpectrum("AudioSpectrum", 2D) = "black" {}
//[Header(LIGHTING CONTROLS)]
_GlobalIntensity("Global Intensity", Range(0,1)) = 1
_GlobalIntensityBlend("Global Intensity Blend", Range(0,1)) = 1
_FinalIntensity("Final Intensity", Range(0,1)) = 1
_UniversalIntensity ("Universal Intensity", Range (0,1)) = 1
[HDR]_Emission("Light Color Tint", Color) = (1,1,1,1)
_FixtureMaxIntensity ("Maximum Light Intensity",Range (0,6)) = 1
//[NoScaleOffset] _SceneAlbedo ("Scene Albedo Render Texture", 2D) = "white" {}
_RenderTextureMultiplier("Render Texture Multiplier", Range(1,10)) = 1
[Toggle]_UseTraditionalSampling("Use Traditional Texture Sampling", Int) = 0
//Color Texture Sampling Properties
[Toggle] _EnableColorTextureSample ("Enable Color Texture Sampling", Int) = 0
_SamplingTexture ("Texture To Sample From for Color", 2D) = "white" {}
_TextureColorSampleX ("X coordinate to sample the texture from", Range(0,1)) = 0.5
_TextureColorSampleY ("Y coordinate to sample the texture from", Range(0,1)) = 0.5
//[Header(PROJECTION SETTINGS)]
[NoScaleOffset] _ProjectionMainTex ("Projection Texture GOBO 1", 2D) = "white"{}
_ProjectionMaxIntensity ("Maximum Projection Intensity", Range (0,50)) = 1
_XOffset ("Projection Offset X", Range(-6, 6)) = 0
_YOffset ("Projection Offset Y", Range(-6, 6)) = 0
_ConeWidth("Specular Strength or whatever", Range(0,5)) = 0
_ProjectionRange ("Projection Drawing Range", Range(0,10)) = 0
_ProjectionRangeOrigin ("Projection Drawing Range Scale Origin", Float) = (0, -0.07535, 0.12387, 0)
// [Space(12)]
_ProjectionDistanceFallOff("Attenuation Constant", Range(0,1)) = 1
_ProjectionUVMod ("Projection UV Scale Modifier ", Range(0,1)) = 0
_Fade ("Light Range", Range(0, 25)) = 1
_FeatherOffset ("Attenuation Quadratic", Range(0,1)) = 1
//[Space(12)]
[Toggle] _UseWorldNorm("Use World Normal vs View Normal", Float) = 0
_ModX ("Projection UV X Stretch", Range(-2, 2)) = 1
_ModY ("Projection UV Y Stretch", Range(-2, 2)) = 1
_ProjectionRotation("Projection UV Rotation", Range(-180, 180)) = 0
[Toggle] _EnableSpin("Enable Auto Spinning", Float) = 0
_SpinSpeed ("Auto Spin Speed", Range(0, 10)) = 0
//[Space(8)]
_RedMultiplier ("Red Channel Multiplier", Range(1, 5)) = 1
_GreenMultiplier ("Green Channel Multiplier", Range(1, 5)) = 1
_BlueMultiplier ("Blue Channel Multiplier", Range(1,5)) = 1
// [Header(MAIN)]
// [Enum(Unity Default, 0, Non Linear, 1)]_LightProbeMethod("Light Probe Sampling", Int) = 0
// [Enum(UVs, 0, Triplanar World, 1, Triplanar Object, 2)]_TextureSampleMode("Texture Mode", Int) = 0
// _TriplanarFalloff("Triplanar Blend", Range(0.5,1)) = 1
// _MainTex("Main Texture", 2D) = "white" {}
// _Color("Color", Color) = (1,1,1,1)
// [Space(16)]
// [Header(NORMALS)]
// _BumpMap("Normal Map", 2D) = "bump" {}
// _BumpScale("Normal Scale", Range(-1,1)) = 1
// [Space(16)]
// [Header(METALLIC)]
// _MetallicGlossMap("Metallic Map", 2D) = "white" {}
// _Metallic("Metallic", Range(0,1)) = 0
// _Glossiness("Smoothness", Range(0,1)) = 0
// [Space(16)]
// [Header(LIGHTMAPPING HACKS)]
// _SpecularLMOcclusion("Specular Occlusion", Range(0,1)) = 0
// _SpecLMOcclusionAdjust("Spec Occlusion Sensitiviy", Range(0,1)) = 0.2
// _LMStrength("Lightmap Strength", Range(0,1)) = 1
// _RTLMStrength("Realtime Lightmap Strength", Range(0,1)) = 1
[Toggle] _EnableThemeColorSampling ("Enable Theme Color Sampling", Int) = 0
_ThemeColorTarget ("Choose Theme Color", Int) = 0
[Enum(Transparent,1,AlphaToCoverage,2)] _RenderMode ("Render Mode", Int) = 1
[Enum(Off,0,On,1)] _ZWrite ("Z Write", Int) = 0
[Enum(Off,0,On,1)] _AlphaToCoverage ("Alpha To Coverage", Int) = 0
[Enum(Off,0,One,1)] _BlendDst ("Destination Blend mode", Float) = 1
[Enum(UnityEngine.Rendering.BlendOp)] _BlendOp ("Blend Operation", Float) = 0
_ClippingThreshold ("Clipping Threshold", Range (0,1)) = 0.5
_AlphaProjectionIntensity ("Alpha Projection Intesnity", Range (0,1)) = 0.5
[Enum(13CH,0,5CH,1)] _ChannelMode ("Channel Mode", Int) = 0
[Enum(Off,0,On,1)] _MultiSampleDepth ("Multi Sample Depth", Int) = 1
}
SubShader
{
//UNITY_REQUIRE_ADVANDED_BLEND(all_equations)
Tags{ "Queue" = "Transparent+1" "IgnoreProjector"="True" "RenderType" = "Transparent" }
Pass
{
Tags{ "ForceNoShadowCasting"="True" "IgnoreProjector"="True" "LightMode" = "Always"}
Cull Front
Ztest GEqual
ZWrite Off
Blend [_BlendSrc] [_BlendDst]
BlendOp [_BlendOp]
Lighting Off
//SeparateSpecular Off
Stencil
{
Ref 142
Comp NotEqual
}
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#pragma multi_compile_fog
#pragma multi_compile_instancing
#pragma multi_compile_local _ _ALPHATEST_ON
#pragma shader_feature_local _MULTISAMPLEDEPTH
#define PROJECTION_YES
#define VRSL_AUDIOLINK
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
float3 texcoord : TEXCOORD1;
float4 color : COLOR;
float3 normal : TEXCOORD3;
float3 tangent : TANGENT;
float4 projectionorigin : TEXCOORD2;
UNITY_VERTEX_INPUT_INSTANCE_ID
};
struct v2f
{
float4 pos : SV_POSITION;
float2 uv : TEXCOORD0;
float3 ray : TEXCOORD2;
float4 screenPos : TEXCOORD4;
float4 color : COLOR;
float3 normal : TEXCOORD3;
float4 projectionorigin : TEXCOORD5;
float4 worldDirection : TEXCOORD6;
float4 worldPos : TEXCOORD7;
float4 emissionColor : TEXCOORD8;
float3 audioGlobalFinalIntensity: TEXCOORD1;
UNITY_VERTEX_INPUT_INSTANCE_ID
UNITY_VERTEX_OUTPUT_STEREO
};
#include "Packages/com.acchosen.vr-stage-lighting/Runtime/Shaders/Shared/VRSL-Defines.cginc"
#include "../Shared/VRSL-AudioLink-Functions.cginc"
#include "Packages/com.acchosen.vr-stage-lighting/Runtime/Shaders/StaticLights/VRSL-StaticLight-ProjectionFrag.cginc"
#define IF(a, b, c) lerp(b, c, step((fixed) (a), 0));
float4 CalculateProjectionScaleRange(appdata v, float4 input, float scalar)
{
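// Uniformly scale the vertex about _ProjectionRangeOrigin by the given scalar; vertices whose green vertex-colour channel is zero keep their original position.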
float4 oldinput = input;
float4x4 scaleMatrix = float4x4(scalar, 0, 0, 0,
0, scalar, 0, 0,
0, 0, scalar, 0,
0, 0, 0, 1.0);
float4 newOrigin = input.w * _ProjectionRangeOrigin;
input.xyz = input.xyz - newOrigin;
//Do stretch
float4 newProjectionScale = mul(scaleMatrix, input);
input.xyz = newProjectionScale;
input.xyz = input.xyz + newOrigin;
input.xyz = IF(v.color.g != 0, input.xyz, oldinput);
return input;
}
inline float4 CalculateFrustumCorrection()
{
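// Per-vertex factor (packed into worldDirection.w below) used to reconstruct corrected linear eye depth from the raw depth sample, per DJ Lukis' mirror depth correction.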
float x1 = -UNITY_MATRIX_P._31/(UNITY_MATRIX_P._11*UNITY_MATRIX_P._34);
float x2 = -UNITY_MATRIX_P._32/(UNITY_MATRIX_P._22*UNITY_MATRIX_P._34);
return float4(x1, x2, 0, UNITY_MATRIX_P._33/UNITY_MATRIX_P._34 + x1*UNITY_MATRIX_P._13 + x2*UNITY_MATRIX_P._23);
}
//VERTEX SHADER
v2f vert (appdata v)
{
v2f o;
UNITY_INITIALIZE_OUTPUT(v2f, o);
UNITY_SETUP_INSTANCE_ID(v);
UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o);
//
UNITY_TRANSFER_INSTANCE_ID(v, o);
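// Cache the audio amplitude, global/final intensity and emission colour so the fragment stage (and the early-out below) can cheaply test whether the light is effectively off.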
o.audioGlobalFinalIntensity.x = GetAudioReactAmplitude();
o.audioGlobalFinalIntensity.y = getGlobalIntensity();
o.audioGlobalFinalIntensity.z = getFinalIntensity();
o.emissionColor = getEmissionColor();
v.vertex = CalculateProjectionScaleRange(v, v.vertex, _ProjectionRange);
o.projectionorigin = CalculateProjectionScaleRange(v, _ProjectionRangeOrigin, _ProjectionRange);
//move verts to clip space
o.pos = UnityObjectToClipPos(v.vertex);
//get screen space position of verts
o.screenPos = ComputeScreenPos(o.pos);
//Putting in the vertex position before the transformation seems to somewhat move the projection correctly, but is still incorrect...?
o.ray = UnityObjectToViewPos(v.vertex).xyz;
//invert z axis so that it projects from camera properly
o.ray *= float3(1,1,-1);
//saving vertex color in case we need to perform the rotation calculation in the fragment shader
o.color = v.color;
o.worldPos = mul(unity_ObjectToWorld, v.vertex);
//For Mirror Depth Correction
o.worldDirection.xyz = o.worldPos.xyz - _WorldSpaceCameraPos;
// pack correction factor into direction w component to save space
o.worldDirection.w = dot(o.pos, CalculateFrustumCorrection());
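// If any intensity term (or the emission colour) is effectively zero, collapse the mesh to a degenerate point so no projection fragments are rasterised.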
if(o.audioGlobalFinalIntensity.x <= 0.005 || o.audioGlobalFinalIntensity.y <= 0.005 || o.audioGlobalFinalIntensity.z <= 0.005 || all(o.emissionColor.xyz <= float4(0.005, 0.005, 0.005, 1.0)))
{
v.vertex = float4(0,0,0,0);
o.pos = UnityObjectToClipPos(v.vertex);
}
return o;
}
fixed4 frag (v2f i) : SV_Target
{
//UNITY_SETUP_INSTANCE_ID(i);
return ProjectionFrag(i);
}
ENDCG
}
}
CustomEditor "VRSLInspector"
}

View File

@ -1,399 +1,399 @@
//CREDIT TO DJ LUKIS FOR MIRROR DEPTH CORRECTION
inline float CorrectedLinearEyeDepth(float z, float B)
{
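// z is the raw depth-buffer sample; B is the per-vertex correction factor the vertex shader packs into worldDirection.w.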
return 1.0 / (z/UNITY_MATRIX_P._34 + B);
}
#define IF(a, b, c) lerp(b, c, step((fixed) (a), 0));
float3 getWpos(float depth, float3 ray)
{
float4 vpos = float4(ray * depth, 1);
// scale ray by linearized depth, which gives us the position of the ray
// intersection with the depth buffer in view space. This is the point of intersection in view space.
float3 wpos = (mul(unity_CameraToWorld, vpos).xyz);
//convert view space coordinate to world space coordinate.
//Wpos is now coordinates for intersection.
return wpos;
}
float3 getProjPos(float3 wpos)
{
return (mul(unity_WorldToObject,float4(wpos.x, wpos.y, wpos.z, 1)));
}
//Huge, huge thanks and shoutout to Uncomfy on the VRC Shader Discord for helping me figure this out <3
half4 InvertRotations (half4 input, half panValue, half tiltValue)
{
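// Rotate the sample position about _FixtureRotationOrigin by the fixture's pan/tilt (transposed when the invert flags are set); the AudioLink variant only applies the fixed 90-degree tilt offset since it has no DMX pan/tilt input.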
half sX, cX, sY, cY;
#ifdef VRSL_DMX
half angleY = radians(getOffsetY() + (panValue));
#endif
#ifdef VRSL_AUDIOLINK
half angleY = radians(0);
#endif
sY = sin(angleY);
cY = cos(angleY);
half4x4 rotateYMatrix = half4x4(cY, sY, 0, 0,
-sY, cY, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1);
half4 BaseAndFixturePos = input;
//INVERSION CHECK
rotateYMatrix = IF(checkPanInvertY() == 1, transpose(rotateYMatrix), rotateYMatrix);
//half4 localRotY = mul(rotateYMatrix, BaseAndFixturePos);
//LOCALROTY IS NEW ROTATION
half tiltOffset = 90.0;
tiltOffset = IF(checkTiltInvertZ() == 1, -tiltOffset, tiltOffset);
//set new origin to do transform
half4 newOrigin = input.w * _FixtureRotationOrigin;
input.xyz -= newOrigin;
#ifdef VRSL_DMX
half angleX = radians(getOffsetX() + (tiltValue + tiltOffset));
#endif
#ifdef VRSL_AUDIOLINK
half angleX = radians(0 + (tiltOffset));
#endif
sX = sin((angleX));
cX = cos((angleX));
half4x4 rotateXMatrix = half4x4(1, 0, 0, 0,
0, cX, sX, 0,
0, -sX, cX, 0,
0, 0, 0, 1);
//half4 fixtureVertexPos = input;
//INVERSION CHECK
rotateXMatrix = IF(checkTiltInvertZ() == 1, transpose(rotateXMatrix), rotateXMatrix);
//half4 localRotX = mul(rotateXMatrix, fixtureVertexPos);
half4x4 rotateXYMatrix = mul(rotateXMatrix, rotateYMatrix);
half4 localRotXY = mul(rotateXYMatrix, input);
input.xyz = localRotXY;
input.xyz += newOrigin;
return input;
}
half2 RotateUV(half2 input, half angle)
{
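// Rotate the UV coordinates about the (0.5, 0.5) centre by the given angle in degrees.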
half2 newOrigin = half2(0.5, 0.5);
input -= newOrigin;
half sinX = sin (radians(angle));
half cosX = cos (radians(angle));
half sinY = sin (radians(angle));
half2x2 rotationMatrix = half2x2( cosX, -sinX, sinY, cosX);
input = mul(input, rotationMatrix);
input += newOrigin;
return input;
}
half4 ChooseProjection(half2 uv, half projChooser)
{
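// _ProjectionMainTex is laid out as a 4x2 atlas of GOBO slices; shrink the UVs to one cell and offset into the cell picked by projChooser (wash fixtures always use slice 1).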
half2 addition = half2(0.0, 0.0);
uv*= half2(0.25, 0.5);
#ifdef WASH
projChooser = 1.0;
#endif
addition = IF(projChooser == 1.0, half2(0.0, 0.5) , addition);
#if !defined(WASH)
addition = IF(projChooser == 2.0, half2(0.25, 0.5), addition);
addition = IF(projChooser == 3.0, half2(0.5, 0.5), addition);
addition = IF(projChooser == 4.0, half2(0.75, 0.5), addition);
addition = IF(projChooser == 5.0, half2(0.0, 0.0) , addition);
addition = IF(projChooser == 6.0, half2(0.25, 0.0), addition);
addition = IF(projChooser == 7.0, half2(0.5, 0.0), addition);
addition = IF(projChooser == 8.0, half2(0.75, 0.0), addition);
#endif
uv.x += addition.x;
uv.y += addition.y;
return tex2D(_ProjectionMainTex, uv);
}
half ChooseProjectionScalar(half coneWidth, half projChooser)
{
//half chooser = IF(isDMX() == 1, selection, instancedGOBOSelection());
half result = _ProjectionUVMod;
result = IF((projChooser) == 1.0, (_ProjectionUVMod * _MinimumBeamRadius), result);
#if !defined(WASH)
result = IF((projChooser) == 2.0, _ProjectionUVMod2 * _MinimumBeamRadius, result);
result = IF((projChooser) == 3.0, _ProjectionUVMod3 * _MinimumBeamRadius, result);
result = IF((projChooser) == 4.0, _ProjectionUVMod4 * _MinimumBeamRadius, result);
result = IF((projChooser) == 5.0, _ProjectionUVMod5 * _MinimumBeamRadius, result);
result = IF((projChooser) == 6.0, _ProjectionUVMod6 * _MinimumBeamRadius, result);
result = IF((projChooser) == 7.0, _ProjectionUVMod7 * _MinimumBeamRadius, result);
result = IF((projChooser) == 8.0, _ProjectionUVMod8 * _MinimumBeamRadius, result);
#endif
// half a = 1.8;
// #ifdef WASH
// a = 3.0;
// #endif
// return result * (clamp(coneWidth, -2.0, 4) + a);
half conewidthControl = coneWidth/4.25;
#ifndef WASH
return result * lerp(0.325, 1, (conewidthControl));
#else
return result * lerp(0.4, 1, (conewidthControl));
#endif
}
fixed4 ProjectionFrag(v2f i) : SV_Target
{
UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(i);
UNITY_SETUP_INSTANCE_ID(i);
if(i.color.g > 0.5)
{
half4 emissionTint = i.emissionColor;
#ifdef VRSL_DMX
half gi = getGlobalIntensity();
half fi = getFinalIntensity();
half coneWidth = i.intensityStrobeWidth.z;
if(((all(i.rgbColor <= half4(0.01,0.01,0.01,1)) || i.intensityStrobeWidth.x <= 0.01) && isDMX() == 1) || gi <= 0.005 || fi <= 0.005 || all(emissionTint <= half4(0.005, 0.005, 0.005, 1)))
{
return half4(0,0,0,0);
}
#endif
#ifdef VRSL_AUDIOLINK
half audioReaction = i.audioGlobalFinalConeIntensity.x;
half gi = i.audioGlobalFinalConeIntensity.y;
half fi = i.audioGlobalFinalConeIntensity.z;
half coneWidth = i.audioGlobalFinalConeIntensity.w;
// if((all(i.rgbColor <= half4(0.01,0.01,0.01,1)) || i.intensityStrobeWidth.x <= 0.01) && isOSC() == 1)
// {
// return (0,0,0,0);
// }
if(audioReaction <= 0.005 || gi <= 0.005 || fi <= 0.005 || all(emissionTint<= half4(0.005, 0.005, 0.005, 1.0)))
{
return half4(0,0,0,0);
}
#endif
#if _ALPHATEST_ON
float2 pos = i.screenPos.xy / i.screenPos.w;
pos *= _ScreenParams.xy;
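// 4x4 ordered-dither (Bayer-style) thresholds used to dither-clip the projection when the AlphaToCoverage render mode (_ALPHATEST_ON) is active.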
half DITHER_THRESHOLDS[16] =
{
1.0 / 17.0, 9.0 / 17.0, 3.0 / 17.0, 11.0 / 17.0,
13.0 / 17.0, 5.0 / 17.0, 15.0 / 17.0, 7.0 / 17.0,
4.0 / 17.0, 12.0 / 17.0, 2.0 / 17.0, 10.0 / 17.0,
16.0 / 17.0, 8.0 / 17.0, 14.0 / 17.0, 6.0 / 17.0
};
int index = (int)((uint(pos.x) % 4) * 4 + uint(pos.y) % 4);
#endif
#ifdef VRSL_DMX
half panValue = i.goboPlusSpinPanTilt.z;
half tiltValue = i.goboPlusSpinPanTilt.w;
uint selection = round(i.goboPlusSpinPanTilt.x);
#endif
//Calculating projection
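// Scale the view-space ray so its z reaches the far clip plane; multiplying it by linearised depth later yields the view-space intersection point.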
i.ray = i.ray * (_ProjectionParams.z / i.ray.z);
float2 screenposUV = i.screenPos.xy / i.screenPos.w;
//CREDIT TO DJ LUKIS FOR MIRROR DEPTH CORRECTION
float perspectiveDivide = 1.0f / i.pos.w;
float4 depthdirect = i.worldDirection * perspectiveDivide;
//float2 altScreenPos = i.screenPos.xy * perspectiveDivide;
//
// #if _MULTISAMPLEDEPTH
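// Sample the depth texture at four neighbouring texels and take the minimum to soften artefacts at depth discontinuities.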
float2 texelSize = _CameraDepthTexture_TexelSize.xy;
float d1 = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, screenposUV + float2(1.0, 0.0) * texelSize);
float d2 = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, screenposUV + float2(-1.0, 0.0) * texelSize);
float d3 = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, screenposUV + float2(0.0, 1.0) * texelSize);
float d4 = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, screenposUV + float2(0.0, -1.0) * texelSize);
float sceneZ = min(d1, min(d2, min(d3, d4)));
// #else
// float sceneZ = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, screenposUV);
// #endif
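// If the depth buffer still holds the clear value (nothing was rendered there), bail out; the clear value depends on whether the platform uses reversed Z.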
#if UNITY_REVERSED_Z
if (sceneZ == 0)
#else
sceneZ = lerp(UNITY_NEAR_CLIP_VALUE, 1, sceneZ);
if (sceneZ == 1)
#endif
return float4(0,0,0,1);
float depth = CorrectedLinearEyeDepth(sceneZ, depthdirect.w);
//Convert from Corrected Linear Eye Depth to Linear01Depth
//Credit: https://www.cyanilux.com/tutorials/depth/#eye-depth
depth = (1.0 - (depth * _ZBufferParams.w)) / (depth * _ZBufferParams.z);
depth = Linear01Depth(depth);
//linearize the depth
float3 objectOrigin = mul(unity_ObjectToWorld, half4(0.0,0.0,0.0,1.0) ).xyz;
//get object origin in world space.
float3 projectionOriginWorld = mul(unity_ObjectToWorld,_ProjectionRangeOrigin);
float3 wpos = getWpos(depth, i.ray);
//convert view space coordinate to world space coordinate.
//Wpos is now coordinates for intersection.
//get the projection in object space
float3 oPos = mul(unity_WorldToObject, float4(wpos,1)).xyz;
if((distance(oPos, _FixtureRotationOrigin) < _ProjectionCutoff) || distance(oPos, float4(0,0,0,0)) < _ProjectionOriginCutoff)
{
//check the distance of rotation origin to the set cutoff value.
//if distance is less than the set value, discard the pixel.
//this is used to prevent the projection from bleeding on to the source fixture mesh.
discard;
}
float distanceFromOrigin = abs(distance(objectOrigin , wpos));
#ifdef VRSL_DMX
float projChooser = IF(isDMX() == 1, selection, instancedGOBOSelection());
#endif
#ifdef VRSL_AUDIOLINK
float projChooser = round(instancedGOBOSelection());
#endif
//Get distance of intersection from the origin in world space
#ifdef VRSL_DMX
float UVscale = 1/(0 + (distanceFromOrigin * (ChooseProjectionScalar(coneWidth, projChooser)) + (0 * (distanceFromOrigin * distanceFromOrigin))));
distanceFromOrigin = lerp(distanceFromOrigin*0.6 +0.65,distanceFromOrigin, saturate(coneWidth));
#endif
#ifdef VRSL_AUDIOLINK
distanceFromOrigin = lerp(distanceFromOrigin*0.6 +0.65,distanceFromOrigin, saturate(coneWidth));
float UVscale = 1/(0 + ((distanceFromOrigin) * ChooseProjectionScalar(coneWidth, projChooser)));
#endif
// invert that distance so that it gets smaller as it gets closer,
// and multiply it by the modifier parameter in case things get wonky.
float3 projPos = getProjPos(wpos);
//position of the intersection fragment in the cone's object space
#ifdef VRSL_DMX
projPos = InvertRotations(float4(projPos,1.0), panValue, tiltValue);
#endif
#ifdef VRSL_AUDIOLINK
projPos = InvertRotations(float4(projPos,1.0), 0, 0);
#endif
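// Project the intersection point onto the fixture's local XY plane, scale by UVscale, and recentre so (0.5, 0.5) lies on the beam axis.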
float2 uvCoords = (((float2((projPos.x), projPos.y) * UVscale)));
uvCoords.x += 0.5;
uvCoords.y += 0.5;
//Get coordinate plane
half2 uvOrigin = half2(0,0);
uvOrigin = (half2(0,0) * UVscale) * (clamp(coneWidth+0.5, -1.0, 4) + 1.6) + 0.5;
#ifdef VRSL_DMX
_SpinSpeed = IF(checkPanInvertY() == 1, -_SpinSpeed, _SpinSpeed);
_SpinSpeed = IF(isDMX() == 1, _SpinSpeed, _SpinSpeed);
uvCoords = IF(isGOBOSpin() == 1 && projChooser > 1.0, RotateUV(uvCoords, degrees(i.goboPlusSpinPanTilt.y)), RotateUV(uvCoords, _ProjectionRotation));
#endif
#ifdef VRSL_AUDIOLINK
half goboSpinSpeed = IF(checkPanInvertY() == 1, -getGoboSpinSpeed(), getGoboSpinSpeed());
uvCoords = IF(isGOBOSpin() == 1 && projChooser > 1.0, RotateUV(uvCoords, _Time.w * ( 10* goboSpinSpeed)), RotateUV(uvCoords, _ProjectionRotation));
#endif
// uvCoords = IF(isGOBOSpin() == 1 && projChooser > 1.0, RotateUV(uvCoords, _Time.w * ( 10* _SpinSpeed)), RotateUV(uvCoords, _ProjectionRotation));
clip(uvCoords);
//Discard any pixels that are outside of the traditional 0-1 UV bounds.
float4 tex = ChooseProjection(uvCoords, projChooser);
float distFromUVOrigin = (abs(distance(uvCoords, uvOrigin)));
// Create an xy coordinate plane based on object space and make sure it scales based on the
// distance from the intersection
//Discard any pixels that are outside of the traditional 0-1 UV bounds in the negative range.
clip(1.0 - uvCoords);
half4 col = tex;
clip(projPos.z);
float projectionDistance = abs(distance(i.projectionorigin.xyz, projPos.xyz));
//Projection Fade
#ifdef _ALPHATEST_ON
col = lerp(col, half4(0,0,0,0), clamp(pow(distFromUVOrigin * (_ProjectionFade-1.0),_ProjectionFadeCurve),0.0,1.0));
#else
col = lerp(col, half4(0,0,0,0), clamp(pow(distFromUVOrigin * _ProjectionFade,_ProjectionFadeCurve),0.0,1.0));
#endif
#ifdef VRSL_DMX
half strobe = IF(isStrobe() == 1, i.intensityStrobeWidth.y, 1);
col = IF(isDMX() == 1 && _EnableStaticEmissionColor == 0, col * i.rgbColor, col);
#endif
#ifdef VRSL_AUDIOLINK
half strobe = 1.0;
#endif
//col = IF(_EnableStaticEmissionColor == 1, col * half4(_StaticEmission.r * _RedMultiplier,_StaticEmission.g * _GreenMultiplier,_StaticEmission.b * _BlueMultiplier,_StaticEmission.a), col);
// project plane on to the world normals in object space in the z direction of the object origin.
half projectionIntensity = _ProjectionIntensity;
#ifdef _ALPHATEST_ON
projectionIntensity += 4.0;
#endif
col = ((col * emissionTint * UVscale * projectionIntensity)) * strobe;
col = col * (1/(_ProjectionDistanceFallOff * (distanceFromOrigin * distanceFromOrigin)));
#ifdef VRSL_AUDIOLINK
col = col * audioReaction;
#endif
col = lerp(half4(0,0,0,col.w), col, gi);
col = lerp(half4(0,0,0,col.w), col, fi);
//half saturation = saturate(RGB2HSV(col)).y;
//col = IF(_EnableStaticEmissionColor == 1, lerp(half4(0,0,0,0), col, saturation), col);
col = IF( _EnableStaticEmissionColor == 1, half4(col.r * _RedMultiplier, col.g * _GreenMultiplier, col.b * _BlueMultiplier, col.a), col);
col *= _UniversalIntensity;
#ifdef _ALPHATEST_ON
clip(col.a - DITHER_THRESHOLDS[index]);
clip((((col.r + col.g + col.b)/3) * (_ClippingThreshold)) - DITHER_THRESHOLDS[index]);
return col;
#else
return col;
#endif
}
else
{
return half4(0,0,0,0);
}
}

View File

@ -1,189 +1,189 @@
// Upgrade NOTE: replaced '_Object2World' with 'unity_ObjectToWorld'
#define IF(a, b, c) lerp(b, c, step((fixed) (a), 0));
float3 getWpos(float depth, float3 ray)
{
float4 vpos = float4(ray * depth, 1);
// scale ray by linearized depth, which gives us the position of the ray
// intersection with the depth buffer in view space. This is the point of intersection in view space.
float3 wpos = (mul(unity_CameraToWorld, vpos).xyz);
//convert view space coordinate to world space coordinate.
//Wpos is now coordinates for intersection.
return wpos;
}
float3 getProjPos(float3 wpos)
{
return (mul(unity_WorldToObject,float4(wpos.x, wpos.y, wpos.z, 1)));
}
inline float CorrectedLinearEyeDepth(float z, float B)
{
return 1.0 / (z/UNITY_MATRIX_P._34 + B);
}
fixed4 ProjectionFrag(v2f i) : SV_Target
{
UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(i);
UNITY_SETUP_INSTANCE_ID(i);
float4 emissionTint = i.emissionColor;
#ifdef VRSL_DMX
float gi = i.globalFinalIntensity.x;
float fi = i.globalFinalIntensity.y;
#ifdef FIVECH
if(((all(i.rgbColor <= float4(0.01,0.01,0.01,1)) || i.intensityStrobe.x <= 0.01) && isDMX() == 1) || gi <= 0.005 || fi <= 0.005 || all(emissionTint <= float4(0.005, 0.005, 0.005, 1.0)))
{
return float4(0,0,0,0);
}
#else
if(((all(i.rgbColor <= float4(0.05,0.05,0.05,1)) || i.intensityStrobe.x <= 0.05) && isDMX() == 1) || gi <= 0.005 || fi <= 0.005 || all(emissionTint <= float4(0.005, 0.005, 0.005, 1.0)))
{
return float4(0,0,0,0);
}
#endif
#endif
#ifdef VRSL_AUDIOLINK
float audioReaction = i.audioGlobalFinalIntensity.x;
float gi = i.audioGlobalFinalIntensity.y;
float fi = i.audioGlobalFinalIntensity.z;
if(audioReaction <= 0.005 || gi <= 0.005 || fi <= 0.005 || all(emissionTint <= float4(0.005, 0.005, 0.005, 1.0)))
{
return half4(0,0,0,0);
}
#endif
#if _ALPHATEST_ON
float2 pos = i.screenPos.xy / i.screenPos.w;
pos *= _ScreenParams.xy;
float DITHER_THRESHOLDS[16] =
{
1.0 / 17.0, 9.0 / 17.0, 3.0 / 17.0, 11.0 / 17.0,
13.0 / 17.0, 5.0 / 17.0, 15.0 / 17.0, 7.0 / 17.0,
4.0 / 17.0, 12.0 / 17.0, 2.0 / 17.0, 10.0 / 17.0,
16.0 / 17.0, 8.0 / 17.0, 14.0 / 17.0, 6.0 / 17.0
};
int index = (int)((uint(pos.x) % 4) * 4 + uint(pos.y) % 4);
#endif
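// The green vertex-colour channel marks the projection volume; fragments from any other geometry fall through to the discard branch at the bottom.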
if(i.color.g != 0)
{
i.ray = i.ray * (_ProjectionParams.z / i.ray.z);
float2 screenposUV = i.screenPos.xy / i.screenPos.w;
//CREDIT TO DJ LUKIS FOR MIRROR DEPTH CORRECTION
float perspectiveDivide = 1.0f / i.pos.w;
float4 direction = i.worldDirection * perspectiveDivide;
float2 altScreenPos = i.screenPos.xy * perspectiveDivide;
#if _MULTISAMPLEDEPTH
float2 texelSize = _CameraDepthTexture_TexelSize.xy;
float d1 = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, screenposUV + float2(1.0, 0.0) * texelSize);
float d2 = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, screenposUV + float2(-1.0, 0.0) * texelSize);
float d3 = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, screenposUV + float2(0.0, 1.0) * texelSize);
float d4 = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, screenposUV + float2(0.0, -1.0) * texelSize);
float sceneZ = min(d1, min(d2, min(d3, d4)));
#else
float sceneZ = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, screenposUV);
#endif
#if UNITY_REVERSED_Z
if (sceneZ == 0)
#else
sceneZ = lerp(UNITY_NEAR_CLIP_VALUE, 1, sceneZ);
if (sceneZ == 1)
#endif
return float4(0,0,0,1);
//Convert to Corrected LinearEyeDepth by DJ Lukis
float depth = CorrectedLinearEyeDepth(sceneZ, direction.w);
//Convert from Corrected Linear Eye Depth to Linear01Depth
//Credit: https://www.cyanilux.com/tutorials/depth/#eye-depth
depth = (1.0 - (depth * _ZBufferParams.w)) / (depth * _ZBufferParams.z);
//Convert to Linear01Depth
depth = Linear01Depth(depth);
float3 objectOrigin = mul(unity_ObjectToWorld, float4(0.0,0.0,0.0,1.0) ).xyz;
//get object origin in world space.
//float3 fragViewPos = float4(i.ray * depth, 1);
float3 wpos = getWpos(depth, i.ray);
float3 projPos = getProjPos(wpos);
float distanceFromOrigin = length(objectOrigin - wpos);
float attenuationDist = length(objectOrigin - wpos);
float f = _Fade;
#if _ALPHATEST_ON
f += 1.0;
#endif
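// Constant/linear/quadratic falloff: _ProjectionDistanceFallOff is the constant term, _Fade the linear term and _FeatherOffset the quadratic term; UVscale below uses the same shape with _ProjectionUVMod as the linear coefficient.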
float attenuation = 1.0 / (_ProjectionDistanceFallOff + f * attenuationDist + _FeatherOffset * (attenuationDist * attenuationDist));
float UVscale = 1/(_ProjectionDistanceFallOff + (distanceFromOrigin * _ProjectionUVMod) + (_FeatherOffset * (distanceFromOrigin * distanceFromOrigin)));
//float3 calculatedWorldNormal = getCalculatedWorldNormal(projPos);
float2 uvCoords = (((float2((projPos.x), projPos.y) * UVscale)));
//uvCoords = mul(uvCoords, projPos.z);
float2 oldUVcoords = uvCoords;
//Get coordinate plane in object space
uvCoords.x += _XOffset;
uvCoords.y += _YOffset;
uvCoords.x *= _ModX;
uvCoords.y *= _ModY;
//uvCoords = normalize(mul(float4(uvCoords, 0.0, 0.0), unity_ObjectToWorld)).xy;
clip(uvCoords);
//Discard any pixels that are outside of the traditional 0-1 UV bounds.
float4 tex = tex2D(_ProjectionMainTex, uvCoords);
//tex = float4(tex.x, tex.y, tex.z, pow(tex.w * distanceFromOrigin, -1));
//tex = pow(tex * distanceFromOrigin, 1);
float distFromUVOrigin = (abs(distance(uvCoords, half2(0,0))));
//calculatedWorldNormal = UnpackNormal(tex2D(_SceneNormals, oldUVcoords));
// Create an xy coordinate plane based on object space and make sure it scales based on the
// distance from the intersection
clip(1.0 - uvCoords);
float4 col = tex;
//float4 col = tex * float4(n,1);
//clip(projPos.z);
#ifdef VRSL_AUDIOLINK
float strobe = 1.0;
col *= audioReaction;
#endif
#ifdef VRSL_DMX
float strobe = IF(isStrobe() == 1, i.intensityStrobe.y, 1);
float4 DMXcol = col;
DMXcol *= i.rgbColor;
col = IF(isDMX() == 1, DMXcol, col);
#endif
float4 result = ((col * UVscale * _ProjectionMaxIntensity) * emissionTint) * strobe;
float fadeRange = (saturate(1-(pow(10, distanceFromOrigin - 2))));
col = (((lerp(result,float4(0,0,0,0), smoothstep(distanceFromOrigin, 0, f))) * gi) * fi) * _UniversalIntensity;
#ifdef _ALPHATEST_ON
col *= _AlphaProjectionIntensity;
clip(col.a - DITHER_THRESHOLDS[index]);
clip((((col.r + col.g + col.b)/3) * (_ClippingThreshold)) - DITHER_THRESHOLDS[index]);
return col;
#else
return col;
#endif
}
else
{
clip(i.pos);
discard;
return float4(0,0,0,0);
}
}