This commit is contained in:
2021-06-13 10:28:03 +02:00
parent eb70603c85
commit df2d24cbd3
7487 changed files with 943244 additions and 0 deletions

View File

@@ -0,0 +1,17 @@
namespace UnityEngine.Experimental.Rendering.Universal
{
    /// <summary>
    /// (Deprecated) An add-on module for Cinemachine Virtual Camera that tweaks the orthographic size
    /// of the virtual camera. It detects the presence of the Pixel Perfect Camera component and use the
    /// settings from that Pixel Perfect Camera to correct the orthographic size so that pixel art
    /// sprites would appear pixel perfect when the virtual camera becomes live.
    /// </summary>
    [AddComponentMenu("")] // Hide in menu
    public class CinemachineUniversalPixelPerfect : MonoBehaviour
    {
        // The component no longer performs any correction; enabling it only
        // reports its own deprecation so users migrate to the Cinemachine version.
        void OnEnable()
        {
            Debug.LogError("CinemachineUniversalPixelPerfect is now deprecated and doesn't function properly. Instead, use the one from Cinemachine v2.4.0 or newer.");
        }
    }
}

View File

@@ -0,0 +1,351 @@
using System;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Serialization;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;
#if UNITY_EDITOR
using UnityEditor.Experimental.SceneManagement;
#endif
namespace UnityEngine.Experimental.Rendering.Universal
{
    /// <summary>
    /// Class <c>Light2D</c> is a 2D light which can be used with the 2D Renderer.
    /// </summary>
    [ExecuteAlways, DisallowMultipleComponent]
    [AddComponentMenu("Rendering/2D/Light 2D")]
    [HelpURL("https://docs.unity3d.com/Packages/com.unity.render-pipelines.universal@latest/index.html?subfolder=/manual/2DLightProperties.html")]
    public sealed partial class Light2D : MonoBehaviour, ISerializationCallbackReceiver
    {
        // Light types no longer offered by the UI but possibly present in old serialized data.
        public enum DeprecatedLightType
        {
            Parametric = 0,
        }

        /// <summary>
        /// an enumeration of the types of light
        /// </summary>
        public enum LightType
        {
            Parametric = 0,
            Freeform = 1,
            Sprite = 2,
            Point = 3,
            Global = 4
        }

        // NOTE(review): explicit values are out of declaration order — presumably
        // chosen to keep previously serialized ints valid. Confirm before renumbering.
        public enum NormalMapQuality
        {
            Disabled = 2,
            Fast = 0,
            Accurate = 1
        }

        // How overlapping lights that share a blend style are combined.
        public enum OverlapOperation
        {
            Additive,
            AlphaBlend
        }

        // Serialized data versions consumed by OnAfterDeserialize to upgrade old assets.
        public enum ComponentVersions
        {
            Version_Unserialized = 0,
            Version_1 = 1
        }

        const ComponentVersions k_CurrentComponentVersion = ComponentVersions.Version_1;
        [SerializeField] ComponentVersions m_ComponentVersion = ComponentVersions.Version_Unserialized;

#if USING_ANIMATION_MODULE
        [UnityEngine.Animations.NotKeyable]
#endif
        [SerializeField] LightType m_LightType = LightType.Point;
        [SerializeField, FormerlySerializedAs("m_LightOperationIndex")]
        int m_BlendStyleIndex = 0;
        [SerializeField] float m_FalloffIntensity = 0.5f;
        [ColorUsage(true)]
        [SerializeField] Color m_Color = Color.white;
        [SerializeField] float m_Intensity = 1;
        [FormerlySerializedAs("m_LightVolumeOpacity")]
        [SerializeField] float m_LightVolumeIntensity = 1.0f;
        [SerializeField] bool m_LightVolumeIntensityEnabled = false;
        [SerializeField] int[] m_ApplyToSortingLayers = new int[1]; // These are sorting layer IDs. If we need to update this at runtime make sure we add code to update global lights
        [Reload("Textures/2D/Sparkle.png")]
        [SerializeField] Sprite m_LightCookieSprite;
        // Keeps the pre-rename cookie data (FormerlySerializedAs) so old point
        // lights retain their sprite; read by lightCookieSprite for Point lights.
        [FormerlySerializedAs("m_LightCookieSprite")]
        [SerializeField] Sprite m_DeprecatedPointLightCookieSprite;
        [SerializeField] int m_LightOrder = 0;
        [SerializeField] OverlapOperation m_OverlapOperation = OverlapOperation.Additive;
        [FormerlySerializedAs("m_PointLightDistance")]
        [SerializeField] float m_NormalMapDistance = 3.0f;
#if USING_ANIMATION_MODULE
        [UnityEngine.Animations.NotKeyable]
#endif
        [FormerlySerializedAs("m_PointLightQuality")]
        [SerializeField] NormalMapQuality m_NormalMapQuality = NormalMapQuality.Disabled;
        [SerializeField] bool m_UseNormalMap = false; // This is now deprecated. Keep it here for backwards compatibility.
        [SerializeField] bool m_ShadowIntensityEnabled = false;
        [Range(0, 1)]
        [SerializeField] float m_ShadowIntensity = 0.75f;
        [SerializeField] bool m_ShadowVolumeIntensityEnabled = false;
        [Range(0, 1)]
        [SerializeField] float m_ShadowVolumeIntensity = 0.75f;

        // Runtime mesh instance; m_Vertices/m_Triangles below cache its data so it
        // can be re-uploaded on load without re-tessellating (see Awake).
        Mesh m_Mesh;
        [SerializeField]
        private LightUtility.LightMeshVertex[] m_Vertices = new LightUtility.LightMeshVertex[1];
        [SerializeField]
        private ushort[] m_Triangles = new ushort[1];

        internal LightUtility.LightMeshVertex[] vertices { get { return m_Vertices; } set { m_Vertices = value; } }
        internal ushort[] indices { get { return m_Triangles; } set { m_Triangles = value; } }

        // Transients
        int m_PreviousLightCookieSprite;
        internal int[] affectedSortingLayers => m_ApplyToSortingLayers;
        // 0 when no cookie sprite is assigned; used for change detection in UpdateMesh.
        private int lightCookieSpriteInstanceID => m_LightCookieSprite?.GetInstanceID() ?? 0;

        [SerializeField]
        Bounds m_LocalBounds;
        internal BoundingSphere boundingSphere { get; private set; }

        // Lazily created mesh this light renders with.
        internal Mesh lightMesh
        {
            get
            {
                if (null == m_Mesh)
                    m_Mesh = new Mesh();
                return m_Mesh;
            }
        }

        // Both cached arrays are initialized with length 1, so length > 1 means a
        // real mesh was previously generated and serialized.
        internal bool hasCachedMesh => (vertices.Length > 1 && indices.Length > 1);

        /// <summary>
        /// The lights current type
        /// </summary>
        public LightType lightType
        {
            get => m_LightType;
            set
            {
                // NOTE(review): UpdateMesh runs before m_LightType is assigned, so the
                // rebuild still sees the old type. LateUpdate calls UpdateMesh(true)
                // every frame, which masks this — confirm the ordering is intentional.
                if (m_LightType != value)
                    UpdateMesh(true);
                m_LightType = value;
                Light2DManager.ErrorIfDuplicateGlobalLight(this);
            }
        }

        /// <summary>
        /// The lights current operation index
        /// </summary>
        public int blendStyleIndex { get => m_BlendStyleIndex; set => m_BlendStyleIndex = value; }

        /// <summary>
        /// Specifies the darkness of the shadow
        /// </summary>
        public float shadowIntensity { get => m_ShadowIntensity; set => m_ShadowIntensity = Mathf.Clamp01(value); }

        /// <summary>
        /// Specifies that the shadows are enabled
        /// </summary>
        public bool shadowsEnabled { get => m_ShadowIntensityEnabled; set => m_ShadowIntensityEnabled = value; }

        /// <summary>
        /// Specifies the darkness of the volumetric shadow
        /// </summary>
        public float shadowVolumeIntensity { get => m_ShadowVolumeIntensity; set => m_ShadowVolumeIntensity = Mathf.Clamp01(value); }

        /// <summary>
        /// Specifies that the volumetric shadows are enabled
        /// </summary>
        public bool volumetricShadowsEnabled { get => m_ShadowVolumeIntensityEnabled; set => m_ShadowVolumeIntensityEnabled = value; }

        /// <summary>
        /// The lights current color
        /// </summary>
        public Color color { get => m_Color; set => m_Color = value; }

        /// <summary>
        /// The lights current intensity
        /// </summary>
        public float intensity { get => m_Intensity; set => m_Intensity = value; }

        /// <summary>
        /// The lights volume intensity (deprecated; use <see cref="volumeIntensity"/>)
        /// </summary>
        [Obsolete]
        public float volumeOpacity => m_LightVolumeIntensity;
        /// <summary>The lights volume intensity.</summary>
        public float volumeIntensity => m_LightVolumeIntensity;
        /// <summary>Enables rendering of the light volume.</summary>
        public bool volumeIntensityEnabled { get => m_LightVolumeIntensityEnabled; set => m_LightVolumeIntensityEnabled = value; }
        // Point lights read the deprecated (pre-rename) cookie field; all other
        // types use the current one.
        public Sprite lightCookieSprite { get { return m_LightType != LightType.Point ? m_LightCookieSprite : m_DeprecatedPointLightCookieSprite; } }
        public float falloffIntensity => m_FalloffIntensity;
        [Obsolete]
        public bool alphaBlendOnOverlap { get { return m_OverlapOperation == OverlapOperation.AlphaBlend; }}
        public OverlapOperation overlapOperation => m_OverlapOperation;
        /// <summary>Sort key within a blend style (see Light2DCullResult sorting).</summary>
        public int lightOrder { get => m_LightOrder; set => m_LightOrder = value; }
        public float normalMapDistance => m_NormalMapDistance;
        public NormalMapQuality normalMapQuality => m_NormalMapQuality;

        // Returns the SortingLayer.value of the top-most sorting layer this light
        // applies to, or Int32.MinValue when it applies to none.
        internal int GetTopMostLitLayer()
        {
            var largestIndex = Int32.MinValue;
            var largestLayer = 0;
            var layers = Light2DManager.GetCachedSortingLayer();
            for (var i = 0; i < m_ApplyToSortingLayers.Length; ++i)
            {
                // Scan from the top of the layer stack down to the best match so far.
                for (var layer = layers.Length - 1; layer >= largestLayer; --layer)
                {
                    if (layers[layer].id == m_ApplyToSortingLayers[i])
                    {
                        largestIndex = layers[layer].value;
                        largestLayer = layer;
                    }
                }
            }
            return largestIndex;
        }

        // Rebuilds the light mesh when forceUpdate is set AND any shape input changed.
        // Note: the CheckForChange calls refresh the cached "previous" values even
        // when forceUpdate is false, so a later forced call may then see no change.
        internal void UpdateMesh(bool forceUpdate)
        {
            var shapePathHash = LightUtility.GetShapePathHash(shapePath);
            var fallOffSizeChanged = LightUtility.CheckForChange(m_ShapeLightFalloffSize, ref m_PreviousShapeLightFalloffSize);
            var parametricRadiusChanged = LightUtility.CheckForChange(m_ShapeLightParametricRadius, ref m_PreviousShapeLightParametricRadius);
            var parametricSidesChanged = LightUtility.CheckForChange(m_ShapeLightParametricSides, ref m_PreviousShapeLightParametricSides);
            var parametricAngleOffsetChanged = LightUtility.CheckForChange(m_ShapeLightParametricAngleOffset, ref m_PreviousShapeLightParametricAngleOffset);
            var spriteInstanceChanged = LightUtility.CheckForChange(lightCookieSpriteInstanceID, ref m_PreviousLightCookieSprite);
            var shapePathHashChanged = LightUtility.CheckForChange(shapePathHash, ref m_PreviousShapePathHash);
            var lightTypeChanged = LightUtility.CheckForChange(m_LightType, ref m_PreviousLightType);
            var hashChanged = fallOffSizeChanged || parametricRadiusChanged || parametricSidesChanged ||
                parametricAngleOffsetChanged || spriteInstanceChanged || shapePathHashChanged || lightTypeChanged;
            // Mesh Rebuilding
            if (hashChanged && forceUpdate)
            {
                switch (m_LightType)
                {
                    case LightType.Freeform:
                        m_LocalBounds = LightUtility.GenerateShapeMesh(this, m_ShapePath, m_ShapeLightFalloffSize);
                        break;
                    case LightType.Parametric:
                        m_LocalBounds = LightUtility.GenerateParametricMesh(this, m_ShapeLightParametricRadius, m_ShapeLightFalloffSize, m_ShapeLightParametricAngleOffset, m_ShapeLightParametricSides);
                        break;
                    case LightType.Sprite:
                        m_LocalBounds = LightUtility.GenerateSpriteMesh(this, m_LightCookieSprite);
                        break;
                    case LightType.Point:
                        // 1.412135 ≈ sqrt(2) with 4 sides — presumably a quad sized to
                        // enclose the unit circle; confirm against GenerateParametricMesh.
                        m_LocalBounds = LightUtility.GenerateParametricMesh(this, 1.412135f, 0, 0, 4);
                        break;
                }
            }
        }

        // Recomputes the world-space bounding sphere used by 2D light culling.
        internal void UpdateBoundingSphere()
        {
            if (isPointLight)
            {
                boundingSphere = new BoundingSphere(transform.position, m_PointLightOuterRadius);
                return;
            }
            // Expand the local bounds by the falloff offset before transforming.
            var maxBound = transform.TransformPoint(Vector3.Max(m_LocalBounds.max, m_LocalBounds.max + (Vector3)m_ShapeLightFalloffOffset));
            var minBound = transform.TransformPoint(Vector3.Min(m_LocalBounds.min, m_LocalBounds.min + (Vector3)m_ShapeLightFalloffOffset));
            var center = 0.5f * (maxBound + minBound);
            var radius = Vector3.Magnitude(maxBound - center);
            boundingSphere = new BoundingSphere(center, radius);
        }

        // True when this light is applied to the given sorting layer ID.
        internal bool IsLitLayer(int layer)
        {
            if (m_ApplyToSortingLayers == null)
                return false;
            for (var i = 0; i < m_ApplyToSortingLayers.Length; i++)
                if (m_ApplyToSortingLayers[i] == layer)
                    return true;
            return false;
        }

        private void Awake()
        {
            // m_UseNormalMap is the deprecated toggle; when it is off, force the
            // new quality setting to Disabled so upgraded assets behave the same.
            if (!m_UseNormalMap && m_NormalMapQuality != NormalMapQuality.Disabled)
                m_NormalMapQuality = NormalMapQuality.Disabled;
            // Only force a tessellation when no serialized mesh data exists;
            // otherwise re-upload the cached vertices/indices directly.
            UpdateMesh(!hasCachedMesh);
            if (hasCachedMesh)
            {
                lightMesh.SetVertexBufferParams(vertices.Length, LightUtility.LightMeshVertex.VertexLayout);
                lightMesh.SetVertexBufferData(vertices, 0, 0, vertices.Length);
                lightMesh.SetIndices(indices, MeshTopology.Triangles, 0, false);
            }
        }

        void OnEnable()
        {
            // Seed cookie change-detection so the first UpdateMesh doesn't see a bogus change.
            m_PreviousLightCookieSprite = lightCookieSpriteInstanceID;
            Light2DManager.RegisterLight(this);
        }

        private void OnDisable()
        {
            Light2DManager.DeregisterLight(this);
        }

        private void LateUpdate()
        {
            // Global lights have no mesh or bounds to maintain.
            if (m_LightType == LightType.Global)
                return;
            UpdateMesh(true);
            UpdateBoundingSphere();
        }

        public void OnBeforeSerialize()
        {
            m_ComponentVersion = k_CurrentComponentVersion;
        }

        public void OnAfterDeserialize()
        {
            // Upgrade from no serialized version
            if (m_ComponentVersion == ComponentVersions.Version_Unserialized)
            {
                // Pre-versioned assets had no explicit enable flags; derive them
                // from whether the corresponding intensity was non-zero.
                m_ShadowVolumeIntensityEnabled = m_ShadowVolumeIntensity > 0;
                m_ShadowIntensityEnabled = m_ShadowIntensity > 0;
                m_LightVolumeIntensityEnabled = m_LightVolumeIntensity > 0;
                m_ComponentVersion = ComponentVersions.Version_1;
            }
        }
    }
}

View File

@@ -0,0 +1,36 @@
using System.Collections.Generic;
namespace UnityEngine.Experimental.Rendering.Universal
{
    // Editor-only partial: scene-view gizmo drawing and default shape setup.
    public sealed partial class Light2D
    {
#if UNITY_EDITOR
        private const string s_IconsPath = "Packages/com.unity.render-pipelines.universal/Editor/2D/Resources/SceneViewIcons/";
        // Indexed by (int)m_LightType, so entries must stay aligned with the
        // LightType enum; the Point icon is reused for Global (index 4).
        private static readonly string[] s_LightIconFileNames = new[]
        {
            "ParametricLight.png",
            "FreeformLight.png",
            "SpriteLight.png",
            "PointLight.png",
            "PointLight.png"
        };

        private void OnDrawGizmos()
        {
            Gizmos.color = Color.blue;
            Gizmos.DrawIcon(transform.position, s_IconsPath + s_LightIconFileNames[(int)m_LightType], true);
        }

        // Default freeform shape: a unit square centered on the origin.
        void Reset()
        {
            m_ShapePath = new Vector3[] { new Vector3(-0.5f, -0.5f), new Vector3(0.5f, -0.5f), new Vector3(0.5f, 0.5f), new Vector3(-0.5f, 0.5f) };
        }

        // Outline of the falloff region, used by the inspector preview.
        internal List<Vector2> GetFalloffShape()
        {
            return LightUtility.GetOutlinePath(m_ShapePath, m_ShapeLightFalloffSize);
        }
#endif
    }
}

View File

@@ -0,0 +1,122 @@
using System;
using UnityEngine.Rendering.Universal;
using UnityEngine.Scripting.APIUpdating;
using UnityEngine.Serialization;
namespace UnityEngine.Experimental.Rendering.Universal
{
/// <summary>
/// Serialized settings for one 2D light blend style: which mask-texture channel
/// it samples and how lights using it are blended into the light texture.
/// </summary>
[Serializable]
[MovedFrom("UnityEngine.Experimental.Rendering.LWRP")] public struct Light2DBlendStyle
{
    // Mask channel selection, including one-minus (inverted) variants.
    internal enum TextureChannel
    {
        None = 0,
        R = 1,
        G = 2,
        B = 3,
        A = 4,
        OneMinusR = 5,
        OneMinusG = 6,
        OneMinusB = 7,
        OneMinusA = 8
    }

    // Channel-select mask plus a parallel inversion mask, both as shader-ready Vector4s.
    internal struct MaskChannelFilter
    {
        public Vector4 mask { get; private set; }
        public Vector4 inverted { get; private set; }

        public MaskChannelFilter(Vector4 m, Vector4 i)
        {
            mask = m;
            inverted = i;
        }
    }

    internal enum BlendMode
    {
        Additive = 0,
        Multiply = 1,
        Subtractive = 2
    }

    [Serializable]
    internal struct BlendFactors
    {
        public float multiplicative;
        public float additive;
    }

    public string name;

    [SerializeField]
    internal TextureChannel maskTextureChannel;

    [SerializeField]
    internal BlendMode blendMode;

    // (multiplicative, additive) blend factor pair for the current blend mode.
    // Unknown modes fall back to multiplicative, matching Multiply.
    internal Vector2 blendFactors
    {
        get
        {
            switch (blendMode)
            {
                case BlendMode.Additive:
                    return new Vector2(0.0f, 1.0f);
                case BlendMode.Multiply:
                    return new Vector2(1.0f, 0.0f);
                case BlendMode.Subtractive:
                    return new Vector2(0.0f, -1.0f);
                default:
                    return new Vector2(1.0f, 0.0f);
            }
        }
    }

    // Maps the selected channel to its select/invert vector pair; the OneMinus
    // variants use the same select mask with the matching inversion bit set.
    internal MaskChannelFilter maskTextureChannelFilter
    {
        get
        {
            switch (maskTextureChannel)
            {
                case TextureChannel.R:
                    return new MaskChannelFilter(new Vector4(1, 0, 0, 0), new Vector4(0, 0, 0, 0));
                case TextureChannel.OneMinusR:
                    return new MaskChannelFilter(new Vector4(1, 0, 0, 0), new Vector4(1, 0, 0, 0));
                case TextureChannel.G:
                    return new MaskChannelFilter(new Vector4(0, 1, 0, 0), new Vector4(0, 0, 0, 0));
                case TextureChannel.OneMinusG:
                    return new MaskChannelFilter(new Vector4(0, 1, 0, 0), new Vector4(0, 1, 0, 0));
                case TextureChannel.B:
                    return new MaskChannelFilter(new Vector4(0, 0, 1, 0), new Vector4(0, 0, 0, 0));
                case TextureChannel.OneMinusB:
                    return new MaskChannelFilter(new Vector4(0, 0, 1, 0), new Vector4(0, 0, 1, 0));
                case TextureChannel.A:
                    return new MaskChannelFilter(new Vector4(0, 0, 0, 1), new Vector4(0, 0, 0, 0));
                case TextureChannel.OneMinusA:
                    return new MaskChannelFilter(new Vector4(0, 0, 0, 1), new Vector4(0, 0, 0, 1));
                case TextureChannel.None:
                default:
                    return new MaskChannelFilter(Vector4.zero, Vector4.zero);
            }
        }
    }

    // Transient data
    internal bool isDirty { get; set; }
    internal bool hasRenderTarget { get; set; }
    internal RenderTargetHandle renderTargetHandle;
}
}

View File

@@ -0,0 +1,112 @@
using System.Collections.Generic;
using Unity.Mathematics;
using UnityEngine.Profiling;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;
namespace UnityEngine.Experimental.Rendering.Universal
{
// Aggregate per-layer light usage, produced by ILight2DCullResult.GetLightStatsByLayer.
internal struct LightStats
{
    // Number of visible lights lighting the layer.
    public int totalLights;
    // Of those, how many use a non-disabled normal-map quality.
    public int totalNormalMapUsage;
    // Of those, how many have a volume intensity > 0.
    public int totalVolumetricUsage;
    // Bit per blend style index used by any light on the layer.
    public uint blendStylesUsed;
    // Bit per blend style index used by a non-global light on the layer.
    public uint blendStylesWithLights;
}
// Read-only view of a 2D light culling pass.
internal interface ILight2DCullResult
{
    // Lights that survived culling for the current camera.
    List<Light2D> visibleLights { get; }
    // Usage statistics for the given sorting layer ID.
    LightStats GetLightStatsByLayer(int layer);
    // True if anything (visible or global light) can light the scene.
    bool IsSceneLit();
}
// Computes and stores the set of Light2D instances visible to a camera.
internal class Light2DCullResult : ILight2DCullResult
{
    private List<Light2D> m_VisibleLights = new List<Light2D>();
    public List<Light2D> visibleLights => m_VisibleLights;

    /// <summary>
    /// True when any light can affect the scene: either a light survived
    /// culling, or any enabled global light exists.
    /// </summary>
    public bool IsSceneLit()
    {
        if (visibleLights.Count > 0)
            return true;
        foreach (var light in Light2DManager.lights)
        {
            if (light.lightType == Light2D.LightType.Global)
                return true;
        }
        return false;
    }

    /// <summary>
    /// Aggregates usage statistics over the visible lights that light the
    /// given sorting layer ID.
    /// </summary>
    public LightStats GetLightStatsByLayer(int layer)
    {
        var returnStats = new LightStats();
        foreach (var light in visibleLights)
        {
            if (!light.IsLitLayer(layer))
                continue;

            returnStats.totalLights++;
            if (light.normalMapQuality != Light2D.NormalMapQuality.Disabled)
                returnStats.totalNormalMapUsage++;
            if (light.volumeIntensity > 0)
                returnStats.totalVolumetricUsage++;

            returnStats.blendStylesUsed |= (uint)(1 << light.blendStyleIndex);
            if (light.lightType != Light2D.LightType.Global)
                returnStats.blendStylesWithLights |= (uint)(1 << light.blendStyleIndex);
        }
        return returnStats;
    }

    /// <summary>
    /// Culls every registered light against the camera's layer mask and
    /// culling planes. Global lights always pass; others are tested via their
    /// bounding sphere. The result is sorted by lightOrder.
    /// </summary>
    public void SetupCulling(ref ScriptableCullingParameters cullingParameters, Camera camera)
    {
        Profiler.BeginSample("Cull 2D Lights");
        m_VisibleLights.Clear();
        foreach (var light in Light2DManager.lights)
        {
            if ((camera.cullingMask & (1 << light.gameObject.layer)) == 0)
                continue;

#if UNITY_EDITOR
            // Skip lights that belong to a different prefab/preview stage.
            if (!UnityEditor.SceneManagement.StageUtility.IsGameObjectRenderedByCamera(light.gameObject, camera))
                continue;
#endif

            if (light.lightType == Light2D.LightType.Global)
            {
                m_VisibleLights.Add(light);
                continue;
            }

            Profiler.BeginSample("Test Planes");
            var position = light.boundingSphere.position;
            var culled = false;
            for (var i = 0; i < cullingParameters.cullingPlaneCount; ++i)
            {
                var plane = cullingParameters.GetCullingPlane(i);
                // most of the time is spent getting world position
                var distance = math.dot(position, plane.normal) + plane.distance;
                if (distance < -light.boundingSphere.radius)
                {
                    culled = true;
                    break;
                }
            }
            Profiler.EndSample();
            if (culled)
                continue;
            m_VisibleLights.Add(light);
        }

        // must be sorted here because light order could change
        // Use CompareTo rather than subtraction: (a - b) can overflow Int32 for
        // extreme lightOrder values and yield an inconsistent comparison.
        m_VisibleLights.Sort((l1, l2) => l1.lightOrder.CompareTo(l2.lightOrder));
        Profiler.EndSample();
    }
}
}

View File

@@ -0,0 +1,121 @@
using System.Collections.Generic;
#if UNITY_EDITOR
using UnityEditor.Experimental.SceneManagement;
#endif
namespace UnityEngine.Experimental.Rendering.Universal
{
// Central registry of enabled Light2D components; also answers global-light
// color queries and validates global-light uniqueness per layer/blend style.
internal static class Light2DManager
{
    private static SortingLayer[] s_SortingLayers;

    // Every currently enabled Light2D (see Light2D.OnEnable/OnDisable).
    public static List<Light2D> lights { get; } = new List<Light2D>();

    // Called during OnEnable
    public static void RegisterLight(Light2D light)
    {
        Debug.Assert(!lights.Contains(light));
        lights.Add(light);
        ErrorIfDuplicateGlobalLight(light);
    }

    // Called during OnDisable
    public static void DeregisterLight(Light2D light)
    {
        Debug.Assert(lights.Contains(light));
        lights.Remove(light);
    }

    // Logs an error if another global light already covers one of this light's
    // sorting layers with the same blend style. No-op for non-global lights.
    public static void ErrorIfDuplicateGlobalLight(Light2D light)
    {
        if (light.lightType != Light2D.LightType.Global)
            return;

        foreach (var sortingLayer in light.affectedSortingLayers)
        {
            // should this really trigger at runtime?
            if (ContainsDuplicateGlobalLight(sortingLayer, light.blendStyleIndex))
                Debug.LogError("More than one global light on layer " + SortingLayer.IDToName(sortingLayer) + " for light blend style index " + light.blendStyleIndex);
        }
    }

    // Finds the global color (color * intensity) for a layer/blend style.
    // A global light inside the current prefab stage wins immediately; otherwise
    // the first matching light found is used. Returns false if none match.
    public static bool GetGlobalColor(int sortingLayerIndex, int blendStyleIndex, out Color color)
    {
        var foundGlobalColor = false;
        color = Color.black;

        // This should be rewritten to search only global lights
        foreach (var light in lights)
        {
            if (light.lightType != Light2D.LightType.Global ||
                light.blendStyleIndex != blendStyleIndex ||
                !light.IsLitLayer(sortingLayerIndex))
                continue;

            var inCurrentPrefabStage = true;
#if UNITY_EDITOR
            // If we found the first global light in our prefab stage
            inCurrentPrefabStage = PrefabStageUtility.GetCurrentPrefabStage()?.IsPartOfPrefabContents(light.gameObject) ?? true;
#endif

            if (inCurrentPrefabStage)
            {
                color = light.color * light.intensity;
                return true;
            }
            else
            {
                if (!foundGlobalColor)
                {
                    color = light.color * light.intensity;
                    foundGlobalColor = true;
                }
            }
        }

        return foundGlobalColor;
    }

    // True when two or more global lights (in the current prefab stage, in the
    // editor) light the same layer with the same blend style.
    private static bool ContainsDuplicateGlobalLight(int sortingLayerIndex, int blendStyleIndex)
    {
        var globalLightCount = 0;

        // This should be rewritten to search only global lights
        foreach (var light in lights)
        {
            if (light.lightType == Light2D.LightType.Global &&
                light.blendStyleIndex == blendStyleIndex &&
                light.IsLitLayer(sortingLayerIndex))
            {
#if UNITY_EDITOR
                // If we found the first global light in our prefab stage
                if (PrefabStageUtility.GetPrefabStage(light.gameObject) == PrefabStageUtility.GetCurrentPrefabStage())
#endif
                {
                    if (globalLightCount > 0)
                        return true;

                    globalLightCount++;
                }
            }
        }

        return false;
    }

    // Cached copy of SortingLayer.layers; refreshed every call while in the
    // editor (layers can be edited), cached once when playing.
    public static SortingLayer[] GetCachedSortingLayer()
    {
        if (s_SortingLayers is null)
        {
            s_SortingLayers = SortingLayer.layers;
        }
#if UNITY_EDITOR
        // we should fix. Make a non allocating version of this
        if (!Application.isPlaying)
            s_SortingLayers = SortingLayer.layers;
#endif
        return s_SortingLayers;
    }
}
}

View File

@@ -0,0 +1,46 @@
using System;
using UnityEngine.Scripting.APIUpdating;
namespace UnityEngine.Experimental.Rendering.Universal
{
// Point-light specific portion of Light2D: cone angles, radii and the
// deprecated normal-map aliases.
public sealed partial class Light2D
{
    [SerializeField] float m_PointLightInnerAngle = 360.0f;
    [SerializeField] float m_PointLightOuterAngle = 360.0f;
    [SerializeField] float m_PointLightInnerRadius = 0.0f;
    [SerializeField] float m_PointLightOuterRadius = 1.0f;

    /// <summary>Inner angle of the point light (defaults to a full 360).</summary>
    public float pointLightInnerAngle
    {
        get { return m_PointLightInnerAngle; }
        set { m_PointLightInnerAngle = value; }
    }

    /// <summary>Outer angle of the point light (defaults to a full 360).</summary>
    public float pointLightOuterAngle
    {
        get { return m_PointLightOuterAngle; }
        set { m_PointLightOuterAngle = value; }
    }

    /// <summary>Inner radius of the point light.</summary>
    public float pointLightInnerRadius
    {
        get { return m_PointLightInnerRadius; }
        set { m_PointLightInnerRadius = value; }
    }

    /// <summary>Outer radius of the point light.</summary>
    public float pointLightOuterRadius
    {
        get { return m_PointLightOuterRadius; }
        set { m_PointLightOuterRadius = value; }
    }

    /// <summary>Deprecated alias for <see cref="normalMapDistance"/>.</summary>
    [Obsolete("pointLightDistance has been changed to normalMapDistance", true)]
    public float pointLightDistance
    {
        get { return m_NormalMapDistance; }
    }

    /// <summary>Deprecated alias for <see cref="normalMapQuality"/>.</summary>
    [Obsolete("pointLightQuality has been changed to normalMapQuality", true)]
    public NormalMapQuality pointLightQuality
    {
        get { return m_NormalMapQuality; }
    }

    internal bool isPointLight
    {
        get { return m_LightType == LightType.Point; }
    }
}
}

View File

@@ -0,0 +1,35 @@
namespace UnityEngine.Experimental.Rendering.Universal
{
// Shape/parametric portion of Light2D: serialized shape parameters, the
// change-tracking caches consumed by UpdateMesh, and their accessors.
public sealed partial class Light2D
{
    [SerializeField] int m_ShapeLightParametricSides = 5;
    [SerializeField] float m_ShapeLightParametricAngleOffset = 0.0f;
    [SerializeField] float m_ShapeLightParametricRadius = 1.0f;
    [SerializeField] float m_ShapeLightFalloffSize = 0.50f;
    [SerializeField] Vector2 m_ShapeLightFalloffOffset = Vector2.zero;
    [SerializeField] Vector3[] m_ShapePath = null;

    // Previous values cached by LightUtility.CheckForChange; -1 forces the
    // first comparison to register a change.
    float m_PreviousShapeLightFalloffSize = -1;
    int m_PreviousShapeLightParametricSides = -1;
    float m_PreviousShapeLightParametricAngleOffset = -1;
    float m_PreviousShapeLightParametricRadius = -1;
    int m_PreviousShapePathHash = -1;
    LightType m_PreviousLightType = LightType.Parametric;

    /// <summary>Number of sides of the parametric shape.</summary>
    public int shapeLightParametricSides
    {
        get { return m_ShapeLightParametricSides; }
    }

    /// <summary>Angle offset applied to the parametric shape.</summary>
    public float shapeLightParametricAngleOffset
    {
        get { return m_ShapeLightParametricAngleOffset; }
    }

    /// <summary>Radius of the parametric shape.</summary>
    public float shapeLightParametricRadius
    {
        get { return m_ShapeLightParametricRadius; }
    }

    /// <summary>Size of the falloff region around the shape.</summary>
    public float shapeLightFalloffSize
    {
        get { return m_ShapeLightFalloffSize; }
    }

    /// <summary>Path of the freeform shape; settable only within the package.</summary>
    public Vector3[] shapePath
    {
        get => m_ShapePath;
        internal set => m_ShapePath = value;
    }

    internal void SetShapePath(Vector3[] path) => m_ShapePath = path;
}
}

View File

@@ -0,0 +1,8 @@
using System;
namespace UnityEngine.Experimental.Rendering.LWRP
{
// Empty placeholder kept so the script updater (the "UnityUpgradable" tag)
// can rewrite old LWRP references to the Universal namespace.
[Obsolete("LWRP -> Universal (UnityUpgradable) -> UnityEngine.Experimental.Rendering.Universal.Light2D", true)]
public class Light2D
{}
}

View File

@@ -0,0 +1,522 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Unity.Collections;
using Unity.Mathematics;
using UnityEngine.Experimental.Rendering.Universal.LibTessDotNet;
using UnityEngine.Rendering;
using UnityEngine.U2D;
namespace UnityEngine.Experimental.Rendering.Universal
{
internal static class LightUtility
{
/// <summary>Caches <paramref name="a"/> into <paramref name="b"/> and reports whether the value differed.</summary>
public static bool CheckForChange(Light2D.LightType a, ref Light2D.LightType b)
{
    bool valueChanged = b != a;
    b = a;
    return valueChanged;
}
/// <summary>Caches <paramref name="a"/> into <paramref name="b"/> and reports whether the value differed.</summary>
public static bool CheckForChange(int a, ref int b)
{
    bool valueChanged = b != a;
    b = a;
    return valueChanged;
}
/// <summary>Caches <paramref name="a"/> into <paramref name="b"/> and reports whether the value differed.</summary>
public static bool CheckForChange(float a, ref float b)
{
    bool valueChanged = b != a;
    b = a;
    return valueChanged;
}
/// <summary>Caches <paramref name="a"/> into <paramref name="b"/> and reports whether the value differed.</summary>
public static bool CheckForChange(bool a, ref bool b)
{
    bool valueChanged = b != a;
    b = a;
    return valueChanged;
}
// Classification of falloff-outline pivot points. NOTE(review): this enum is
// not referenced in the code visible here — presumably reserved for the pivot
// repair pass (FixPivots); confirm before removing.
private enum PivotType
{
    PivotBase,
    PivotCurve,
    PivotIntersect,
    PivotSkip,
    PivotClip
};
// Vertex format for generated light meshes. Field order matches VertexLayout,
// which is passed to Mesh.SetVertexBufferParams by Light2D and TransferToMesh.
[Serializable]
internal struct LightMeshVertex
{
    public Vector3 position;
    public Color color;
    public Vector2 uv;

    // position (float3), color (float4), uv (float2) — must stay in sync with
    // the fields above.
    public static readonly VertexAttributeDescriptor[] VertexLayout = new[]
    {
        new VertexAttributeDescriptor(VertexAttribute.Position, VertexAttributeFormat.Float32, 3),
        new VertexAttributeDescriptor(VertexAttribute.Color, VertexAttributeFormat.Float32, 4),
        new VertexAttributeDescriptor(VertexAttribute.TexCoord0, VertexAttributeFormat.Float32, 2),
    };
}
/// <summary>
/// Runs libtess on the contours already added to <paramref name="tess"/> and
/// appends the resulting geometry to the supplied buffers.
/// </summary>
/// <param name="tess">Tessellator with its contours already added.</param>
/// <param name="boundaryType">Element type forwarded to libtess.</param>
/// <param name="indices">Destination index buffer, written from ICount onward.</param>
/// <param name="vertices">Destination vertex buffer, written from VCount onward.</param>
/// <param name="c">Color assigned to every generated vertex.</param>
/// <param name="VCount">Running vertex count; advanced in place.</param>
/// <param name="ICount">Running index count; advanced in place.</param>
static void Tessellate(Tess tess, ElementType boundaryType, NativeArray<ushort> indices,
    NativeArray<LightMeshVertex> vertices, Color c, ref int VCount, ref int ICount)
{
    tess.Tessellate(WindingRule.NonZero, boundaryType, 3);

    // Indices produced by libtess are relative to the vertices emitted by this
    // call, so offset them by the vertex count present before we appended.
    var prevCount = VCount;

    // Copy directly instead of going through LINQ: the original identity
    // projection (Elements.Select(i => i)) and deferred vertex Select only
    // added iterator allocations without changing the result.
    foreach (var v in tess.Vertices)
    {
        vertices[VCount++] = new LightMeshVertex()
        {
            position = new float3(v.Position.X, v.Position.Y, 0),
            color = c
        };
    }
    foreach (var i in tess.Elements)
        indices[ICount++] = (ushort)(i + prevCount);
}
// Returns true when any point at or after activePoint maps to an input index
// (N) beyond lastPoint; otherwise reports whether the active point itself is
// unmapped (N == -1, presumably a point introduced by the Clipper offset).
static bool TestPivot(List<IntPoint> path, int activePoint, long lastPoint)
{
    int index = activePoint;
    while (index < path.Count)
    {
        if (path[index].N > lastPoint)
            return true;
        ++index;
    }
    return (path[activePoint].N == -1);
}
// Degenerate Pivots at the End Points.
static List<IntPoint> DegeneratePivots(List<IntPoint> path, List<IntPoint> inPath)
{
List<IntPoint> degenerate = new List<IntPoint>();
var minN = path[0].N;
var maxN = path[0].N;
for (int i = 1; i < path.Count; ++i)
{
if (path[i].N != -1)
{
minN = Math.Min(minN, path[i].N);
maxN = Math.Max(maxN, path[i].N);
}
}
for (long i = 0; i < minN; ++i)
{
IntPoint ins = path[(int)minN];
ins.N = i;
degenerate.Add(ins);
}
degenerate.AddRange(path.GetRange(0, path.Count));
for (long i = maxN + 1; i < inPath.Count; ++i)
{
IntPoint ins = inPath[(int)i];
ins.N = i;
degenerate.Add(ins);
}
return degenerate;
}
// Ensure that we get a valid path from 0.
static List<IntPoint> SortPivots(List<IntPoint> outPath, List<IntPoint> inPath)
{
List<IntPoint> sorted = new List<IntPoint>();
var min = outPath[0].N;
var max = outPath[0].N;
var minIndex = 0;
bool newMin = true;
for (int i = 1; i < outPath.Count; ++i)
{
if (max > outPath[i].N && newMin && outPath[i].N != -1)
{
min = max = outPath[i].N;
minIndex = i;
newMin = false;
}
else if (outPath[i].N >= max)
{
max = outPath[i].N;
newMin = true;
}
}
sorted.AddRange(outPath.GetRange(minIndex, (outPath.Count - minIndex)));
sorted.AddRange(outPath.GetRange(0, minIndex));
return sorted;
}
// Ensure that all points eliminated due to overlaps and intersections are accounted for Tessellation.
static List<IntPoint> FixPivots(List<IntPoint> outPath, List<IntPoint> inPath)
{
var path = SortPivots(outPath, inPath);
long pivotPoint = path[0].N;
// Connect Points for Overlaps.
for (int i = 1; i < path.Count; ++i)
{
var j = (i == path.Count - 1) ? 0 : (i + 1);
var prev = path[i - 1];
var curr = path[i];
var next = path[j];
if (prev.N > curr.N)
{
var incr = TestPivot(path, i, pivotPoint);
if (incr)
{
if (prev.N == next.N)
curr.N = prev.N;
else
curr.N = (pivotPoint + 1) < inPath.Count ? (pivotPoint + 1) : 0;
curr.D = 3;
path[i] = curr;
}
}
pivotPoint = path[i].N;
}
// Insert Skipped Points.
for (int i = 1; i < path.Count - 1;)
{
var prev = path[i - 1];
var curr = path[i];
var next = path[i + 1];
if (curr.N - prev.N > 1)
{
if (curr.N == next.N)
{
IntPoint ins = curr;
ins.N = (ins.N - 1);
path[i] = ins;
}
else
{
IntPoint ins = curr;
ins.N = (ins.N - 1);
path.Insert(i, ins);
}
}
else
{
i++;
}
}
path = DegeneratePivots(path, inPath);
return path;
}
// Rough shape only used in Inspector for quick preview.
internal static List<Vector2> GetOutlinePath(Vector3[] shapePath, float offsetDistance)
{
const float kClipperScale = 10000.0f;
List<IntPoint> path = new List<IntPoint>();
List<Vector2> output = new List<Vector2>();
for (var i = 0; i < shapePath.Length; ++i)
{
var newPoint = new Vector2(shapePath[i].x, shapePath[i].y) * kClipperScale;
path.Add(new IntPoint((System.Int64)(newPoint.x), (System.Int64)(newPoint.y)));
}
List<List<IntPoint>> solution = new List<List<IntPoint>>();
ClipperOffset clipOffset = new ClipperOffset(2048.0f);
clipOffset.AddPath(path, JoinType.jtRound, EndType.etClosedPolygon);
clipOffset.Execute(ref solution, kClipperScale * offsetDistance, path.Count);
if (solution.Count > 0)
{
for (int i = 0; i < solution[0].Count; ++i)
output.Add(new Vector2(solution[0][i].X / kClipperScale, solution[0][i].Y / kClipperScale));
}
return output;
}
// Uploads the generated geometry to the light's Mesh and copies it into the
// light's serialized vertex/index arrays so it can be restored without
// re-tessellating (see Light2D.Awake / hasCachedMesh).
static void TransferToMesh(NativeArray<LightMeshVertex> vertices, int vertexCount, NativeArray<ushort> indices,
    int indexCount, Light2D light)
{
    var mesh = light.lightMesh;
    mesh.SetVertexBufferParams(vertexCount, LightMeshVertex.VertexLayout);
    mesh.SetVertexBufferData(vertices, 0, 0, vertexCount);
    mesh.SetIndices(indices, 0, indexCount, MeshTopology.Triangles, 0, true);

    // Cache for serialization.
    light.vertices = new LightMeshVertex[vertexCount];
    NativeArray<LightMeshVertex>.Copy(vertices, light.vertices, vertexCount);
    light.indices = new ushort[indexCount];
    NativeArray<ushort>.Copy(indices, light.indices, indexCount);
}
/// <summary>
/// Builds the mesh for a freeform light: the interior polygon is tessellated
/// with libtess, then a falloff rim (alpha fading to 0) is generated by
/// offsetting the outline with Clipper and stitching rim quads back to the
/// interior vertices. Returns the mesh's local bounds.
/// </summary>
public static Bounds GenerateShapeMesh(Light2D light, Vector3[] shapePath, float falloffDistance)
{
    var ix = 0;
    var vcount = 0;
    var icount = 0;
    const float kClipperScale = 10000.0f;

    var mesh = light.lightMesh;

    // todo Revisit this while we do Batching.
    // Only the alpha channel matters here: interior opaque, falloff rim transparent.
    var meshInteriorColor = new Color(0.0f, 0, 0, 1.0f);
    var meshExteriorColor = new Color(0.0f, 0, 0, 0.0f);
    // Scratch buffers sized 256 entries per input point — presumably a
    // worst-case bound for the Clipper/libtess output; confirm.
    var vertices = new NativeArray<LightMeshVertex>(shapePath.Length * 256, Allocator.Temp);
    var indices = new NativeArray<ushort>(shapePath.Length * 256, Allocator.Temp);

    // Create shape geometry
    var inputPointCount = shapePath.Length;
    var inner = new ContourVertex[inputPointCount + 1];
    for (var i = 0; i < inputPointCount; ++i)
        inner[ix++] = new ContourVertex() { Position = new Vec3() { X = shapePath[i].x, Y = shapePath[i].y, Z = 0 } };
    // Close the contour by repeating the first point.
    inner[ix++] = inner[0];

    var tess = new Tess();
    tess.AddContour(inner, ContourOrientation.CounterClockwise);
    Tessellate(tess, ElementType.Polygons, indices, vertices, meshInteriorColor, ref vcount, ref icount);

    // Create falloff geometry
    List<IntPoint> path = new List<IntPoint>();
    for (var i = 0; i < inputPointCount; ++i)
    {
        var newPoint = new Vector2(inner[i].Position.X, inner[i].Position.Y) * kClipperScale;
        var addPoint = new IntPoint((System.Int64)(newPoint.x), (System.Int64)(newPoint.y));
        // Tag each Clipper point with its originating input index (N).
        addPoint.N = i; addPoint.D = -1;
        path.Add(addPoint);
    }
    var lastPointIndex = inputPointCount - 1;

    // Generate Bevels.
    List<List<IntPoint>> solution = new List<List<IntPoint>>();
    ClipperOffset clipOffset = new ClipperOffset(24.0f);
    clipOffset.AddPath(path, JoinType.jtRound, EndType.etClosedPolygon);
    clipOffset.Execute(ref solution, kClipperScale * falloffDistance, path.Count);
    if (solution.Count > 0)
    {
        // Fix path for Pivots.
        var outPath = solution[0];
        // Smallest surviving input index in the offset output; 0 means the
        // start point survived the offset.
        var minPath = (long)inputPointCount;
        for (int i = 0; i < outPath.Count; ++i)
            minPath = (outPath[i].N != -1) ? Math.Min(minPath, outPath[i].N) : minPath;
        var containsStart = minPath == 0;
        outPath = FixPivots(outPath, path);

        // Tessellate.
        // Combined buffers: interior geometry first, then rim vertices/indices.
        var bIndices = new NativeArray<ushort>(icount + (outPath.Count * 6) + 6, Allocator.Temp);
        for (int i = 0; i < icount; ++i)
            bIndices[i] = indices[i];
        var bVertices = new NativeArray<LightMeshVertex>(vcount + outPath.Count + inputPointCount, Allocator.Temp);
        for (int i = 0; i < vcount; ++i)
            bVertices[i] = vertices[i];

        var innerIndices = new ushort[inputPointCount];

        // Inner Vertices. (These may or may not be part of the created path. Beware!!)
        for (int i = 0; i < inputPointCount; ++i)
        {
            bVertices[vcount++] = new LightMeshVertex()
            {
                position = new float3(inner[i].Position.X, inner[i].Position.Y, 0),
                color = meshInteriorColor
            };
            innerIndices[i] = (ushort)(vcount - 1);
        }

        var saveIndex = (ushort)vcount;
        var pathStart = saveIndex;
        var prevIndex = outPath[0].N == -1 ? 0 : outPath[0].N;

        // Walk the offset outline, emitting one transparent rim vertex per
        // point and stitching triangles back to the tagged inner vertices.
        for (int i = 0; i < outPath.Count; ++i)
        {
            var curr = outPath[i];
            var currPoint = new float2(curr.X / kClipperScale, curr.Y / kClipperScale);
            var currIndex = curr.N == -1 ? 0 : curr.N;

            bVertices[vcount++] = new LightMeshVertex()
            {
                position = new float3(currPoint.x, currPoint.y, 0),
                color = meshExteriorColor
            };

            // Extra triangle when the outline advances to a new inner point.
            if (prevIndex != currIndex)
            {
                bIndices[icount++] = innerIndices[prevIndex];
                bIndices[icount++] = innerIndices[currIndex];
                bIndices[icount++] = (ushort)(vcount - 1);
            }

            bIndices[icount++] = innerIndices[prevIndex];
            bIndices[icount++] = saveIndex;
            bIndices[icount++] = saveIndex = (ushort)(vcount - 1);
            prevIndex = currIndex;
        }

        // Close the Loop.
        {
            bIndices[icount++] = pathStart;
            bIndices[icount++] = innerIndices[minPath];
            bIndices[icount++] = containsStart ? innerIndices[lastPointIndex] : saveIndex;

            bIndices[icount++] = containsStart ? pathStart : saveIndex;
            bIndices[icount++] = containsStart ? saveIndex : innerIndices[minPath];
            // minPath >= 1 whenever !containsStart, so minPath - 1 is safe here.
            bIndices[icount++] = containsStart ? innerIndices[lastPointIndex] : innerIndices[minPath - 1];
        }

        TransferToMesh(bVertices, vcount, bIndices, icount, light);
    }
    else
    {
        // Offset produced nothing (e.g. zero falloff): upload the interior only.
        TransferToMesh(vertices, vcount, indices, icount, light);
    }

    return mesh.GetSubMesh(0).bounds;
}
/// <summary>
/// Builds the light mesh for a parametric (regular n-sided polygon) light into
/// light.lightMesh, mirroring the result into light.vertices / light.indices.
/// </summary>
/// <param name="light">Light whose mesh and cached vertex/index arrays are written.</param>
/// <param name="radius">Circumradius of the polygon.</param>
/// <param name="falloffDistance">Extra distance the falloff ring extends past the shape; only affects the returned bounds here (extrusion happens in the shader via the color channel).</param>
/// <param name="angle">Rotation of the shape in degrees.</param>
/// <param name="sides">Number of polygon sides; values below 3 are treated as a square special case.</param>
/// <returns>Axis-aligned bounds enclosing the shape including the falloff ring.</returns>
public static Bounds GenerateParametricMesh(Light2D light, float radius, float falloffDistance, float angle, int sides)
{
    var angleOffset = Mathf.PI / 2.0f + Mathf.Deg2Rad * angle;
    if (sides < 3)
    {
        // Degenerate side counts collapse to a square; 1/sqrt(2) keeps the
        // square inscribed in the requested radius.
        radius = 0.70710678118654752440084436210485f * radius;
        sides = 4;
    }
    if (sides == 4)
    {
        // Rotate squares 45 degrees so they sit axis-aligned.
        angleOffset = Mathf.PI / 4.0f + Mathf.Deg2Rad * angle;
    }
    // Two vertices per corner (interior + falloff) plus one center vertex;
    // three triangles (9 indices) per side.
    var vertexCount = 1 + 2 * sides;
    var indexCount = 3 * 3 * sides;
    var vertices = new NativeArray<LightMeshVertex>(vertexCount, Allocator.Temp);
    var triangles = new NativeArray<ushort>(indexCount, Allocator.Temp);
    var centerIndex = (ushort)(2 * sides);
    var mesh = light.lightMesh;
    // Only Alpha value in Color channel is ever used. May remove it or keep it for batching params in the future.
    var color = new Color(0, 0, 0, 1);
    vertices[centerIndex] = new LightMeshVertex
    {
        position = float3.zero,
        color = color
    };
    var radiansPerSide = 2 * Mathf.PI / sides;
    var min = new float3(float.MaxValue, float.MaxValue, 0);
    var max = new float3(float.MinValue, float.MinValue, 0);
    for (var i = 0; i < sides; i++)
    {
        var endAngle = (i + 1) * radiansPerSide;
        var extrudeDir = new float3(math.cos(endAngle + angleOffset), math.sin(endAngle + angleOffset), 0);
        var endPoint = radius * extrudeDir;
        // Wraps so the last side reuses the first side's pair of vertices.
        var vertexIndex = (2 * i + 2) % (2 * sides);
        // Even index: falloff vertex — the shader extrudes it along the
        // direction stored in the color's RG channels.
        vertices[vertexIndex] = new LightMeshVertex
        {
            position = endPoint,
            color = new Color(extrudeDir.x, extrudeDir.y, 0, 0)
        };
        // Odd index: interior (non-extruded) vertex.
        vertices[vertexIndex + 1] = new LightMeshVertex
        {
            position = endPoint,
            color = color
        };
        // Triangle 1 (Tip)
        var triangleIndex = 9 * i;
        triangles[triangleIndex] = (ushort)(vertexIndex + 1);
        triangles[triangleIndex + 1] = (ushort)(2 * i + 1);
        triangles[triangleIndex + 2] = centerIndex;
        // Triangle 2 (Upper Top Left)
        triangles[triangleIndex + 3] = (ushort)(vertexIndex);
        triangles[triangleIndex + 4] = (ushort)(2 * i);
        triangles[triangleIndex + 5] = (ushort)(2 * i + 1);
        // Triangle 2 (Bottom Top Left)
        triangles[triangleIndex + 6] = (ushort)(vertexIndex + 1);
        triangles[triangleIndex + 7] = (ushort)(vertexIndex);
        triangles[triangleIndex + 8] = (ushort)(2 * i + 1);
        // Bounds must include the shader-extruded falloff ring.
        min = math.min(min, endPoint + extrudeDir * falloffDistance);
        max = math.max(max, endPoint + extrudeDir * falloffDistance);
    }
    mesh.SetVertexBufferParams(vertexCount, LightMeshVertex.VertexLayout);
    mesh.SetVertexBufferData(vertices, 0, 0, vertexCount);
    mesh.SetIndices(triangles, MeshTopology.Triangles, 0, false);
    // Keep a managed-side copy of the generated geometry on the light.
    light.vertices = new LightMeshVertex[vertexCount];
    NativeArray<LightMeshVertex>.Copy(vertices, light.vertices, vertexCount);
    light.indices = new ushort[indexCount];
    NativeArray<ushort>.Copy(triangles, light.indices, indexCount);
    return new Bounds
    {
        min = min,
        max = max
    };
}
/// <summary>
/// Builds the light mesh from a sprite's geometry (positions, UVs, indices),
/// centered on the sprite's bounds, and mirrors it into light.vertices /
/// light.indices. Clears the mesh and returns empty bounds when no sprite is set.
/// </summary>
/// <param name="light">Light whose mesh and cached vertex/index arrays are written.</param>
/// <param name="sprite">Source sprite; may be null.</param>
/// <returns>The generated submesh bounds, or zero bounds when sprite is null.</returns>
public static Bounds GenerateSpriteMesh(Light2D light, Sprite sprite)
{
    var mesh = light.lightMesh;

    // Fix: the null guard must run before any sprite member access. The
    // original read sprite.uv first, which threw NullReferenceException
    // whenever the light had no sprite assigned.
    if (sprite == null)
    {
        mesh.Clear();
        return new Bounds(Vector3.zero, Vector3.zero);
    }

    // Accessing sprite.uv refreshes the sprite's render data so the vertex
    // attribute reads below are valid; the value itself is intentionally
    // unused. ("Venky fixed it, enroute to trunk")
    var uvs = sprite.uv;

    var srcVertices = sprite.GetVertexAttribute<Vector3>(VertexAttribute.Position);
    var srcUVs = sprite.GetVertexAttribute<Vector2>(VertexAttribute.TexCoord0);
    var srcIndices = sprite.GetIndices();

    // Recenter vertices on the sprite's bounds center.
    var center = 0.5f * (sprite.bounds.min + sprite.bounds.max);
    var vertices = new NativeArray<LightMeshVertex>(srcIndices.Length, Allocator.Temp);
    // Only the alpha channel of the vertex color is used by the light shader.
    var color = new Color(0, 0, 0, 1);
    for (var i = 0; i < srcVertices.Length; i++)
    {
        vertices[i] = new LightMeshVertex
        {
            position = new Vector3(srcVertices[i].x, srcVertices[i].y, 0) - center,
            color = color,
            uv = srcUVs[i]
        };
    }

    mesh.SetVertexBufferParams(vertices.Length, LightMeshVertex.VertexLayout);
    mesh.SetVertexBufferData(vertices, 0, 0, vertices.Length);
    mesh.SetIndices(srcIndices, MeshTopology.Triangles, 0, true);

    // Keep a managed-side copy of the generated geometry on the light.
    light.vertices = new LightMeshVertex[vertices.Length];
    NativeArray<LightMeshVertex>.Copy(vertices, light.vertices, vertices.Length);
    light.indices = new ushort[srcIndices.Length];
    NativeArray<ushort>.Copy(srcIndices, light.indices, srcIndices.Length);

    return mesh.GetSubMesh(0).bounds;
}
/// <summary>
/// Computes an FNV-1a style hash of a freeform shape path so callers can cheaply
/// detect when the path has changed. A null path hashes to 0.
/// </summary>
public static int GetShapePathHash(Vector3[] path)
{
    if (path == null)
        return 0;

    unchecked
    {
        var hash = (int)2166136261; // FNV offset basis
        for (var i = 0; i < path.Length; ++i)
            hash = hash * 16777619 ^ path[i].GetHashCode(); // FNV prime
        return hash;
    }
}
}
}

View File

@@ -0,0 +1,7 @@
namespace UnityEngine.Experimental.Rendering.Universal
{
/// <summary>
/// Implemented by 2D render passes that need access to the shared
/// <see cref="Renderer2DData"/> (blend styles, render-target handles, budgets).
/// </summary>
internal interface IRenderPass2D
{
    // The renderer configuration/state shared by the 2D passes.
    Renderer2DData rendererData { get; }
}
}

View File

@@ -0,0 +1,37 @@
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;
namespace UnityEngine.Experimental.Rendering.Universal
{
// Only to be used when Pixel Perfect Camera is present and it has Crop Frame X or Y enabled.
// This pass simply clears BuiltinRenderTextureType.CameraTarget to black, so that the letterbox or pillarbox is black instead of garbage.
// In the future this can be extended to draw a custom background image instead of just clearing.
internal class PixelPerfectBackgroundPass : ScriptableRenderPass
{
    private static readonly ProfilingSampler m_ProfilingScope = new ProfilingSampler("Pixel Perfect Background Pass");

    public PixelPerfectBackgroundPass(RenderPassEvent evt)
    {
        renderPassEvent = evt;
    }

    /// <summary>
    /// Clears the camera backbuffer to opaque black so letterbox/pillarbox
    /// borders show black rather than uninitialized memory.
    /// </summary>
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        var commandBuffer = CommandBufferPool.Get();

        using (new ProfilingScope(commandBuffer, m_ProfilingScope))
        {
            // The previous backbuffer contents are irrelevant, so DontCare
            // on load; only a color clear is needed.
            CoreUtils.SetRenderTarget(
                commandBuffer,
                BuiltinRenderTextureType.CameraTarget,
                RenderBufferLoadAction.DontCare,
                RenderBufferStoreAction.Store,
                ClearFlag.Color,
                Color.black);
        }

        context.ExecuteCommandBuffer(commandBuffer);
        CommandBufferPool.Release(commandBuffer);
    }
}
}

View File

@@ -0,0 +1,328 @@
using System.Collections.Generic;
using Unity.Mathematics;
using UnityEngine.Rendering;
using UnityEngine.Profiling;
using UnityEngine.Rendering.Universal;
namespace UnityEngine.Experimental.Rendering.Universal
{
/// <summary>
/// Main 2D renderer pass: renders light textures per layer batch, then draws all
/// renderers lit by those textures, including optional light volumes and the
/// camera sorting layer texture copy.
/// </summary>
internal class Render2DLightingPass : ScriptableRenderPass, IRenderPass2D
{
    // Global shader property ids.
    private static readonly int k_HDREmulationScaleID = Shader.PropertyToID("_HDREmulationScale");
    private static readonly int k_InverseHDREmulationScaleID = Shader.PropertyToID("_InverseHDREmulationScale");
    private static readonly int k_UseSceneLightingID = Shader.PropertyToID("_UseSceneLighting");
    private static readonly int k_RendererColorID = Shader.PropertyToID("_RendererColor");
    private static readonly int k_CameraSortingLayerTextureID = Shader.PropertyToID("_CameraSortingLayerTexture");

    // One light texture per blend style (up to 4).
    private static readonly int[] k_ShapeLightTextureIDs =
    {
        Shader.PropertyToID("_ShapeLightTexture0"),
        Shader.PropertyToID("_ShapeLightTexture1"),
        Shader.PropertyToID("_ShapeLightTexture2"),
        Shader.PropertyToID("_ShapeLightTexture3")
    };

    // Shader passes drawn by this pass (legacy names kept for compatibility).
    private static readonly ShaderTagId k_CombinedRenderingPassNameOld = new ShaderTagId("Lightweight2D");
    private static readonly ShaderTagId k_CombinedRenderingPassName = new ShaderTagId("Universal2D");
    private static readonly ShaderTagId k_NormalsRenderingPassName = new ShaderTagId("NormalsRendering");
    private static readonly ShaderTagId k_LegacyPassName = new ShaderTagId("SRPDefaultUnlit");
    private static readonly List<ShaderTagId> k_ShaderTags = new List<ShaderTagId>() { k_LegacyPassName, k_CombinedRenderingPassName, k_CombinedRenderingPassNameOld };

    // Profiling scopes.
    private static readonly ProfilingSampler m_ProfilingDrawLights = new ProfilingSampler("Draw 2D Lights");
    private static readonly ProfilingSampler m_ProfilingDrawLightTextures = new ProfilingSampler("Draw 2D Lights Textures");
    private static readonly ProfilingSampler m_ProfilingDrawRenderers = new ProfilingSampler("Draw All Renderers");
    private static readonly ProfilingSampler m_ProfilingDrawLayerBatch = new ProfilingSampler("Draw Layer Batch");
    private static readonly ProfilingSampler m_ProfilingSamplerUnlit = new ProfilingSampler("Render Unlit");

    Material m_BlitMaterial;
    Material m_SamplingMaterial;
    private readonly Renderer2DData m_Renderer2DData;
    private bool m_HasValidDepth;

    public Render2DLightingPass(Renderer2DData rendererData, Material blitMaterial, Material samplingMaterial)
    {
        m_Renderer2DData = rendererData;
        m_BlitMaterial = blitMaterial;
        m_SamplingMaterial = samplingMaterial;
    }

    internal void Setup(bool hasValidDepth)
    {
        m_HasValidDepth = hasValidDepth;
    }

    // Resolves the renderer's transparency sort mode into concrete sorting
    // settings (Default follows the camera's projection type).
    private void GetTransparencySortingMode(Camera camera, ref SortingSettings sortingSettings)
    {
        var mode = m_Renderer2DData.transparencySortMode;

        if (mode == TransparencySortMode.Default)
        {
            mode = camera.orthographic ? TransparencySortMode.Orthographic : TransparencySortMode.Perspective;
        }

        switch (mode)
        {
            case TransparencySortMode.Perspective:
                sortingSettings.distanceMetric = DistanceMetric.Perspective;
                break;
            case TransparencySortMode.Orthographic:
                sortingSettings.distanceMetric = DistanceMetric.Orthographic;
                break;
            default:
                sortingSettings.distanceMetric = DistanceMetric.CustomAxis;
                sortingSettings.customAxis = m_Renderer2DData.transparencySortAxis;
                break;
        }
    }

    // Copies the current color attachment into the camera sorting layer render
    // texture (optionally downsampled) and rebinds the color attachment.
    private void CopyCameraSortingLayerRenderTexture(ScriptableRenderContext context, RenderingData renderingData)
    {
        var cmd = CommandBufferPool.Get();
        cmd.Clear();
        this.CreateCameraSortingLayerRenderTexture(renderingData, cmd, m_Renderer2DData.cameraSortingLayerDownsamplingMethod);

        // 4x box downsampling needs the sampling material; everything else is a plain blit.
        Material copyMaterial = m_Renderer2DData.cameraSortingLayerDownsamplingMethod == Downsampling._4xBox ? m_SamplingMaterial : m_BlitMaterial;
        RenderingUtils.Blit(cmd, colorAttachment, m_Renderer2DData.cameraSortingLayerRenderTarget.id, copyMaterial, 0, false, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
        cmd.SetRenderTarget(colorAttachment);
        cmd.SetGlobalTexture(k_CameraSortingLayerTextureID, m_Renderer2DData.cameraSortingLayerRenderTarget.id);
        context.ExecuteCommandBuffer(cmd);
        // Fix: the buffer was previously never returned, leaking it from the pool.
        CommandBufferPool.Release(cmd);
    }

    // Returns the sorting-layer value at which the camera sorting layer texture
    // should be captured, or short.MinValue when the configured layer no longer exists.
    private short GetCameraSortingLayerBoundsIndex()
    {
        SortingLayer[] sortingLayers = Light2DManager.GetCachedSortingLayer();
        for (short i = 0; i < sortingLayers.Length; i++)
        {
            if (sortingLayers[i].id == m_Renderer2DData.cameraSortingLayerTextureBound)
                return (short)sortingLayers[i].value;
        }

        return short.MinValue;
    }

    // Draws as many layer batches as fit within the light render texture budget,
    // starting at startIndex. Returns the number of batches actually drawn so the
    // caller can loop until all batches are processed.
    private int DrawLayerBatches(
        LayerBatch[] layerBatches,
        int batchCount,
        int startIndex,
        CommandBuffer cmd,
        ScriptableRenderContext context,
        ref RenderingData renderingData,
        ref FilteringSettings filterSettings,
        ref DrawingSettings normalsDrawSettings,
        ref DrawingSettings drawSettings,
        ref RenderTextureDescriptor desc)
    {
        var batchesDrawn = 0;
        var rtCount = 0U;

        // Draw lights
        using (new ProfilingScope(cmd, m_ProfilingDrawLights))
        {
            for (var i = startIndex; i < batchCount; ++i)
            {
                ref var layerBatch = ref layerBatches[i];

                // Count the blend styles used by this batch (popcount of the mask);
                // each used style needs its own temporary light texture.
                var blendStyleMask = layerBatch.lightStats.blendStylesUsed;
                var blendStyleCount = 0U;
                while (blendStyleMask > 0)
                {
                    blendStyleCount += blendStyleMask & 1;
                    blendStyleMask >>= 1;
                }

                // Stop before exceeding the texture budget; remaining batches are
                // handled in the next call.
                rtCount += blendStyleCount;
                if (rtCount > LayerUtility.maxTextureCount)
                    break;

                batchesDrawn++;

                if (layerBatch.lightStats.totalNormalMapUsage > 0)
                {
                    filterSettings.sortingLayerRange = layerBatch.layerRange;
                    var depthTarget = m_HasValidDepth ? depthAttachment : BuiltinRenderTextureType.None;
                    this.RenderNormals(context, renderingData, normalsDrawSettings, filterSettings, depthTarget, cmd, layerBatch.lightStats);
                }

                using (new ProfilingScope(cmd, m_ProfilingDrawLightTextures))
                {
                    this.RenderLights(renderingData, cmd, layerBatch.startLayerID, ref layerBatch, ref desc);
                }
            }
        }

        // Draw renderers
        var blendStylesCount = m_Renderer2DData.lightBlendStyles.Length;
        using (new ProfilingScope(cmd, m_ProfilingDrawRenderers))
        {
            cmd.SetRenderTarget(colorAttachment, depthAttachment);

            for (var i = startIndex; i < startIndex + batchesDrawn; i++)
            {
                using (new ProfilingScope(cmd, m_ProfilingDrawLayerBatch))
                {
                    // This is a local copy of the array element (it's a struct). Remember to add a ref here if you need to modify the real thing.
                    var layerBatch = layerBatches[i];

                    if (layerBatch.lightStats.totalLights > 0)
                    {
                        // Bind the light texture for every blend style this batch used.
                        for (var blendStyleIndex = 0; blendStyleIndex < blendStylesCount; blendStyleIndex++)
                        {
                            var blendStyleMask = (uint)(1 << blendStyleIndex);
                            var blendStyleUsed = (layerBatch.lightStats.blendStylesUsed & blendStyleMask) > 0;

                            if (blendStyleUsed)
                            {
                                var identifier = layerBatch.GetRTId(cmd, desc, blendStyleIndex);
                                cmd.SetGlobalTexture(k_ShapeLightTextureIDs[blendStyleIndex], identifier);
                            }

                            RendererLighting.EnableBlendStyle(cmd, blendStyleIndex, blendStyleUsed);
                        }
                    }
                    else
                    {
                        // No lights: bind black so lit shaders sample zero light.
                        for (var blendStyleIndex = 0; blendStyleIndex < k_ShapeLightTextureIDs.Length; blendStyleIndex++)
                        {
                            cmd.SetGlobalTexture(k_ShapeLightTextureIDs[blendStyleIndex], Texture2D.blackTexture);
                            RendererLighting.EnableBlendStyle(cmd, blendStyleIndex, blendStyleIndex == 0);
                        }
                    }

                    context.ExecuteCommandBuffer(cmd);
                    cmd.Clear();

                    short cameraSortingLayerBoundsIndex = GetCameraSortingLayerBoundsIndex();

                    // If our camera sorting layer texture bound is inside our batch we need to break up the DrawRenderers into two batches
                    if (cameraSortingLayerBoundsIndex >= layerBatch.layerRange.lowerBound && cameraSortingLayerBoundsIndex < layerBatch.layerRange.upperBound && m_Renderer2DData.useCameraSortingLayerTexture)
                    {
                        filterSettings.sortingLayerRange = new SortingLayerRange(layerBatch.layerRange.lowerBound, cameraSortingLayerBoundsIndex);
                        context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref filterSettings);
                        CopyCameraSortingLayerRenderTexture(context, renderingData);

                        filterSettings.sortingLayerRange = new SortingLayerRange((short)(cameraSortingLayerBoundsIndex + 1), layerBatch.layerRange.upperBound);
                        context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref filterSettings);
                    }
                    else
                    {
                        filterSettings.sortingLayerRange = new SortingLayerRange(layerBatch.layerRange.lowerBound, layerBatch.layerRange.upperBound);
                        context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref filterSettings);

                        if (cameraSortingLayerBoundsIndex == layerBatch.layerRange.upperBound && m_Renderer2DData.useCameraSortingLayerTexture)
                            CopyCameraSortingLayerRenderTexture(context, renderingData);
                    }

                    // Draw light volumes
                    if (layerBatch.lightStats.totalVolumetricUsage > 0)
                    {
                        var sampleName = "Render 2D Light Volumes";
                        cmd.BeginSample(sampleName);
                        this.RenderLightVolumes(renderingData, cmd, layerBatch.startLayerID, layerBatch.endLayerValue, colorAttachment, depthAttachment, m_Renderer2DData.lightCullResult.visibleLights);
                        cmd.EndSample(sampleName);
                    }
                }
            }
        }

        // Return the per-batch temporary light textures to the pool.
        for (var i = startIndex; i < startIndex + batchesDrawn; ++i)
        {
            ref var layerBatch = ref layerBatches[i];
            layerBatch.ReleaseRT(cmd);
        }

        return batchesDrawn;
    }

    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        var isLitView = true;

#if UNITY_EDITOR
        // Honor the scene view's lighting toggle, and never light preview cameras.
        // Fix: currentDrawingSceneView can be null (e.g. when a scene camera
        // renders outside a scene-view repaint), so guard before dereferencing.
        var currentSceneView = UnityEditor.SceneView.currentDrawingSceneView;
        if (renderingData.cameraData.isSceneViewCamera && currentSceneView != null)
            isLitView = currentSceneView.sceneLighting;

        if (renderingData.cameraData.camera.cameraType == CameraType.Preview)
            isLitView = false;
#endif

        var camera = renderingData.cameraData.camera;
        var filterSettings = new FilteringSettings();
        filterSettings.renderQueueRange = RenderQueueRange.all;
        filterSettings.layerMask = -1;
        filterSettings.renderingLayerMask = 0xFFFFFFFF;
        filterSettings.sortingLayerRange = SortingLayerRange.all;

        LayerUtility.InitializeBudget(m_Renderer2DData.lightRenderTextureMemoryBudget);
        ShadowRendering.InitializeBudget(m_Renderer2DData.shadowRenderTextureMemoryBudget);

        var isSceneLit = m_Renderer2DData.lightCullResult.IsSceneLit();
        if (isSceneLit)
        {
            var combinedDrawSettings = CreateDrawingSettings(k_ShaderTags, ref renderingData, SortingCriteria.CommonTransparent);
            var normalsDrawSettings = CreateDrawingSettings(k_NormalsRenderingPassName, ref renderingData, SortingCriteria.CommonTransparent);

            var sortSettings = combinedDrawSettings.sortingSettings;
            GetTransparencySortingMode(camera, ref sortSettings);
            combinedDrawSettings.sortingSettings = sortSettings;
            normalsDrawSettings.sortingSettings = sortSettings;

            var cmd = CommandBufferPool.Get();
            cmd.SetGlobalFloat(k_HDREmulationScaleID, m_Renderer2DData.hdrEmulationScale);
            cmd.SetGlobalFloat(k_InverseHDREmulationScaleID, 1.0f / m_Renderer2DData.hdrEmulationScale);
            cmd.SetGlobalFloat(k_UseSceneLightingID, isLitView ? 1.0f : 0.0f);
            cmd.SetGlobalColor(k_RendererColorID, Color.white);
            this.SetShapeLightShaderGlobals(cmd);

            var desc = this.GetBlendStyleRenderTextureDesc(renderingData);
            var layerBatches = LayerUtility.CalculateBatches(m_Renderer2DData.lightCullResult, out var batchCount);

            // DrawLayerBatches may stop early when the texture budget is hit;
            // keep calling it until every batch has been drawn.
            var batchesDrawn = 0;
            for (var i = 0; i < batchCount; i += batchesDrawn)
                batchesDrawn = DrawLayerBatches(layerBatches, batchCount, i, cmd, context, ref renderingData, ref filterSettings, ref normalsDrawSettings, ref combinedDrawSettings, ref desc);

            this.ReleaseRenderTextures(cmd);
            context.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);
        }
        else
        {
            // No lights in the scene: render everything unlit with black light textures.
            var unlitDrawSettings = CreateDrawingSettings(k_ShaderTags, ref renderingData, SortingCriteria.CommonTransparent);

            var cmd = CommandBufferPool.Get();
            using (new ProfilingScope(cmd, m_ProfilingSamplerUnlit))
            {
                CoreUtils.SetRenderTarget(cmd, colorAttachment, depthAttachment, ClearFlag.None, Color.white);
                cmd.SetGlobalFloat(k_UseSceneLightingID, isLitView ? 1.0f : 0.0f);
                cmd.SetGlobalColor(k_RendererColorID, Color.white);

                for (var blendStyleIndex = 0; blendStyleIndex < k_ShapeLightTextureIDs.Length; blendStyleIndex++)
                {
                    if (blendStyleIndex == 0)
                        cmd.SetGlobalTexture(k_ShapeLightTextureIDs[blendStyleIndex], Texture2D.blackTexture);

                    RendererLighting.EnableBlendStyle(cmd, blendStyleIndex, blendStyleIndex == 0);
                }
            }
            context.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);

            Profiler.BeginSample("Render Sprites Unlit");
            context.DrawRenderers(renderingData.cullResults, ref unlitDrawSettings, ref filterSettings);
            Profiler.EndSample();
        }

        filterSettings.sortingLayerRange = SortingLayerRange.all;
        RenderingUtils.RenderObjectsWithError(context, ref renderingData.cullResults, camera, filterSettings, SortingCriteria.None);
    }

    Renderer2DData IRenderPass2D.rendererData
    {
        get { return m_Renderer2DData; }
    }
}
}

View File

@@ -0,0 +1,154 @@
using Unity.Mathematics;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering.Universal
{
// A run of consecutive sorting layers that share the same set of 2D lights and
// can therefore be drawn against the same light textures.
internal struct LayerBatch
{
    public int startLayerID;             // id of the first sorting layer in the batch
    public int endLayerValue;            // sorting value of the last layer in the batch
    public SortingLayerRange layerRange; // inclusive renderer sorting range covered by the batch
    public LightStats lightStats;        // aggregated light usage for the batch

    // One temporary light render target slot per blend style (4 max).
    private unsafe fixed int renderTargetIds[4];
    private unsafe fixed bool renderTargetUsed[4];

    // Assigns stable, batch-unique shader property ids for the light textures.
    public void InitRTIds(int index)
    {
        for (var i = 0; i < 4; i++)
        {
            unsafe
            {
                renderTargetUsed[i] = false;
                renderTargetIds[i] = Shader.PropertyToID($"_LightTexture_{index}_{i}");
            }
        }
    }

    // Lazily allocates (on first use this frame) and returns the temporary RT
    // for the given blend style.
    public RenderTargetIdentifier GetRTId(CommandBuffer cmd, RenderTextureDescriptor desc, int index)
    {
        unsafe
        {
            if (!renderTargetUsed[index])
            {
                cmd.GetTemporaryRT(renderTargetIds[index], desc, FilterMode.Bilinear);
                renderTargetUsed[index] = true;
            }
            return new RenderTargetIdentifier(renderTargetIds[index]);
        }
    }

    // Releases every temporary RT that GetRTId allocated and marks the slots unused.
    public void ReleaseRT(CommandBuffer cmd)
    {
        for (var i = 0; i < 4; i++)
        {
            unsafe
            {
                if (!renderTargetUsed[i])
                    continue;

                cmd.ReleaseTemporaryRT(renderTargetIds[i]);
                renderTargetUsed[i] = false;
            }
        }
    }
}
// Groups sorting layers into LayerBatches so that layers lit by the same set of
// lights are rendered together, sharing light render textures.
internal static class LayerUtility
{
    // Reused across frames; one entry per cached sorting layer.
    private static LayerBatch[] s_LayerBatches;

    // Maximum number of temporary light textures allowed alive at once.
    public static uint maxTextureCount { get; private set; }

    public static void InitializeBudget(uint maxTextureCount)
    {
        // Never allow fewer than 4 so a single batch (one per blend style) always fits.
        LayerUtility.maxTextureCount = math.max(4, maxTextureCount);
    }

    // Two layers can batch only if every visible light treats them identically
    // (and no shared lit light casts shadows, since shadows are per-layer).
    private static bool CanBatchLightsInLayer(int layerIndex1, int layerIndex2, SortingLayer[] sortingLayers, ILight2DCullResult lightCullResult)
    {
        var layerId1 = sortingLayers[layerIndex1].id;
        var layerId2 = sortingLayers[layerIndex2].id;
        foreach (var light in lightCullResult.visibleLights)
        {
            // If the lit layers are different, or if they are lit but this is a shadow casting light then don't batch.
            if ((light.IsLitLayer(layerId1) != light.IsLitLayer(layerId2)) || (light.IsLitLayer(layerId1) && light.shadowsEnabled))
                return false;
        }
        return true;
    }

    // Returns the index of the highest layer that can still batch with startLayerIndex.
    private static int FindUpperBoundInBatch(int startLayerIndex, SortingLayer[] sortingLayers, ILight2DCullResult lightCullResult)
    {
        // start checking at the next layer
        for (var i = startLayerIndex + 1; i < sortingLayers.Length; i++)
        {
            if (!CanBatchLightsInLayer(startLayerIndex, i, sortingLayers, lightCullResult))
                return i - 1;
        }
        return sortingLayers.Length - 1;
    }

    // Ensures s_LayerBatches exists (and, in the editor, tracks sorting-layer
    // count changes) and that each entry has its RT ids initialized.
    private static void InitializeBatchInfos(SortingLayer[] cachedSortingLayers)
    {
        var count = cachedSortingLayers.Length;
        var needInit = s_LayerBatches == null;
        if (s_LayerBatches is null)
        {
            s_LayerBatches = new LayerBatch[count];
        }

#if UNITY_EDITOR
        // we should fix. Make a non allocating version of this
        if (!Application.isPlaying && s_LayerBatches.Length != count)
        {
            s_LayerBatches = new LayerBatch[count];
            needInit = true;
        }
#endif

        if (needInit)
        {
            for (var i = 0; i < s_LayerBatches.Length; i++)
            {
                ref var layerBatch = ref s_LayerBatches[i];
                layerBatch.InitRTIds(i);
            }
        }
    }

    // Partitions the cached sorting layers into batches of light-compatible
    // layers. Returns the (reused) batch array; only the first batchCount
    // entries are valid for this frame.
    public static LayerBatch[] CalculateBatches(ILight2DCullResult lightCullResult, out int batchCount)
    {
        var cachedSortingLayers = Light2DManager.GetCachedSortingLayer();
        InitializeBatchInfos(cachedSortingLayers);

        batchCount = 0;
        for (var i = 0; i < cachedSortingLayers.Length;)
        {
            var layerToRender = cachedSortingLayers[i].id;
            var lightStats = lightCullResult.GetLightStatsByLayer(layerToRender);
            ref var layerBatch = ref s_LayerBatches[batchCount++];

            // Find the highest layer that share the same set of lights as this layer.
            var upperLayerInBatch = FindUpperBoundInBatch(i, cachedSortingLayers, lightCullResult);

            // Some renderers override their sorting layer value with short.MinValue or short.MaxValue.
            // When drawing the first sorting layer, we should include the range from short.MinValue to layerValue.
            // Similarly, when drawing the last sorting layer, include the range from layerValue to short.MaxValue.
            var startLayerValue = (short)cachedSortingLayers[i].value;
            var lowerBound = (i == 0) ? short.MinValue : startLayerValue;
            var endLayerValue = (short)cachedSortingLayers[upperLayerInBatch].value;
            var upperBound = (upperLayerInBatch == cachedSortingLayers.Length - 1) ? short.MaxValue : endLayerValue;

            // Renderer within this range share the same set of lights so they should be rendered together.
            var sortingLayerRange = new SortingLayerRange(lowerBound, upperBound);

            layerBatch.startLayerID = layerToRender;
            layerBatch.endLayerValue = endLayerValue;
            layerBatch.layerRange = sortingLayerRange;
            layerBatch.lightStats = lightStats;

            // Continue after the last layer folded into this batch.
            i = upperLayerInBatch + 1;
        }

        return s_LayerBatches;
    }
}
}

View File

@@ -0,0 +1,96 @@
using UnityEngine.Rendering.Universal;
namespace UnityEngine.Experimental.Rendering.Universal
{
// Builds and caches the lookup texture used by point/spot light shaders to read
// distance, angle, and direction per pixel instead of computing them.
internal static class Light2DLookupTexture
{
    private static Texture2D s_PointLightLookupTexture;
    // Only referenced by the commented-out debugging helpers at the bottom of this class.
    private static Texture2D s_FalloffLookupTexture;

    // Returns the cached lookup texture, creating it on first use (and after
    // domain reload / texture destruction, since the null check re-triggers creation).
    public static Texture GetLightLookupTexture()
    {
        if (s_PointLightLookupTexture == null)
            s_PointLightLookupTexture = CreatePointLightLookupTexture();

        return s_PointLightLookupTexture;
    }

    private static Texture2D CreatePointLightLookupTexture()
    {
        const int WIDTH = 256;
        const int HEIGHT = 256;

        // Prefer a float format: the blue/alpha channels store signed direction
        // components. NOTE(review): the R8G8B8A8_UNorm fallback cannot represent
        // negative values — confirm the shader compensates on platforms where
        // neither float format supports SetPixels.
        var textureFormat = GraphicsFormat.R8G8B8A8_UNorm;
        if (RenderingUtils.SupportsGraphicsFormat(GraphicsFormat.R16G16B16A16_SFloat, FormatUsage.SetPixels))
            textureFormat = GraphicsFormat.R16G16B16A16_SFloat;
        else if (RenderingUtils.SupportsGraphicsFormat(GraphicsFormat.R32G32B32A32_SFloat, FormatUsage.SetPixels))
            textureFormat = GraphicsFormat.R32G32B32A32_SFloat;

        var texture = new Texture2D(WIDTH, HEIGHT, textureFormat, TextureCreationFlags.None);
        texture.filterMode = FilterMode.Bilinear;
        texture.wrapMode = TextureWrapMode.Clamp;
        var center = new Vector2(WIDTH / 2.0f, HEIGHT / 2.0f);

        for (var y = 0; y < HEIGHT; y++)
        {
            for (var x = 0; x < WIDTH; x++)
            {
                var pos = new Vector2(x, y);
                var distance = Vector2.Distance(pos, center);
                var relPos = pos - center;
                var direction = center - pos;
                direction.Normalize();

                // red = 1-0 distance
                // green = 1-0 angle
                // blue = direction.x
                // alpha = direction.y
                float red;
                if (x == WIDTH - 1 || y == HEIGHT - 1)
                    red = 0;  // zero the border so clamped sampling fades out cleanly
                else
                    red = Mathf.Clamp(1 - (2.0f * distance / WIDTH), 0.0f, 1.0f);

                // Angle from straight-down, normalized to [0, 1].
                var cosAngle = Vector2.Dot(Vector2.down, relPos.normalized);
                var angle = Mathf.Acos(cosAngle) / Mathf.PI; // 0-1

                var green = Mathf.Clamp(1 - angle, 0.0f, 1.0f);
                var blue = direction.x;
                var alpha = direction.y;

                var color = new Color(red, green, blue, alpha);

                texture.SetPixel(x, y, color);
            }
        }
        texture.Apply();
        return texture;
    }

    //#if UNITY_EDITOR
    //    [MenuItem("Light2D Debugging/Write Light Texture")]
    //    static public void WriteLightTexture()
    //    {
    //        var path = EditorUtility.SaveFilePanel("Save texture as PNG", "", "LightLookupTexture.exr", "png");
    //        CreatePointLightLookupTexture();

    //        byte[] imgData = s_PointLightLookupTexture.EncodeToEXR(Texture2D.EXRFlags.CompressRLE);
    //        if (imgData != null)
    //            File.WriteAllBytes(path, imgData);
    //    }

    //    [MenuItem("Light2D Debugging/Write Falloff Texture")]
    //    static public void WriteCurveTexture()
    //    {
    //        var path = EditorUtility.SaveFilePanel("Save texture as PNG", "", "FalloffLookupTexture.png", "png");
    //        CreateFalloffLookupTexture();

    //        byte[] imgData = s_FalloffLookupTexture.EncodeToPNG();
    //        if (imgData != null)
    //            File.WriteAllBytes(path, imgData);
    //    }
    //#endif
}
}

View File

@@ -0,0 +1,615 @@
using System.Collections.Generic;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;
namespace UnityEngine.Experimental.Rendering.Universal
{
internal static class RendererLighting
{
// Profiling and shader identifiers shared by all light-rendering helpers.
private static readonly ProfilingSampler m_ProfilingSampler = new ProfilingSampler("Draw Normals");
private static readonly ShaderTagId k_NormalsRenderingPassName = new ShaderTagId("NormalsRendering");
// 0.5 in RGB encodes a flat (zero) tangent-space normal.
private static readonly Color k_NormalClearColor = new Color(0.5f, 0.5f, 0.5f, 1.0f);

// Shader keywords toggled while drawing lights.
private static readonly string k_SpriteLightKeyword = "SPRITE_LIGHT";
private static readonly string k_UsePointLightCookiesKeyword = "USE_POINT_LIGHT_COOKIES";
private static readonly string k_LightQualityFastKeyword = "LIGHT_QUALITY_FAST";
private static readonly string k_UseNormalMap = "USE_NORMAL_MAP";
private static readonly string k_UseAdditiveBlendingKeyword = "USE_ADDITIVE_BLENDING";

// One keyword per blend style; enabled per layer batch.
private static readonly string[] k_UseBlendStyleKeywords =
{
    "USE_SHAPE_LIGHT_TYPE_0", "USE_SHAPE_LIGHT_TYPE_1", "USE_SHAPE_LIGHT_TYPE_2", "USE_SHAPE_LIGHT_TYPE_3"
};

// Per-blend-style global shader properties.
private static readonly int[] k_BlendFactorsPropIDs =
{
    Shader.PropertyToID("_ShapeLightBlendFactors0"),
    Shader.PropertyToID("_ShapeLightBlendFactors1"),
    Shader.PropertyToID("_ShapeLightBlendFactors2"),
    Shader.PropertyToID("_ShapeLightBlendFactors3")
};

private static readonly int[] k_MaskFilterPropIDs =
{
    Shader.PropertyToID("_ShapeLightMaskFilter0"),
    Shader.PropertyToID("_ShapeLightMaskFilter1"),
    Shader.PropertyToID("_ShapeLightMaskFilter2"),
    Shader.PropertyToID("_ShapeLightMaskFilter3")
};

private static readonly int[] k_InvertedFilterPropIDs =
{
    Shader.PropertyToID("_ShapeLightInvertedFilter0"),
    Shader.PropertyToID("_ShapeLightInvertedFilter1"),
    Shader.PropertyToID("_ShapeLightInvertedFilter2"),
    Shader.PropertyToID("_ShapeLightInvertedFilter3")
};

// Lazily resolved render texture format for light textures (see GetRenderTextureFormat).
private static GraphicsFormat s_RenderTextureFormatToUse = GraphicsFormat.R8G8B8A8_UNorm;
private static bool s_HasSetupRenderTextureFormatToUse;

// Per-light material/shader property ids.
private static readonly int k_SrcBlendID = Shader.PropertyToID("_SrcBlend");
private static readonly int k_DstBlendID = Shader.PropertyToID("_DstBlend");
private static readonly int k_FalloffIntensityID = Shader.PropertyToID("_FalloffIntensity");
private static readonly int k_FalloffDistanceID = Shader.PropertyToID("_FalloffDistance");
private static readonly int k_LightColorID = Shader.PropertyToID("_LightColor");
private static readonly int k_VolumeOpacityID = Shader.PropertyToID("_VolumeOpacity");
private static readonly int k_CookieTexID = Shader.PropertyToID("_CookieTex");
private static readonly int k_FalloffLookupID = Shader.PropertyToID("_FalloffLookup");
private static readonly int k_LightPositionID = Shader.PropertyToID("_LightPosition");
private static readonly int k_LightInvMatrixID = Shader.PropertyToID("_LightInvMatrix");
private static readonly int k_InnerRadiusMultID = Shader.PropertyToID("_InnerRadiusMult");
private static readonly int k_OuterAngleID = Shader.PropertyToID("_OuterAngle");
private static readonly int k_InnerAngleMultID = Shader.PropertyToID("_InnerAngleMult");
private static readonly int k_LightLookupID = Shader.PropertyToID("_LightLookup");
private static readonly int k_IsFullSpotlightID = Shader.PropertyToID("_IsFullSpotlight");
private static readonly int k_LightZDistanceID = Shader.PropertyToID("_LightZDistance");
private static readonly int k_PointLightCookieTexID = Shader.PropertyToID("_PointLightCookieTex");
// Resolves (once) the format used for light render textures: prefer the compact
// 11-bit float format, then half float, otherwise fall back to 8-bit UNorm.
private static GraphicsFormat GetRenderTextureFormat()
{
    if (s_HasSetupRenderTextureFormatToUse)
        return s_RenderTextureFormatToUse;

    const FormatUsage usage = FormatUsage.Linear | FormatUsage.Render;
    if (SystemInfo.IsFormatSupported(GraphicsFormat.B10G11R11_UFloatPack32, usage))
        s_RenderTextureFormatToUse = GraphicsFormat.B10G11R11_UFloatPack32;
    else if (SystemInfo.IsFormatSupported(GraphicsFormat.R16G16B16A16_SFloat, usage))
        s_RenderTextureFormatToUse = GraphicsFormat.R16G16B16A16_SFloat;

    s_HasSetupRenderTextureFormatToUse = true;
    return s_RenderTextureFormatToUse;
}
// Allocates (or reallocates) the temporary normals render target when the
// requested scale differs from the cached one; a no-op otherwise.
// The scale comparison doubles as the "already created at this size" check.
public static void CreateNormalMapRenderTexture(this IRenderPass2D pass, RenderingData renderingData, CommandBuffer cmd, float renderScale)
{
    if (renderScale != pass.rendererData.normalsRenderTargetScale)
    {
        // Release the previous target before replacing it at the new scale.
        if (pass.rendererData.isNormalsRenderTargetValid)
        {
            cmd.ReleaseTemporaryRT(pass.rendererData.normalsRenderTarget.id);
        }

        pass.rendererData.isNormalsRenderTargetValid = true;
        pass.rendererData.normalsRenderTargetScale = renderScale;

        var descriptor = new RenderTextureDescriptor(
            (int)(renderingData.cameraData.cameraTargetDescriptor.width * renderScale),
            (int)(renderingData.cameraData.cameraTargetDescriptor.height * renderScale));

        descriptor.graphicsFormat = GetRenderTextureFormat();
        descriptor.useMipMap = false;
        descriptor.autoGenerateMips = false;
        descriptor.depthBufferBits = 0;
        // Match the camera's MSAA so normals line up with the main render.
        descriptor.msaaSamples = renderingData.cameraData.cameraTargetDescriptor.msaaSamples;
        descriptor.dimension = TextureDimension.Tex2D;

        cmd.GetTemporaryRT(pass.rendererData.normalsRenderTarget.id, descriptor, FilterMode.Bilinear);
    }
}
// Builds the descriptor for per-blend-style light textures, scaled down from
// the camera target by the renderer's configured light texture scale.
public static RenderTextureDescriptor GetBlendStyleRenderTextureDesc(this IRenderPass2D pass, RenderingData renderingData)
{
    var scale = Mathf.Clamp(pass.rendererData.lightRenderTextureScale, 0.01f, 1.0f);
    var cameraDesc = renderingData.cameraData.cameraTargetDescriptor;

    var descriptor = new RenderTextureDescriptor(
        (int)(cameraDesc.width * scale),
        (int)(cameraDesc.height * scale))
    {
        graphicsFormat = GetRenderTextureFormat(),
        useMipMap = false,
        autoGenerateMips = false,
        depthBufferBits = 0,
        msaaSamples = 1,
        dimension = TextureDimension.Tex2D
    };

    return descriptor;
}
// Allocates the temporary render texture that receives a copy of the camera
// color buffer at the configured sorting-layer bound, optionally downsampled.
public static void CreateCameraSortingLayerRenderTexture(this IRenderPass2D pass, RenderingData renderingData, CommandBuffer cmd, Downsampling downsamplingMethod)
{
    // Map the downsampling mode to a resolution scale.
    float scale;
    if (downsamplingMethod == Downsampling._2xBilinear)
        scale = 0.5f;
    else if (downsamplingMethod == Downsampling._4xBox || downsamplingMethod == Downsampling._4xBilinear)
        scale = 0.25f;
    else
        scale = 1.0f;

    var cameraDesc = renderingData.cameraData.cameraTargetDescriptor;
    var descriptor = new RenderTextureDescriptor(
        (int)(cameraDesc.width * scale),
        (int)(cameraDesc.height * scale))
    {
        // Match the camera target's format so the copy is lossless.
        graphicsFormat = cameraDesc.graphicsFormat,
        useMipMap = false,
        autoGenerateMips = false,
        depthBufferBits = 0,
        msaaSamples = 1,
        dimension = TextureDimension.Tex2D
    };

    cmd.GetTemporaryRT(pass.rendererData.cameraSortingLayerRenderTarget.id, descriptor, FilterMode.Bilinear);
}
/// <summary>
/// Enables or disables the shader keyword associated with the given light blend style.
/// </summary>
public static void EnableBlendStyle(CommandBuffer cmd, int blendStyleIndex, bool enabled)
{
    // Each blend style slot has a dedicated keyword; flip it on the command buffer.
    var styleKeyword = k_UseBlendStyleKeywords[blendStyleIndex];
    if (enabled)
    {
        cmd.EnableShaderKeyword(styleKeyword);
    }
    else
    {
        cmd.DisableShaderKeyword(styleKeyword);
    }
}
/// <summary>
/// Invalidates cached normal-map state and releases the temporary render
/// textures owned by the 2D renderer pass.
/// </summary>
public static void ReleaseRenderTextures(this IRenderPass2D pass, CommandBuffer cmd)
{
    var rendererData = pass.rendererData;

    // The normals target is re-created on demand; mark it stale for the next frame.
    rendererData.isNormalsRenderTargetValid = false;
    rendererData.normalsRenderTargetScale = 0.0f;

    cmd.ReleaseTemporaryRT(rendererData.normalsRenderTarget.id);
    cmd.ReleaseTemporaryRT(rendererData.shadowsRenderTarget.id);
    cmd.ReleaseTemporaryRT(rendererData.cameraSortingLayerRenderTarget.id);
}
/// <summary>
/// Draws a point light's mesh at the light's transform, scaled uniformly by the
/// light's outer radius.
/// </summary>
public static void DrawPointLight(CommandBuffer cmd, Light2D light, Mesh lightMesh, Material material)
{
    var radius = light.pointLightOuterRadius;
    var trs = Matrix4x4.TRS(
        light.transform.position,
        light.transform.rotation,
        new Vector3(radius, radius, radius));
    cmd.DrawMesh(lightMesh, trs, material);
}
// Renders the given lights (for one blend style and one layer) into renderTexture.
// Lights are processed in batches so that no more than maxShadowTextureCount shadow
// textures are alive at once: shadows for a batch are pre-rendered first, the batch's
// lights are drawn reading those textures, then the textures are released.
private static void RenderLightSet(IRenderPass2D pass, RenderingData renderingData, int blendStyleIndex, CommandBuffer cmd, int layerToRender, RenderTargetIdentifier renderTexture, List<Light2D> lights)
{
    var maxShadowTextureCount = ShadowRendering.maxTextureCount;
    var requiresRTInit = true;

    // This case should never happen, but if it does it may cause an infinite loop later.
    if (maxShadowTextureCount < 1)
    {
        Debug.LogError("maxShadowTextureCount cannot be less than 1");
        return;
    }

    // Break up light rendering into batches for the purpose of shadow casting
    var lightIndex = 0;
    while (lightIndex < lights.Count)
    {
        var remainingLights = (uint)lights.Count - lightIndex;
        var batchedLights = 0;

        // Add lights to our batch until the number of shadow textures reach the maxShadowTextureCount
        var shadowLightCount = 0;
        while (batchedLights < remainingLights && shadowLightCount < maxShadowTextureCount)
        {
            var light = lights[lightIndex + batchedLights];
            if (light.shadowsEnabled && light.shadowIntensity > 0)
            {
                // Pre-render this light's shadows into shadow texture slot `shadowLightCount`.
                ShadowRendering.CreateShadowRenderTexture(pass, renderingData, cmd, shadowLightCount);
                ShadowRendering.PrerenderShadows(pass, renderingData, cmd, layerToRender, light, shadowLightCount, light.shadowIntensity);
                shadowLightCount++;
            }
            batchedLights++;
        }

        // Set the current RT to the light RT
        // (the shadow pre-render above rebinds render targets, so rebind after any batch
        // that produced shadows, and always on the very first batch).
        if (shadowLightCount > 0 || requiresRTInit)
        {
            cmd.SetRenderTarget(renderTexture, RenderBufferLoadAction.Load, RenderBufferStoreAction.Store, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
            requiresRTInit = false;
        }

        // Render all the lights.
        // shadowLightCount is re-counted here so each shadowed light reads the same slot
        // index it was pre-rendered into above.
        shadowLightCount = 0;
        for (var lightIndexOffset = 0; lightIndexOffset < batchedLights; lightIndexOffset++)
        {
            var light = lights[(int)(lightIndex + lightIndexOffset)];

            // Only draw non-global lights that belong to this blend style and light this layer.
            if (light != null &&
                light.lightType != Light2D.LightType.Global &&
                light.blendStyleIndex == blendStyleIndex &&
                light.IsLitLayer(layerToRender))
            {
                // Render light
                var lightMaterial = pass.rendererData.GetLightMaterial(light, false);
                if (lightMaterial == null)
                    continue;

                var lightMesh = light.lightMesh;
                if (lightMesh == null)
                    continue;

                // Set the shadow texture to read from
                if (light.shadowsEnabled && light.shadowIntensity > 0)
                    ShadowRendering.SetGlobalShadowTexture(cmd, light, shadowLightCount++);
                else
                    ShadowRendering.DisableGlobalShadowTexture(cmd);

                // Sprite lights sample their cookie sprite as a texture.
                if (light.lightType == Light2D.LightType.Sprite && light.lightCookieSprite != null && light.lightCookieSprite.texture != null)
                    cmd.SetGlobalTexture(k_CookieTexID, light.lightCookieSprite.texture);

                SetGeneralLightShaderGlobals(pass, cmd, light);

                // Point-light globals are also needed by any light using normal maps.
                if (light.normalMapQuality != Light2D.NormalMapQuality.Disabled || light.lightType == Light2D.LightType.Point)
                    SetPointLightShaderGlobals(pass, cmd, light);

                // Light code could be combined...
                if (light.lightType == (Light2D.LightType)Light2D.DeprecatedLightType.Parametric || light.lightType == Light2D.LightType.Freeform || light.lightType == Light2D.LightType.Sprite)
                {
                    // Shape-style lights draw their mesh in the light's local space.
                    cmd.DrawMesh(lightMesh, light.transform.localToWorldMatrix, lightMaterial);
                }
                else if (light.lightType == Light2D.LightType.Point)
                {
                    DrawPointLight(cmd, light, lightMesh, lightMaterial);
                }
            }
        }

        // Release all of the temporary shadow textures
        for (var releaseIndex = shadowLightCount - 1; releaseIndex >= 0; releaseIndex--)
            ShadowRendering.ReleaseShadowRenderTexture(cmd, releaseIndex);

        lightIndex += batchedLights;
    }
}
// Renders volumetric light meshes into renderTexture (with depth from depthTexture),
// using the same shadow-texture batching scheme as RenderLightSet, but driven by the
// volumetric shadow settings. A light's volume is drawn only on its top-most lit
// layer (endLayerValue comparison) so each volume renders exactly once.
public static void RenderLightVolumes(this IRenderPass2D pass, RenderingData renderingData, CommandBuffer cmd, int layerToRender, int endLayerValue, RenderTargetIdentifier renderTexture, RenderTargetIdentifier depthTexture, List<Light2D> lights)
{
    var maxShadowTextureCount = ShadowRendering.maxTextureCount;
    var requiresRTInit = true;

    // This case should never happen, but if it does it may cause an infinite loop later.
    if (maxShadowTextureCount < 1)
    {
        Debug.LogError("maxShadowTextureCount cannot be less than 1");
        return;
    }

    // Break up light rendering into batches for the purpose of shadow casting
    var lightIndex = 0;
    while (lightIndex < lights.Count)
    {
        var remainingLights = (uint)lights.Count - lightIndex;
        var batchedLights = 0;

        // Add lights to our batch until the number of shadow textures reach the maxShadowTextureCount
        var shadowLightCount = 0;
        while (batchedLights < remainingLights && shadowLightCount < maxShadowTextureCount)
        {
            var light = lights[lightIndex + batchedLights];
            if (light.volumetricShadowsEnabled && light.shadowVolumeIntensity > 0)
            {
                // Pre-render this light's volumetric shadows into slot `shadowLightCount`.
                ShadowRendering.CreateShadowRenderTexture(pass, renderingData, cmd, shadowLightCount);
                ShadowRendering.PrerenderShadows(pass, renderingData, cmd, layerToRender, light, shadowLightCount, light.shadowVolumeIntensity);
                shadowLightCount++;
            }
            batchedLights++;
        }

        // Set the current RT to the light RT
        // (rebind after any batch that rendered shadows, and always on the first batch).
        if (shadowLightCount > 0 || requiresRTInit)
        {
            cmd.SetRenderTarget(renderTexture, depthTexture);
            requiresRTInit = false;
        }

        // Render all the lights.
        // Re-count so each shadowed light reads the slot it was pre-rendered into.
        shadowLightCount = 0;
        for (var lightIndexOffset = 0; lightIndexOffset < batchedLights; lightIndexOffset++)
        {
            var light = lights[(int)(lightIndex + lightIndexOffset)];

            // Global lights have no volume; skip lights with volume rendering off.
            if (light.lightType == Light2D.LightType.Global)
                continue;

            if (light.volumeIntensity <= 0.0f || !light.volumeIntensityEnabled)
                continue;

            var topMostLayerValue = light.GetTopMostLitLayer();
            if (endLayerValue == topMostLayerValue) // this implies the layer is correct
            {
                var lightVolumeMaterial = pass.rendererData.GetLightMaterial(light, true);
                var lightMesh = light.lightMesh;

                // Set the shadow texture to read from
                if (light.volumetricShadowsEnabled && light.shadowVolumeIntensity > 0)
                    ShadowRendering.SetGlobalShadowTexture(cmd, light, shadowLightCount++);
                else
                    ShadowRendering.DisableGlobalShadowTexture(cmd);

                // Sprite lights sample their cookie sprite as a texture.
                if (light.lightType == Light2D.LightType.Sprite && light.lightCookieSprite != null && light.lightCookieSprite.texture != null)
                    cmd.SetGlobalTexture(k_CookieTexID, light.lightCookieSprite.texture);

                SetGeneralLightShaderGlobals(pass, cmd, light);

                // Is this needed
                if (light.normalMapQuality != Light2D.NormalMapQuality.Disabled || light.lightType == Light2D.LightType.Point)
                    SetPointLightShaderGlobals(pass, cmd, light);

                // Could be combined...
                if (light.lightType == Light2D.LightType.Parametric || light.lightType == Light2D.LightType.Freeform || light.lightType == Light2D.LightType.Sprite)
                {
                    cmd.DrawMesh(lightMesh, light.transform.localToWorldMatrix, lightVolumeMaterial);
                }
                else if (light.lightType == Light2D.LightType.Point)
                {
                    DrawPointLight(cmd, light, lightMesh, lightVolumeMaterial);
                }
            }
        }

        // Release all of the temporary shadow textures
        for (var releaseIndex = shadowLightCount - 1; releaseIndex >= 0; releaseIndex--)
            ShadowRendering.ReleaseShadowRenderTexture(cmd, releaseIndex);

        lightIndex += batchedLights;
    }
}
/// <summary>
/// Uploads the per-blend-style shader globals (blend factors and mask channel
/// filters) plus the shared falloff lookup texture.
/// </summary>
public static void SetShapeLightShaderGlobals(this IRenderPass2D pass, CommandBuffer cmd)
{
    var blendStyles = pass.rendererData.lightBlendStyles;

    // Only as many styles as there are shader property ids to receive them.
    var styleCount = Mathf.Min(blendStyles.Length, k_BlendFactorsPropIDs.Length);
    for (var i = 0; i < styleCount; i++)
    {
        var style = blendStyles[i];
        cmd.SetGlobalVector(k_BlendFactorsPropIDs[i], style.blendFactors);
        cmd.SetGlobalVector(k_MaskFilterPropIDs[i], style.maskTextureChannelFilter.mask);
        cmd.SetGlobalVector(k_InvertedFilterPropIDs[i], style.maskTextureChannelFilter.inverted);
    }

    cmd.SetGlobalTexture(k_FalloffLookupID, pass.rendererData.fallOffLookup);
}
// Inner radius expressed as a fraction of the outer radius.
// NOTE(review): divides by pointLightOuterRadius — presumably the editor keeps it non-zero; confirm.
private static float GetNormalizedInnerRadius(Light2D light)
    => light.pointLightInnerRadius / light.pointLightOuterRadius;
// Maps an angle in degrees onto the [0, 1] range used by the light shader math.
private static float GetNormalizedAngle(float angle) => angle / 360.0f;
// Builds the inverse of the light's TRS matrix scaled by its outer radius.
// This transforms positions into the light's normalized space for the lookup texture.
private static void GetScaledLightInvMatrix(Light2D light, out Matrix4x4 retMatrix)
{
    // The base light scale is identity, so the effective scale is just the outer radius.
    var radius = light.pointLightOuterRadius;
    var radiusScale = new Vector3(radius, radius, radius);

    var lightTransform = light.transform;
    var scaledLightMatrix = Matrix4x4.TRS(lightTransform.position, lightTransform.rotation, radiusScale);
    retMatrix = Matrix4x4.Inverse(scaledLightMatrix);
}
// Uploads the shader globals common to all 2D light types: falloff parameters,
// the light colour (premultiplied by intensity and alpha, forced opaque) and
// the volumetric opacity.
private static void SetGeneralLightShaderGlobals(IRenderPass2D pass, CommandBuffer cmd, Light2D light)
{
    // Fold the light's alpha and intensity into the colour, then force it opaque.
    var scaledColor = (light.intensity * light.color.a) * light.color;
    scaledColor.a = 1.0f;

    cmd.SetGlobalFloat(k_FalloffIntensityID, light.falloffIntensity);
    cmd.SetGlobalFloat(k_FalloffDistanceID, light.shapeLightFalloffSize);
    cmd.SetGlobalColor(k_LightColorID, scaledColor);
    cmd.SetGlobalFloat(k_VolumeOpacityID, light.volumeIntensity);
}
// Uploads the shader globals needed by point lights (and by lights using normal
// maps): the scaled inverse light matrix, normalized angle/radius terms, lookup
// and falloff textures, and the optional point-light cookie.
private static void SetPointLightShaderGlobals(IRenderPass2D pass, CommandBuffer cmd, Light2D light)
{
    // This is used for the lookup texture
    GetScaledLightInvMatrix(light, out var lightInverseMatrix);

    var innerRadius = GetNormalizedInnerRadius(light);
    var innerAngle = GetNormalizedAngle(light.pointLightInnerAngle);
    var outerAngle = GetNormalizedAngle(light.pointLightOuterAngle);
    // NOTE(review): divides by (1 - innerRadius) here and by (outerAngle - innerAngle)
    // below — presumably the Light2D editor keeps inner strictly less than outer so
    // these never hit zero; confirm against the inspector clamping.
    var innerRadiusMult = 1 / (1 - innerRadius);

    cmd.SetGlobalVector(k_LightPositionID, light.transform.position);
    cmd.SetGlobalMatrix(k_LightInvMatrixID, lightInverseMatrix);
    cmd.SetGlobalFloat(k_InnerRadiusMultID, innerRadiusMult);
    cmd.SetGlobalFloat(k_OuterAngleID, outerAngle);
    cmd.SetGlobalFloat(k_InnerAngleMultID, 1 / (outerAngle - innerAngle));
    cmd.SetGlobalTexture(k_LightLookupID, Light2DLookupTexture.GetLightLookupTexture());
    cmd.SetGlobalTexture(k_FalloffLookupID, pass.rendererData.fallOffLookup);
    cmd.SetGlobalFloat(k_FalloffIntensityID, light.falloffIntensity);
    // innerAngle == 1 means a full 360-degree inner cone, i.e. a full spotlight.
    cmd.SetGlobalFloat(k_IsFullSpotlightID, innerAngle == 1 ? 1.0f : 0.0f);
    cmd.SetGlobalFloat(k_LightZDistanceID, light.normalMapDistance);

    // Point-light cookies use a dedicated texture slot, separate from sprite cookies.
    if (light.lightCookieSprite != null && light.lightCookieSprite.texture != null)
        cmd.SetGlobalTexture(k_PointLightCookieTexID, light.lightCookieSprite.texture);
}
/// <summary>
/// Clears the render targets of blend styles that are both in use (per the
/// bitmask) and flagged dirty, then resets their dirty flags.
/// </summary>
public static void ClearDirtyLighting(this IRenderPass2D pass, CommandBuffer cmd, uint blendStylesUsed)
{
    var blendStyles = pass.rendererData.lightBlendStyles;
    for (var i = 0; i < blendStyles.Length; ++i)
    {
        var inUse = (blendStylesUsed & (1u << i)) != 0;
        if (!inUse || !blendStyles[i].isDirty)
            continue;

        cmd.SetRenderTarget(blendStyles[i].renderTargetHandle.Identifier());
        cmd.ClearRenderTarget(false, true, Color.black);
        blendStyles[i].isDirty = false;
    }
}
// Renders the scene's normals pass into the renderer's normals render target.
// When a depth target is supplied the normals RT is created at full scale and
// bound together with that depth; otherwise it follows the light RT scale.
// NOTE(review): the lightStats parameter is not referenced in this body.
public static void RenderNormals(this IRenderPass2D pass, ScriptableRenderContext context, RenderingData renderingData, DrawingSettings drawSettings, FilteringSettings filterSettings, RenderTargetIdentifier depthTarget, CommandBuffer cmd, LightStats lightStats)
{
    using (new ProfilingScope(cmd, m_ProfilingSampler))
    {
        // figure out the scale
        var normalRTScale = 0.0f;

        // With an external depth target the normals RT must match its full size.
        if (depthTarget != BuiltinRenderTextureType.None)
            normalRTScale = 1.0f;
        else
            normalRTScale = Mathf.Clamp(pass.rendererData.lightRenderTextureScale, 0.01f, 1.0f);

        pass.CreateNormalMapRenderTexture(renderingData, cmd, normalRTScale);

        if (depthTarget != BuiltinRenderTextureType.None)
        {
            // Bind colour + existing depth; depth is loaded so prior contents are kept.
            cmd.SetRenderTarget(
                pass.rendererData.normalsRenderTarget.Identifier(),
                RenderBufferLoadAction.DontCare,
                RenderBufferStoreAction.Store,
                depthTarget,
                RenderBufferLoadAction.Load,
                RenderBufferStoreAction.DontCare);
        }
        else
            cmd.SetRenderTarget(pass.rendererData.normalsRenderTarget.Identifier(), RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store);

        cmd.ClearRenderTarget(true, true, k_NormalClearColor);

        // Flush the queued commands so the DrawRenderers call below sees the
        // render target setup and clear.
        context.ExecuteCommandBuffer(cmd);
        cmd.Clear();

        drawSettings.SetShaderPassName(0, k_NormalsRenderingPassName);
        context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref filterSettings);
    }
}
// For each blend style used by the layer batch: begin a profiling sample, get
// (or lazily create) the style's render texture, clear it to the layer's global
// light colour, and render the visible lights into it. Styles with no lights
// still get a tiny 4x4 texture so shaders have something valid to sample.
public static void RenderLights(this IRenderPass2D pass, RenderingData renderingData, CommandBuffer cmd, int layerToRender, ref LayerBatch layerBatch, ref RenderTextureDescriptor rtDesc)
{
    var blendStyles = pass.rendererData.lightBlendStyles;

    for (var i = 0; i < blendStyles.Length; ++i)
    {
        // Skip styles this batch does not use at all.
        if ((layerBatch.lightStats.blendStylesUsed & (uint)(1 << i)) == 0)
            continue;

        var sampleName = blendStyles[i].name;
        cmd.BeginSample(sampleName);

        // Fall back to black when no global light colour is registered for this layer/style.
        if (!Light2DManager.GetGlobalColor(layerToRender, i, out var clearColor))
            clearColor = Color.black;

        var anyLights = (layerBatch.lightStats.blendStylesWithLights & (uint)(1 << i)) != 0;

        var desc = rtDesc;
        if (!anyLights) // No lights -- create tiny texture
            desc.width = desc.height = 4;
        var identifier = layerBatch.GetRTId(cmd, desc, i);

        cmd.SetRenderTarget(identifier,
            RenderBufferLoadAction.DontCare,
            RenderBufferStoreAction.Store,
            RenderBufferLoadAction.DontCare,
            RenderBufferStoreAction.DontCare);
        cmd.ClearRenderTarget(false, true, clearColor);

        if (anyLights)
        {
            RenderLightSet(
                pass, renderingData,
                i,
                cmd,
                layerToRender,
                identifier,
                pass.rendererData.lightCullResult.visibleLights
            );
        }

        cmd.EndSample(sampleName);
    }
}
// Stores the source/destination blend factors on the material; the light
// shaders read them back as float-typed properties.
private static void SetBlendModes(Material material, BlendMode src, BlendMode dst)
{
    var srcFactor = (float)src;
    var dstFactor = (float)dst;
    material.SetFloat(k_SrcBlendID, srcFactor);
    material.SetFloat(k_DstBlendID, dstFactor);
}
// Computes the material-cache key for a light by packing every property that
// affects shader/keyword selection into a bit per property. Bit layout matches
// the original running-bitIndex scheme:
//   bit 0: volume pass          bit 1: shape (non-point) light
//   bit 2: additive overlap     bit 3: sprite light
//   bit 4: point-light cookie   bit 5: fast normal-map quality
//   bit 6: normal mapping enabled
private static uint GetLightMaterialIndex(Light2D light, bool isVolume)
{
    var isPoint = light.isPointLight;

    uint index = 0u;
    if (isVolume)
        index |= 1u << 0;
    if (!isPoint)
        index |= 1u << 1;
    if (light.overlapOperation != Light2D.OverlapOperation.AlphaBlend)
        index |= 1u << 2;
    if (light.lightType == Light2D.LightType.Sprite)
        index |= 1u << 3;
    if (isPoint && light.lightCookieSprite != null && light.lightCookieSprite.texture != null)
        index |= 1u << 4;
    if (isPoint && light.normalMapQuality == Light2D.NormalMapQuality.Fast)
        index |= 1u << 5;
    if (light.normalMapQuality != Light2D.NormalMapQuality.Disabled)
        index |= 1u << 6;
    return index;
}
// Creates and configures a material for the given light: picks the shader by
// light kind (point vs shape, volume vs regular), sets blend modes for regular
// lights, and enables the keywords matching the light's settings.
private static Material CreateLightMaterial(Renderer2DData rendererData, Light2D light, bool isVolume)
{
    var isPoint = light.isPointLight;
    Material material;

    if (isVolume)
    {
        var volumeShader = isPoint ? rendererData.pointLightVolumeShader : rendererData.shapeLightVolumeShader;
        material = CoreUtils.CreateEngineMaterial(volumeShader);
    }
    else
    {
        var lightShader = isPoint ? rendererData.pointLightShader : rendererData.shapeLightShader;
        material = CoreUtils.CreateEngineMaterial(lightShader);

        // Regular (non-volume) lights choose their blending from the overlap operation.
        if (light.overlapOperation == Light2D.OverlapOperation.Additive)
        {
            SetBlendModes(material, BlendMode.One, BlendMode.One);
            material.EnableKeyword(k_UseAdditiveBlendingKeyword);
        }
        else
        {
            SetBlendModes(material, BlendMode.SrcAlpha, BlendMode.OneMinusSrcAlpha);
        }
    }

    if (light.lightType == Light2D.LightType.Sprite)
        material.EnableKeyword(k_SpriteLightKeyword);

    if (isPoint && light.lightCookieSprite != null && light.lightCookieSprite.texture != null)
        material.EnableKeyword(k_UsePointLightCookiesKeyword);

    if (isPoint && light.normalMapQuality == Light2D.NormalMapQuality.Fast)
        material.EnableKeyword(k_LightQualityFastKeyword);

    if (light.normalMapQuality != Light2D.NormalMapQuality.Disabled)
        material.EnableKeyword(k_UseNormalMap);

    return material;
}
// Returns the cached material matching this light's configuration, creating
// and memoizing one on first use.
private static Material GetLightMaterial(this Renderer2DData rendererData, Light2D light, bool isVolume)
{
    var key = GetLightMaterialIndex(light, isVolume);
    if (rendererData.lightMaterials.TryGetValue(key, out var cached))
        return cached;

    var created = CreateLightMaterial(rendererData, light, isVolume);
    rendererData.lightMaterials[key] = created;
    return created;
}
}
}

View File

@@ -0,0 +1,311 @@
using System.Collections.Generic;
using UnityEngine.Rendering;
using UnityEngine.Scripting.APIUpdating;
namespace UnityEngine.Experimental.Rendering.Universal
{
/// <summary>
/// The Pixel Perfect Camera component ensures your pixel art remains crisp and clear at different resolutions, and stable in motion.
/// </summary>
[DisallowMultipleComponent]
[AddComponentMenu("Rendering/2D/Pixel Perfect Camera (Experimental)")]
[RequireComponent(typeof(Camera))]
[MovedFrom("UnityEngine.Experimental.Rendering.LWRP")]
[HelpURL("https://docs.unity3d.com/Packages/com.unity.render-pipelines.universal@latest/index.html?subfolder=/manual/2d-pixelperfect.html%23properties")]
public class PixelPerfectCamera : MonoBehaviour, IPixelPerfectCamera
{
    /// <summary>
    /// Match this value to the Pixels Per Unit values of all Sprites within the Scene.
    /// Setters on this and the reference-resolution properties clamp to a minimum of 1.
    /// </summary>
    public int assetsPPU { get { return m_AssetsPPU; } set { m_AssetsPPU = value > 0 ? value : 1; } }

    /// <summary>
    /// The original horizontal resolution your Assets are designed for.
    /// </summary>
    public int refResolutionX { get { return m_RefResolutionX; } set { m_RefResolutionX = value > 0 ? value : 1; } }

    /// <summary>
    /// Original vertical resolution your Assets are designed for.
    /// </summary>
    public int refResolutionY { get { return m_RefResolutionY; } set { m_RefResolutionY = value > 0 ? value : 1; } }

    /// <summary>
    /// Set to true to have the Scene rendered to a temporary texture set as close as possible to the Reference Resolution,
    /// while maintaining the full screen aspect ratio. This temporary texture is then upscaled to fit the full screen.
    /// </summary>
    public bool upscaleRT { get { return m_UpscaleRT; } set { m_UpscaleRT = value; } }

    /// <summary>
    /// Set to true to prevent subpixel movement and make Sprites appear to move in pixel-by-pixel increments.
    /// Only applicable when upscaleRT is false.
    /// </summary>
    public bool pixelSnapping { get { return m_PixelSnapping; } set { m_PixelSnapping = value; } }

    /// <summary>
    /// Set to true to crop the viewport with black bars to match refResolutionX in the horizontal direction.
    /// </summary>
    public bool cropFrameX { get { return m_CropFrameX; } set { m_CropFrameX = value; } }

    /// <summary>
    /// Set to true to crop the viewport with black bars to match refResolutionY in the vertical direction.
    /// </summary>
    public bool cropFrameY { get { return m_CropFrameY; } set { m_CropFrameY = value; } }

    /// <summary>
    /// Set to true to expand the viewport to fit the screen resolution while maintaining the viewport's aspect ratio.
    /// Only applicable when both cropFrameX and cropFrameY are true.
    /// </summary>
    public bool stretchFill { get { return m_StretchFill; } set { m_StretchFill = value; } }

    /// <summary>
    /// Ratio of the rendered Sprites compared to their original size (readonly).
    /// In Cinemachine compatibility mode the virtual-camera zoom factors in;
    /// with upscaleRT both zoom factors multiply together.
    /// </summary>
    public int pixelRatio
    {
        get
        {
            if (m_CinemachineCompatibilityMode)
            {
                if (m_UpscaleRT)
                    return m_Internal.zoom * m_Internal.cinemachineVCamZoom;
                else
                    return m_Internal.cinemachineVCamZoom;
            }
            else
            {
                return m_Internal.zoom;
            }
        }
    }

    /// <summary>
    /// Round an arbitrary position to an integer pixel position. Works in world space.
    /// </summary>
    /// <param name="position"> The position you want to round.</param>
    /// <returns>
    /// The rounded pixel position.
    /// Depending on the values of upscaleRT and pixelSnapping, it could be a screen pixel position or an art pixel position.
    /// </returns>
    public Vector3 RoundToPixel(Vector3 position)
    {
        float unitsPerPixel = m_Internal.unitsPerPixel;
        // unitsPerPixel is 0 before the internals have been calculated; pass through unchanged.
        if (unitsPerPixel == 0.0f)
            return position;

        Vector3 result;
        result.x = Mathf.Round(position.x / unitsPerPixel) * unitsPerPixel;
        result.y = Mathf.Round(position.y / unitsPerPixel) * unitsPerPixel;
        result.z = Mathf.Round(position.z / unitsPerPixel) * unitsPerPixel;
        return result;
    }

    /// <summary>
    /// Find a pixel-perfect orthographic size as close to targetOrthoSize as possible. Used by Cinemachine to solve compatibility issues with Pixel Perfect Camera.
    /// Calling this also switches the component into Cinemachine compatibility mode.
    /// </summary>
    /// <param name="targetOrthoSize">Orthographic size from the live Cinemachine Virtual Camera.</param>
    /// <returns>The corrected orthographic size.</returns>
    public float CorrectCinemachineOrthoSize(float targetOrthoSize)
    {
        m_CinemachineCompatibilityMode = true;

        // Internals may not exist yet (before Awake); return the input uncorrected.
        if (m_Internal == null)
            return targetOrthoSize;
        else
            return m_Internal.CorrectCinemachineOrthoSize(targetOrthoSize);
    }

    // Serialized backing fields for the public properties above.
    [SerializeField] int m_AssetsPPU = 100;
    [SerializeField] int m_RefResolutionX = 320;
    [SerializeField] int m_RefResolutionY = 180;
    [SerializeField] bool m_UpscaleRT;
    [SerializeField] bool m_PixelSnapping;
    [SerializeField] bool m_CropFrameX;
    [SerializeField] bool m_CropFrameY;
    [SerializeField] bool m_StretchFill;

    Camera m_Camera;                            // the camera this component drives (RequireComponent guarantees one)
    PixelPerfectCameraInternal m_Internal;      // does all the pixel-perfect math
    bool m_CinemachineCompatibilityMode;        // set once CorrectCinemachineOrthoSize is called

    // True when the component should actually affect rendering (play mode, or edit mode with runInEditMode).
    internal bool isRunning
    {
        get
        {
#if UNITY_EDITOR
            return (Application.isPlaying || runInEditMode) && enabled;
#else
            return enabled;
#endif
        }
    }

    // Filter used when the offscreen RT is blitted to the screen: point for crisp
    // pixels, bilinear when stretch-fill is active (or when not running).
    internal FilterMode finalBlitFilterMode
    {
        get
        {
            if (!isRunning)
                return FilterMode.Bilinear;
            else
                return m_Internal.useStretchFill ? FilterMode.Bilinear : FilterMode.Point;
        }
    }

    // Size of the offscreen RT the render pipeline should allocate; zero when not running.
    internal Vector2Int offscreenRTSize
    {
        get
        {
            if (!isRunning)
                return Vector2Int.zero;
            else
                return new Vector2Int(m_Internal.offscreenRTWidth, m_Internal.offscreenRTHeight);
        }
    }

    // Resolution of the camera's output: its target texture if set, otherwise the screen.
    Vector2Int cameraRTSize
    {
        get
        {
            var targetTexture = m_Camera.targetTexture;
            return targetTexture == null ? new Vector2Int(Screen.width, Screen.height) : new Vector2Int(targetTexture.width, targetTexture.height);
        }
    }

    // Snap camera position to pixels using Camera.worldToCameraMatrix.
    void PixelSnap()
    {
        Vector3 cameraPosition = m_Camera.transform.position;
        Vector3 roundedCameraPosition = RoundToPixel(cameraPosition);
        Vector3 offset = roundedCameraPosition - cameraPosition;
        offset.z = -offset.z;
        Matrix4x4 offsetMatrix = Matrix4x4.TRS(-offset, Quaternion.identity, new Vector3(1.0f, 1.0f, -1.0f));

        m_Camera.worldToCameraMatrix = offsetMatrix * m_Camera.transform.worldToLocalMatrix;
    }

    void Awake()
    {
        m_Camera = GetComponent<Camera>();
        m_Internal = new PixelPerfectCameraInternal(this);

        // Remember the user's ortho size so OnDisable can restore it.
        m_Internal.originalOrthoSize = m_Camera.orthographicSize;

        // Case 1249076: Initialize internals immediately after the scene is loaded,
        // as the Cinemachine extension may need them before OnBeginContextRendering is called.
        var rtSize = cameraRTSize;
        m_Internal.CalculateCameraProperties(rtSize.x, rtSize.y);
    }

    // Recalculates pixel-perfect properties and applies them to the camera once
    // per frame, before any camera in the context renders.
    void OnBeginContextRendering(ScriptableRenderContext context, List<Camera> cameras)
    {
        var rtSize = cameraRTSize;
        m_Internal.CalculateCameraProperties(rtSize.x, rtSize.y);

        PixelSnap();

        if (m_Internal.useOffscreenRT)
            m_Camera.pixelRect = m_Internal.CalculateFinalBlitPixelRect(rtSize.x, rtSize.y);
        else
            m_Camera.rect = new Rect(0.0f, 0.0f, 1.0f, 1.0f);

        // In Cinemachine compatibility mode the control over orthographic size should
        // be given to the virtual cameras, whose orthographic sizes will be corrected to
        // be pixel-perfect. This way when there's blending between virtual cameras, we
        // can have temporary not-pixel-perfect but smooth transitions.
        if (!m_CinemachineCompatibilityMode)
        {
            m_Camera.orthographicSize = m_Internal.orthoSize;
        }
    }

    // Enables sprite pixel-snapping for our camera only, for the duration of its render.
    void OnBeginCameraRendering(ScriptableRenderContext context, Camera camera)
    {
        if (camera == m_Camera)
            UnityEngine.U2D.PixelPerfectRendering.pixelSnapSpacing = m_Internal.unitsPerPixel;
    }

    void OnEndCameraRendering(ScriptableRenderContext context, Camera camera)
    {
        if (camera == m_Camera)
            UnityEngine.U2D.PixelPerfectRendering.pixelSnapSpacing = 0.0f;
    }

    void OnEnable()
    {
        m_CinemachineCompatibilityMode = false;

        // Hook into the SRP render-event callbacks; unhooked again in OnDisable.
        RenderPipelineManager.beginContextRendering += OnBeginContextRendering;
        RenderPipelineManager.beginCameraRendering += OnBeginCameraRendering;
        RenderPipelineManager.endCameraRendering += OnEndCameraRendering;

#if UNITY_EDITOR
        if (!UnityEditor.EditorApplication.isPlaying)
            UnityEditor.EditorApplication.playModeStateChanged += OnPlayModeChanged;
#endif
    }

    internal void OnDisable()
    {
        RenderPipelineManager.beginContextRendering -= OnBeginContextRendering;
        RenderPipelineManager.beginCameraRendering -= OnBeginCameraRendering;
        RenderPipelineManager.endCameraRendering -= OnEndCameraRendering;

        // Restore the camera state this component modified.
        m_Camera.rect = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
        m_Camera.orthographicSize = m_Internal.originalOrthoSize;
        m_Camera.ResetWorldToCameraMatrix();

#if UNITY_EDITOR
        if (!UnityEditor.EditorApplication.isPlaying)
            UnityEditor.EditorApplication.playModeStateChanged -= OnPlayModeChanged;
#endif
    }

#if DEVELOPMENT_BUILD || UNITY_EDITOR
    // Show on-screen warning about invalid render resolutions.
    void OnGUI()
    {
#if UNITY_EDITOR
        if (!UnityEditor.EditorApplication.isPlaying && !runInEditMode)
            return;
#endif

        Color oldColor = GUI.color;
        GUI.color = Color.red;

        Vector2Int renderResolution = Vector2Int.zero;
        renderResolution.x = m_Internal.useOffscreenRT ? m_Internal.offscreenRTWidth : m_Camera.pixelWidth;
        renderResolution.y = m_Internal.useOffscreenRT ? m_Internal.offscreenRTHeight : m_Camera.pixelHeight;

        if (renderResolution.x % 2 != 0 || renderResolution.y % 2 != 0)
        {
            string warning = string.Format("Rendering at an odd-numbered resolution ({0} * {1}). Pixel Perfect Camera may not work properly in this situation.", renderResolution.x, renderResolution.y);
            GUILayout.Box(warning);
        }

        var targetTexture = m_Camera.targetTexture;
        Vector2Int rtSize = targetTexture == null ? new Vector2Int(Screen.width, Screen.height) : new Vector2Int(targetTexture.width, targetTexture.height);

        if (rtSize.x < refResolutionX || rtSize.y < refResolutionY)
        {
            GUILayout.Box("Target resolution is smaller than the reference resolution. Image may appear stretched or cropped.");
        }

        GUI.color = oldColor;
    }
#endif

#if UNITY_EDITOR
    void OnPlayModeChanged(UnityEditor.PlayModeStateChange state)
    {
        // Stop running in edit mode when entering play mode.
        if (state == UnityEditor.PlayModeStateChange.ExitingEditMode)
        {
            runInEditMode = false;
            OnDisable();
        }
    }
#endif
}
}

View File

@@ -0,0 +1,235 @@
using System;
namespace UnityEngine.Experimental.Rendering.Universal
{
// Abstraction over the user-facing Pixel Perfect Camera settings, consumed by
// PixelPerfectCameraInternal so the math can be tested/driven independently of
// the MonoBehaviour component.
internal interface IPixelPerfectCamera
{
    int assetsPPU { get; set; }         // pixels-per-unit the art was authored at
    int refResolutionX { get; set; }    // reference (design) resolution, horizontal
    int refResolutionY { get; set; }    // reference (design) resolution, vertical
    bool upscaleRT { get; set; }        // render small offscreen RT, then upscale
    bool pixelSnapping { get; set; }    // snap sprites to pixel increments (upscaleRT off)
    bool cropFrameX { get; set; }       // letterbox horizontally to the reference width
    bool cropFrameY { get; set; }       // letterbox vertically to the reference height
    bool stretchFill { get; set; }      // stretch the cropped frame to fill the screen
}
[Serializable]
internal class PixelPerfectCameraInternal : ISerializationCallbackReceiver
{
// Case 1061634:
// In order for this class to survive hot reloading, we need to make the fields serializable.
// Unity can't serialize an interface object, but does properly serialize UnityEngine.Object.
// So we cast the reference to PixelPerfectCamera (which inherits UnityEngine.Object)
// before serialization happens, and restore the interface reference after deserialization.
[NonSerialized]
IPixelPerfectCamera m_Component;
PixelPerfectCamera m_SerializableComponent;
internal float originalOrthoSize;
internal bool hasPostProcessLayer;
internal bool cropFrameXAndY = false;
internal bool cropFrameXOrY = false;
internal bool useStretchFill = false;
internal int zoom = 1;
internal bool useOffscreenRT = false;
internal int offscreenRTWidth = 0;
internal int offscreenRTHeight = 0;
internal Rect pixelRect = Rect.zero;
internal float orthoSize = 1.0f;
internal float unitsPerPixel = 0.0f;
internal int cinemachineVCamZoom = 1;
internal PixelPerfectCameraInternal(IPixelPerfectCamera component)
{
m_Component = component;
}
public void OnBeforeSerialize()
{
m_SerializableComponent = m_Component as PixelPerfectCamera;
}
public void OnAfterDeserialize()
{
if (m_SerializableComponent != null)
m_Component = m_SerializableComponent;
}
internal void CalculateCameraProperties(int screenWidth, int screenHeight)
{
int assetsPPU = m_Component.assetsPPU;
int refResolutionX = m_Component.refResolutionX;
int refResolutionY = m_Component.refResolutionY;
bool upscaleRT = m_Component.upscaleRT;
bool pixelSnapping = m_Component.pixelSnapping;
bool cropFrameX = m_Component.cropFrameX;
bool cropFrameY = m_Component.cropFrameY;
bool stretchFill = m_Component.stretchFill;
cropFrameXAndY = cropFrameY && cropFrameX;
cropFrameXOrY = cropFrameY || cropFrameX;
useStretchFill = cropFrameXAndY && stretchFill;
// zoom level (PPU scale)
int verticalZoom = screenHeight / refResolutionY;
int horizontalZoom = screenWidth / refResolutionX;
zoom = Math.Max(1, Math.Min(verticalZoom, horizontalZoom));
// off-screen RT
useOffscreenRT = false;
offscreenRTWidth = 0;
offscreenRTHeight = 0;
if (cropFrameXOrY)
{
useOffscreenRT = true;
if (!upscaleRT)
{
if (cropFrameXAndY)
{
offscreenRTWidth = zoom * refResolutionX;
offscreenRTHeight = zoom * refResolutionY;
}
else if (cropFrameY)
{
offscreenRTWidth = screenWidth;
offscreenRTHeight = zoom * refResolutionY;
}
else // crop frame X
{
offscreenRTWidth = zoom * refResolutionX;
offscreenRTHeight = screenHeight;
}
}
else
{
if (cropFrameXAndY)
{
offscreenRTWidth = refResolutionX;
offscreenRTHeight = refResolutionY;
}
else if (cropFrameY)
{
offscreenRTWidth = screenWidth / zoom / 2 * 2; // Make sure it's an even number by / 2 * 2.
offscreenRTHeight = refResolutionY;
}
else // crop frame X
{
offscreenRTWidth = refResolutionX;
offscreenRTHeight = screenHeight / zoom / 2 * 2; // Make sure it's an even number by / 2 * 2.
}
}
}
else if (upscaleRT && zoom > 1)
{
useOffscreenRT = true;
offscreenRTWidth = screenWidth / zoom / 2 * 2; // Make sure it's an even number by / 2 * 2.
offscreenRTHeight = screenHeight / zoom / 2 * 2;
}
// viewport
if (useOffscreenRT)
{
// When we ask the render pipeline to create the offscreen RT for us, the size of the RT is determined by VP size.
// That's why we set the VP size to be (m_OffscreenRTWidth, m_OffscreenRTHeight) here.
pixelRect = new Rect(0.0f, 0.0f, offscreenRTWidth, offscreenRTHeight);
}
else
pixelRect = Rect.zero;
// orthographic size
if (cropFrameY)
orthoSize = (refResolutionY * 0.5f) / assetsPPU;
else if (cropFrameX)
{
float aspect = (pixelRect == Rect.zero) ? (float)screenWidth / screenHeight : pixelRect.width / pixelRect.height;
orthoSize = ((refResolutionX / aspect) * 0.5f) / assetsPPU;
}
else if (upscaleRT && zoom > 1)
orthoSize = (offscreenRTHeight * 0.5f) / assetsPPU;
else
{
float pixelHeight = (pixelRect == Rect.zero) ? screenHeight : pixelRect.height;
orthoSize = (pixelHeight * 0.5f) / (zoom * assetsPPU);
}
// Camera pixel grid spacing
if (upscaleRT || (!upscaleRT && pixelSnapping))
unitsPerPixel = 1.0f / assetsPPU;
else
unitsPerPixel = 1.0f / (zoom * assetsPPU);
}
internal Rect CalculateFinalBlitPixelRect(int screenWidth, int screenHeight)
{
// This VP is used when the internal temp RT is blitted back to screen.
Rect pixelRect = new Rect();
if (useStretchFill)
{
// stretch (fit either width or height)
float screenAspect = (float)screenWidth / screenHeight;
float cameraAspect = (float)m_Component.refResolutionX / m_Component.refResolutionY;
if (screenAspect > cameraAspect)
{
pixelRect.height = screenHeight;
pixelRect.width = screenHeight * cameraAspect;
pixelRect.x = (screenWidth - (int)pixelRect.width) / 2;
pixelRect.y = 0;
}
else
{
pixelRect.width = screenWidth;
pixelRect.height = screenWidth / cameraAspect;
pixelRect.y = (screenHeight - (int)pixelRect.height) / 2;
pixelRect.x = 0;
}
}
else
{
// center
if (m_Component.upscaleRT)
{
pixelRect.height = zoom * offscreenRTHeight;
pixelRect.width = zoom * offscreenRTWidth;
}
else
{
pixelRect.height = offscreenRTHeight;
pixelRect.width = offscreenRTWidth;
}
pixelRect.x = (screenWidth - (int)pixelRect.width) / 2;
pixelRect.y = (screenHeight - (int)pixelRect.height) / 2;
}
return pixelRect;
}
// Find a pixel-perfect orthographic size as close to targetOrthoSize as possible.
// Also updates unitsPerPixel when the effective zoom is the Cinemachine one.
internal float CorrectCinemachineOrthoSize(float targetOrthoSize)
{
    // When upscaling, the pixel-perfect ortho size is already zoom-independent;
    // otherwise fold the current integer zoom into the reference value.
    float referenceOrthoSize = m_Component.upscaleRT ? orthoSize : zoom * orthoSize;

    // Snap to the nearest integer zoom level (at least 1) and derive the corrected size.
    cinemachineVCamZoom = Math.Max(1, Mathf.RoundToInt(referenceOrthoSize / targetOrthoSize));
    float correctedOrthoSize = referenceOrthoSize / cinemachineVCamZoom;

    // In this case the actual zoom level is cinemachineVCamZoom instead of zoom.
    if (!m_Component.upscaleRT && !m_Component.pixelSnapping)
        unitsPerPixel = 1.0f / (cinemachineVCamZoom * m_Component.assetsPPU);

    return correctedOrthoSize;
}
}
}

View File

@@ -0,0 +1,257 @@
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;
using UnityEngine.Rendering.Universal.Internal;
namespace UnityEngine.Experimental.Rendering.Universal
{
/// <summary>
/// The ScriptableRenderer used by the 2D Renderer. Drives 2D lighting, optional
/// post processing, Pixel Perfect Camera support, and the final blit to screen.
/// </summary>
internal class Renderer2D : ScriptableRenderer
{
    Render2DLightingPass m_Render2DLightingPass;
    PixelPerfectBackgroundPass m_PixelPerfectBackgroundPass;
    FinalBlitPass m_FinalBlitPass;
    Light2DCullResult m_LightCullResult;

    private static readonly ProfilingSampler m_ProfilingSampler = new ProfilingSampler("Create Camera Textures");

    bool m_UseDepthStencilBuffer = true;
    bool m_CreateColorTexture;
    bool m_CreateDepthTexture;

    // Handles for the intermediate camera color/depth textures shared across stacked cameras.
    readonly RenderTargetHandle k_ColorTextureHandle;
    readonly RenderTargetHandle k_DepthTextureHandle;

    Material m_BlitMaterial;
    Material m_SamplingMaterial;

    Renderer2DData m_Renderer2DData;

    internal bool createColorTexture => m_CreateColorTexture;
    internal bool createDepthTexture => m_CreateDepthTexture;

    PostProcessPasses m_PostProcessPasses;
    internal ColorGradingLutPass colorGradingLutPass { get => m_PostProcessPasses.colorGradingLutPass; }
    internal PostProcessPass postProcessPass { get => m_PostProcessPasses.postProcessPass; }
    internal PostProcessPass finalPostProcessPass { get => m_PostProcessPasses.finalPostProcessPass; }
    internal RenderTargetHandle afterPostProcessColorHandle { get => m_PostProcessPasses.afterPostProcessColor; }
    internal RenderTargetHandle colorGradingLutHandle { get => m_PostProcessPasses.colorGradingLut; }

    public Renderer2D(Renderer2DData data) : base(data)
    {
        m_BlitMaterial = CoreUtils.CreateEngineMaterial(data.blitShader);
        m_SamplingMaterial = CoreUtils.CreateEngineMaterial(data.samplingShader);

        m_Render2DLightingPass = new Render2DLightingPass(data, m_BlitMaterial, m_SamplingMaterial);
        // we should determine why clearing the camera target is set so late in the events... sounds like it could be earlier
        m_PixelPerfectBackgroundPass = new PixelPerfectBackgroundPass(RenderPassEvent.AfterRenderingTransparents);
        m_FinalBlitPass = new FinalBlitPass(RenderPassEvent.AfterRendering + 1, m_BlitMaterial);
        m_PostProcessPasses = new PostProcessPasses(data.postProcessData, m_BlitMaterial);

        m_UseDepthStencilBuffer = data.useDepthStencilBuffer;

        // We probably should declare these names in the base class,
        // as they must be the same across all ScriptableRenderer types for camera stacking to work.
        k_ColorTextureHandle.Init("_CameraColorTexture");
        k_DepthTextureHandle.Init("_CameraDepthAttachment");

        m_Renderer2DData = data;

        supportedRenderingFeatures = new RenderingFeatures()
        {
            cameraStacking = true,
        };

        m_LightCullResult = new Light2DCullResult();
        m_Renderer2DData.lightCullResult = m_LightCullResult;
    }

    protected override void Dispose(bool disposing)
    {
        m_PostProcessPasses.Dispose();

        // Bug fix: the constructor creates these materials with CoreUtils.CreateEngineMaterial
        // but they were never destroyed, leaking a pair of materials every time this renderer
        // is re-created (renderer data changes, editor domain reloads, etc.).
        CoreUtils.Destroy(m_BlitMaterial);
        CoreUtils.Destroy(m_SamplingMaterial);
    }

    public Renderer2DData GetRenderer2DData()
    {
        return m_Renderer2DData;
    }

    // Decides whether intermediate color/depth textures are needed for this camera and,
    // if so, allocates them on the command buffer and returns the handles to render into.
    void CreateRenderTextures(
        ref CameraData cameraData,
        bool forceCreateColorTexture,
        FilterMode colorTextureFilterMode,
        CommandBuffer cmd,
        out RenderTargetHandle colorTargetHandle,
        out RenderTargetHandle depthTargetHandle)
    {
        ref var cameraTargetDescriptor = ref cameraData.cameraTargetDescriptor;

        if (cameraData.renderType == CameraRenderType.Base)
        {
            // Any condition that prevents rendering straight to the backbuffer forces an
            // intermediate color texture (post FX, HDR, scene view, custom viewport, ...).
            m_CreateColorTexture = forceCreateColorTexture
                || cameraData.postProcessEnabled
                || cameraData.isHdrEnabled
                || cameraData.isSceneViewCamera
                || !cameraData.isDefaultViewport
                || cameraData.requireSrgbConversion
                || !m_UseDepthStencilBuffer
                || !cameraData.resolveFinalTarget
                || m_Renderer2DData.useCameraSortingLayerTexture
                || !Mathf.Approximately(cameraData.renderScale, 1.0f);

            // A separate depth texture is only needed when overlay cameras will render later.
            m_CreateDepthTexture = !cameraData.resolveFinalTarget && m_UseDepthStencilBuffer;

            colorTargetHandle = m_CreateColorTexture ? k_ColorTextureHandle : RenderTargetHandle.CameraTarget;
            depthTargetHandle = m_CreateDepthTexture ? k_DepthTextureHandle : colorTargetHandle;

            if (m_CreateColorTexture)
            {
                var colorDescriptor = cameraTargetDescriptor;
                // When depth lives in its own texture (or is disabled), the color RT carries no depth bits.
                colorDescriptor.depthBufferBits = m_CreateDepthTexture || !m_UseDepthStencilBuffer ? 0 : 32;
                cmd.GetTemporaryRT(k_ColorTextureHandle.id, colorDescriptor, colorTextureFilterMode);
            }

            if (m_CreateDepthTexture)
            {
                var depthDescriptor = cameraTargetDescriptor;
                depthDescriptor.colorFormat = RenderTextureFormat.Depth;
                depthDescriptor.depthBufferBits = 32;
                depthDescriptor.bindMS = depthDescriptor.msaaSamples > 1 && !SystemInfo.supportsMultisampleAutoResolve && (SystemInfo.supportsMultisampledTextures != 0);
                cmd.GetTemporaryRT(k_DepthTextureHandle.id, depthDescriptor, FilterMode.Point);
            }
        }
        else // Overlay camera
        {
            // These render textures are created by the base camera, but it's the responsibility of the last overlay camera's ScriptableRenderer
            // to release the textures in its FinishRendering().
            m_CreateColorTexture = true;
            m_CreateDepthTexture = true;

            colorTargetHandle = k_ColorTextureHandle;
            depthTargetHandle = k_DepthTextureHandle;
        }
    }

    // Configures and enqueues the render passes for this camera for the current frame.
    public override void Setup(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        ref CameraData cameraData = ref renderingData.cameraData;
        ref var cameraTargetDescriptor = ref cameraData.cameraTargetDescriptor;
        bool stackHasPostProcess = renderingData.postProcessingEnabled;
        bool lastCameraInStack = cameraData.resolveFinalTarget;
        var colorTextureFilterMode = FilterMode.Bilinear;

        PixelPerfectCamera ppc = null;
        bool ppcUsesOffscreenRT = false;
        bool ppcUpscaleRT = false;

#if UNITY_EDITOR
        // The scene view camera cannot be uninitialized or skybox when using the 2D renderer.
        if (cameraData.cameraType == CameraType.SceneView)
        {
            renderingData.cameraData.camera.clearFlags = CameraClearFlags.SolidColor;
        }
#endif

        // Pixel Perfect Camera doesn't support camera stacking.
        if (cameraData.renderType == CameraRenderType.Base && lastCameraInStack)
        {
            cameraData.camera.TryGetComponent(out ppc);
            if (ppc != null)
            {
                if (ppc.offscreenRTSize != Vector2Int.zero)
                {
                    ppcUsesOffscreenRT = true;

                    // Pixel Perfect Camera may request a different RT size than camera VP size.
                    // In that case we need to modify cameraTargetDescriptor here so that all the passes would use the same size.
                    cameraTargetDescriptor.width = ppc.offscreenRTSize.x;
                    cameraTargetDescriptor.height = ppc.offscreenRTSize.y;
                }

                colorTextureFilterMode = ppc.finalBlitFilterMode;
                ppcUpscaleRT = ppc.upscaleRT && ppc.isRunning;
            }
        }

        RenderTargetHandle colorTargetHandle;
        RenderTargetHandle depthTargetHandle;

        CommandBuffer cmd = CommandBufferPool.Get();
        using (new ProfilingScope(cmd, m_ProfilingSampler))
        {
            CreateRenderTextures(ref cameraData, ppcUsesOffscreenRT, colorTextureFilterMode, cmd,
                out colorTargetHandle, out depthTargetHandle);
        }
        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);

        ConfigureCameraTarget(colorTargetHandle.Identifier(), depthTargetHandle.Identifier());

        // We generate color LUT in the base camera only. This allows us to not break render pass execution for overlay cameras.
        if (stackHasPostProcess && cameraData.renderType == CameraRenderType.Base && m_PostProcessPasses.isCreated)
        {
            colorGradingLutPass.Setup(colorGradingLutHandle);
            EnqueuePass(colorGradingLutPass);
        }

        var hasValidDepth = m_CreateDepthTexture || !m_CreateColorTexture || m_UseDepthStencilBuffer;
        m_Render2DLightingPass.Setup(hasValidDepth);
        m_Render2DLightingPass.ConfigureTarget(colorTargetHandle.Identifier(), depthTargetHandle.Identifier());
        EnqueuePass(m_Render2DLightingPass);

        // When using Upscale Render Texture on a Pixel Perfect Camera, we want all post-processing effects done with a low-res RT,
        // and only upscale the low-res RT to fullscreen when blitting it to camera target. Also, final post processing pass is not run in this case,
        // so FXAA is not supported (you don't want to apply FXAA when everything is intentionally pixelated).
        bool requireFinalPostProcessPass =
            lastCameraInStack && !ppcUpscaleRT && stackHasPostProcess && cameraData.antialiasing == AntialiasingMode.FastApproximateAntialiasing;

        if (stackHasPostProcess && m_PostProcessPasses.isCreated)
        {
            RenderTargetHandle postProcessDestHandle =
                lastCameraInStack && !ppcUpscaleRT && !requireFinalPostProcessPass ? RenderTargetHandle.CameraTarget : afterPostProcessColorHandle;

            postProcessPass.Setup(
                cameraTargetDescriptor,
                colorTargetHandle,
                postProcessDestHandle,
                depthTargetHandle,
                colorGradingLutHandle,
                requireFinalPostProcessPass,
                postProcessDestHandle == RenderTargetHandle.CameraTarget);

            EnqueuePass(postProcessPass);
            colorTargetHandle = postProcessDestHandle;
        }

        // Clear the uncovered screen regions when the Pixel Perfect Camera crops the frame.
        if (ppc != null && ppc.isRunning && (ppc.cropFrameX || ppc.cropFrameY))
            EnqueuePass(m_PixelPerfectBackgroundPass);

        if (requireFinalPostProcessPass && m_PostProcessPasses.isCreated)
        {
            finalPostProcessPass.SetupFinalPass(colorTargetHandle);
            EnqueuePass(finalPostProcessPass);
        }
        else if (lastCameraInStack && colorTargetHandle != RenderTargetHandle.CameraTarget)
        {
            m_FinalBlitPass.Setup(cameraTargetDescriptor, colorTargetHandle);
            EnqueuePass(m_FinalBlitPass);
        }
    }

    public override void SetupCullingParameters(ref ScriptableCullingParameters cullingParameters, ref CameraData cameraData)
    {
        // 2D rendering needs no frustum/occlusion culling options or shadow distance.
        cullingParameters.cullingOptions = CullingOptions.None;
        cullingParameters.isOrthographic = cameraData.camera.orthographic;
        cullingParameters.shadowDistance = 0.0f;
        m_LightCullResult.SetupCulling(ref cullingParameters, cameraData.camera);
    }

    public override void FinishRendering(CommandBuffer cmd)
    {
        // Release the intermediate textures allocated in CreateRenderTextures.
        if (m_CreateColorTexture)
            cmd.ReleaseTemporaryRT(k_ColorTextureHandle.id);

        if (m_CreateDepthTexture)
            cmd.ReleaseTemporaryRT(k_DepthTextureHandle.id);
    }
}
}

View File

@@ -0,0 +1,160 @@
using System;
using System.Collections.Generic;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;
using UnityEngine.Scripting.APIUpdating;
using UnityEngine.Serialization;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.ProjectWindowCallback;
#endif
namespace UnityEngine.Experimental.Rendering.Universal
{
/// <summary>
/// ScriptableRendererData for the 2D Renderer. Holds the serialized settings
/// (light blend styles, shaders, texture budgets) and the transient per-frame
/// state shared by the 2D render passes.
/// </summary>
[Serializable, ReloadGroup, ExcludeFromPreset]
[MovedFrom("UnityEngine.Experimental.Rendering.LWRP")]
[HelpURL("https://docs.unity3d.com/Packages/com.unity.render-pipelines.universal@latest/index.html?subfolder=/manual/2DRendererData_overview.html")]
public partial class Renderer2DData : ScriptableRendererData
{
    public enum Renderer2DDefaultMaterialType
    {
        Lit,
        Unlit,
        Custom
    }

    // --- Serialized settings (names matter for Unity serialization; do not rename) ---
    [SerializeField]
    TransparencySortMode m_TransparencySortMode = TransparencySortMode.Default;

    [SerializeField]
    Vector3 m_TransparencySortAxis = Vector3.up;

    [SerializeField]
    float m_HDREmulationScale = 1;

    // Scale applied to the camera target size when allocating light render textures.
    [SerializeField, Range(0.01f, 1.0f)]
    float m_LightRenderTextureScale = 0.5f;

    // Formerly serialized as "m_LightOperations"; kept compatible via FormerlySerializedAs.
    [SerializeField, FormerlySerializedAs("m_LightOperations")]
    Light2DBlendStyle[] m_LightBlendStyles = null;

    [SerializeField]
    bool m_UseDepthStencilBuffer = true;

    [SerializeField]
    bool m_UseCameraSortingLayersTexture = false;

    [SerializeField]
    int m_CameraSortingLayersTextureBound = 0;

    [SerializeField]
    Downsampling m_CameraSortingLayerDownsamplingMethod = Downsampling.None;

    // Upper bounds on how many light/shadow render textures may be alive at once.
    [SerializeField]
    uint m_MaxLightRenderTextureCount = 16;

    [SerializeField]
    uint m_MaxShadowRenderTextureCount = 1;

    // Shader references resolved from the package via the Reload attribute.
    [SerializeField, Reload("Shaders/2D/Light2D-Shape.shader")]
    Shader m_ShapeLightShader = null;

    [SerializeField, Reload("Shaders/2D/Light2D-Shape-Volumetric.shader")]
    Shader m_ShapeLightVolumeShader = null;

    [SerializeField, Reload("Shaders/2D/Light2D-Point.shader")]
    Shader m_PointLightShader = null;

    [SerializeField, Reload("Shaders/2D/Light2D-Point-Volumetric.shader")]
    Shader m_PointLightVolumeShader = null;

    [SerializeField, Reload("Shaders/Utils/Blit.shader")]
    Shader m_BlitShader = null;

    [SerializeField, Reload("Shaders/Utils/Sampling.shader")]
    Shader m_SamplingShader = null;

    [SerializeField, Reload("Shaders/2D/ShadowGroup2D.shader")]
    Shader m_ShadowGroupShader = null;

    [SerializeField, Reload("Shaders/2D/Shadow2DRemoveSelf.shader")]
    Shader m_RemoveSelfShadowShader = null;

    [SerializeField, Reload("Shaders/Utils/FallbackError.shader")]
    Shader m_FallbackErrorShader;

    [SerializeField]
    PostProcessData m_PostProcessData = null;

    [SerializeField, Reload("Runtime/2D/Data/Textures/FalloffLookupTexture.png")]
    [HideInInspector]
    private Texture2D m_FallOffLookup = null;

    // --- Read-only accessors over the serialized settings ---
    public float hdrEmulationScale => m_HDREmulationScale;
    internal float lightRenderTextureScale => m_LightRenderTextureScale;
    public Light2DBlendStyle[] lightBlendStyles => m_LightBlendStyles;
    internal bool useDepthStencilBuffer => m_UseDepthStencilBuffer;
    internal Texture2D fallOffLookup => m_FallOffLookup;
    internal Shader shapeLightShader => m_ShapeLightShader;
    internal Shader shapeLightVolumeShader => m_ShapeLightVolumeShader;
    internal Shader pointLightShader => m_PointLightShader;
    internal Shader pointLightVolumeShader => m_PointLightVolumeShader;
    internal Shader blitShader => m_BlitShader;
    internal Shader samplingShader => m_SamplingShader;
    internal Shader shadowGroupShader => m_ShadowGroupShader;
    internal Shader removeSelfShadowShader => m_RemoveSelfShadowShader;
    internal PostProcessData postProcessData { get => m_PostProcessData; set { m_PostProcessData = value; } }
    internal TransparencySortMode transparencySortMode => m_TransparencySortMode;
    internal Vector3 transparencySortAxis => m_TransparencySortAxis;
    internal uint lightRenderTextureMemoryBudget => m_MaxLightRenderTextureCount;
    internal uint shadowRenderTextureMemoryBudget => m_MaxShadowRenderTextureCount;
    internal bool useCameraSortingLayerTexture => m_UseCameraSortingLayersTexture;
    internal int cameraSortingLayerTextureBound => m_CameraSortingLayersTextureBound;
    internal Downsampling cameraSortingLayerDownsamplingMethod => m_CameraSortingLayerDownsamplingMethod;

    protected override ScriptableRenderer Create()
    {
#if UNITY_EDITOR
        // In edit mode, Reload-attributed references may be null after import; restore them.
        if (!Application.isPlaying)
        {
            ReloadAllNullProperties();
        }
#endif
        return new Renderer2D(this);
    }

    protected override void OnEnable()
    {
        base.OnEnable();

        // Initialize the render target handles used by the 2D lighting passes.
        for (var i = 0; i < m_LightBlendStyles.Length; ++i)
        {
            m_LightBlendStyles[i].renderTargetHandle.Init($"_ShapeLightTexture{i}");
        }

        normalsRenderTarget.Init("_NormalMap");
        shadowsRenderTarget.Init("_ShadowTex");

        // One material cache slot per possible stencil group value.
        const int totalMaterials = 256;
        if (shadowMaterials == null || shadowMaterials.Length == 0)
            shadowMaterials = new Material[totalMaterials];
        if (removeSelfShadowMaterials == null || removeSelfShadowMaterials.Length == 0)
            removeSelfShadowMaterials = new Material[totalMaterials];
    }

    // transient data (rebuilt at runtime; not serialized)
    internal Dictionary<uint, Material> lightMaterials { get; } = new Dictionary<uint, Material>();
    internal Material[] shadowMaterials { get; private set; }
    internal Material[] removeSelfShadowMaterials { get; private set; }

    internal bool isNormalsRenderTargetValid { get; set; }
    internal float normalsRenderTargetScale { get; set; }
    internal RenderTargetHandle normalsRenderTarget;
    internal RenderTargetHandle shadowsRenderTarget;
    internal RenderTargetHandle cameraSortingLayerRenderTarget;

    // this should've been in RenderingData along with other cull results
    internal ILight2DCullResult lightCullResult { get; set; }
}
}

View File

@@ -0,0 +1,106 @@
using UnityEditor;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;
namespace UnityEngine.Experimental.Rendering.Universal
{
/// <summary>
/// Editor-only portion of <see cref="Renderer2DData"/>: default material/shader
/// resolution, Sprite Editor integration, and serialized-data migration.
/// </summary>
public partial class Renderer2DData
{
#if UNITY_EDITOR
    [SerializeField]
    Renderer2DDefaultMaterialType m_DefaultMaterialType = Renderer2DDefaultMaterialType.Lit;

    [SerializeField, Reload("Runtime/Materials/Sprite-Lit-Default.mat")]
    Material m_DefaultCustomMaterial = null;

    [SerializeField, Reload("Runtime/Materials/Sprite-Lit-Default.mat")]
    Material m_DefaultLitMaterial = null;

    [SerializeField, Reload("Runtime/Materials/Sprite-Unlit-Default.mat")]
    Material m_DefaultUnlitMaterial = null;

    internal override Shader GetDefaultShader()
    {
        return Shader.Find("Universal Render Pipeline/2D/Sprite-Lit-Default");
    }

    // Returns the default material for sprites/particles based on the user's choice;
    // other material types are not handled by the 2D renderer and yield null.
    internal override Material GetDefaultMaterial(DefaultMaterialType materialType)
    {
        if (materialType == DefaultMaterialType.Sprite || materialType == DefaultMaterialType.Particle)
        {
            if (m_DefaultMaterialType == Renderer2DDefaultMaterialType.Lit)
                return m_DefaultLitMaterial;
            else if (m_DefaultMaterialType == Renderer2DDefaultMaterialType.Unlit)
                return m_DefaultUnlitMaterial;
            else
                return m_DefaultCustomMaterial;
        }

        return null;
    }

    private void OnEnableInEditor()
    {
        // Provide a list of suggested texture property names to Sprite Editor via EditorPrefs.
        const string suggestedNamesKey = "SecondarySpriteTexturePropertyNames";
        const string maskTex = "_MaskTex";
        const string normalMap = "_NormalMap";
        string suggestedNamesPrefs = EditorPrefs.GetString(suggestedNamesKey);

        if (string.IsNullOrEmpty(suggestedNamesPrefs))
            EditorPrefs.SetString(suggestedNamesKey, maskTex + "," + normalMap);
        else
        {
            // Append only the names that are not already present in the comma-separated list.
            if (!suggestedNamesPrefs.Contains(maskTex))
                suggestedNamesPrefs += ("," + maskTex);

            if (!suggestedNamesPrefs.Contains(normalMap))
                suggestedNamesPrefs += ("," + normalMap);

            EditorPrefs.SetString(suggestedNamesKey, suggestedNamesPrefs);
        }

        ReloadAllNullProperties();
    }

    // Restores any Reload-attributed references that deserialized as null.
    private void ReloadAllNullProperties()
    {
        ResourceReloader.TryReloadAllNullIn(this, UniversalRenderPipelineAsset.packagePath);
    }

    // Runs before OnEnable: migrates old serialized data or seeds the default blend styles.
    // m_LightBlendStyles itself is declared in the runtime partial of this class.
    private void Awake()
    {
        if (m_LightBlendStyles != null)
        {
            for (int i = 0; i < m_LightBlendStyles.Length; ++i)
            {
                ref var blendStyle = ref m_LightBlendStyles[i];

                // Custom blend mode (99) now falls back to Multiply.
                if ((int)blendStyle.blendMode == 99)
                    blendStyle.blendMode = Light2DBlendStyle.BlendMode.Multiply;
            }

            return;
        }

        // First-time setup: the four stock blend styles.
        m_LightBlendStyles = new Light2DBlendStyle[4];

        m_LightBlendStyles[0].name = "Multiply";
        m_LightBlendStyles[0].blendMode = Light2DBlendStyle.BlendMode.Multiply;

        m_LightBlendStyles[1].name = "Additive";
        m_LightBlendStyles[1].blendMode = Light2DBlendStyle.BlendMode.Additive;

        m_LightBlendStyles[2].name = "Multiply with Mask";
        m_LightBlendStyles[2].blendMode = Light2DBlendStyle.BlendMode.Multiply;
        m_LightBlendStyles[2].maskTextureChannel = Light2DBlendStyle.TextureChannel.R;

        m_LightBlendStyles[3].name = "Additive with Mask";
        m_LightBlendStyles[3].blendMode = Light2DBlendStyle.BlendMode.Additive;
        m_LightBlendStyles[3].maskTextureChannel = Light2DBlendStyle.TextureChannel.R;
    }
#endif
}
}

View File

@@ -0,0 +1,22 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace UnityEngine.Experimental.Rendering.Universal
{
/// <summary>
/// A group component that composites all child <c>ShadowCaster2D</c> shapes into a
/// single shadow. Registers itself with the global group manager while enabled.
/// </summary>
[AddComponentMenu("Rendering/2D/Composite Shadow Caster 2D (Experimental)")]
[ExecuteInEditMode]
public class CompositeShadowCaster2D : ShadowCasterGroup2D
{
    // Make this group visible to the shadow rendering code.
    protected void OnEnable() => ShadowCasterGroup2DManager.AddGroup(this);

    // Stop rendering shadows for this group when the component is disabled.
    protected void OnDisable() => ShadowCasterGroup2DManager.RemoveGroup(this);
}
}

View File

@@ -0,0 +1,186 @@
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace UnityEngine.Experimental.Rendering.Universal
{
/// <summary>
/// Class <c>ShadowCaster2D</c> contains properties used for shadow casting.
/// The caster's shape defaults to the bounds of an attached Renderer or Collider2D
/// and is turned into a shadow mesh consumed by the 2D shadow passes.
/// </summary>
[ExecuteInEditMode]
[DisallowMultipleComponent]
[AddComponentMenu("Rendering/2D/Shadow Caster 2D (Experimental)")]
public class ShadowCaster2D : ShadowCasterGroup2D
{
    // Serialized state; m_HasRenderer is refreshed every Update.
    [SerializeField] bool m_HasRenderer = false;
    [SerializeField] bool m_UseRendererSilhouette = true;
    [SerializeField] bool m_CastsShadows = true;
    [SerializeField] bool m_SelfShadows = false;
    [SerializeField] int[] m_ApplyToSortingLayers = null;  // sorting layer ids this caster affects
    [SerializeField] Vector3[] m_ShapePath = null;         // shadow shape, in local space
    [SerializeField] int m_ShapePathHash = 0;              // hash of m_ShapePath; drives mesh rebuilds
    [SerializeField] Mesh m_Mesh;
    [SerializeField] int m_InstanceId;                     // detects duplicated components sharing a mesh

    internal ShadowCasterGroup2D m_ShadowCasterGroup = null;
    internal ShadowCasterGroup2D m_PreviousShadowCasterGroup = null;

    internal Mesh mesh => m_Mesh;
    internal Vector3[] shapePath => m_ShapePath;
    internal int shapePathHash { get { return m_ShapePathHash; } set { m_ShapePathHash = value; } }

    // Previous-frame values used by Update's change detection.
    int m_PreviousShadowGroup = 0;
    bool m_PreviousCastsShadows = true;
    int m_PreviousPathHash = 0;

    /// <summary>
    /// If selfShadows is true, useRendererSilhoutte specifies that the renderer's sihouette should be considered part of the shadow. If selfShadows is false, useRendererSilhoutte specifies that the renderer's sihouette should be excluded from the shadow
    /// </summary>
    public bool useRendererSilhouette
    {
        set { m_UseRendererSilhouette = value; }
        // Only meaningful when a Renderer is actually present on this object.
        get { return m_UseRendererSilhouette && m_HasRenderer; }
    }

    /// <summary>
    /// If true, the shadow casting shape is included as part of the shadow. If false, the shadow casting shape is excluded from the shadow.
    /// </summary>
    public bool selfShadows
    {
        set { m_SelfShadows = value; }
        get { return m_SelfShadows; }
    }

    /// <summary>
    /// Specifies if shadows will be cast.
    /// </summary>
    public bool castsShadows
    {
        set { m_CastsShadows = value; }
        get { return m_CastsShadows; }
    }

    // Builds the default target list: the ids of every sorting layer in the project.
    static int[] SetDefaultSortingLayers()
    {
        int layerCount = SortingLayer.layers.Length;
        int[] allLayers = new int[layerCount];

        for (int layerIndex = 0; layerIndex < layerCount; layerIndex++)
        {
            allLayers[layerIndex] = SortingLayer.layers[layerIndex].id;
        }

        return allLayers;
    }

    // True when the given sorting layer id is one this caster applies to.
    internal bool IsShadowedLayer(int layer)
    {
        return m_ApplyToSortingLayers != null ? Array.IndexOf(m_ApplyToSortingLayers, layer) >= 0 : false;
    }

    private void Awake()
    {
        if (m_ApplyToSortingLayers == null)
            m_ApplyToSortingLayers = SetDefaultSortingLayers();

        // Derive an initial shape from the Renderer bounds (or Collider2D bounds when
        // the physics module is available), falling back to a unit box at the transform.
        Bounds bounds = new Bounds(transform.position, Vector3.one);

        Renderer renderer = GetComponent<Renderer>();
        if (renderer != null)
        {
            bounds = renderer.bounds;
        }
#if USING_PHYSICS2D_MODULE
        else
        {
            Collider2D collider = GetComponent<Collider2D>();
            if (collider != null)
                bounds = collider.bounds;
        }
#endif

        // Convert world-space bounds corners into local space (guarding against zero scale).
        Vector3 inverseScale = Vector3.zero;
        Vector3 relOffset = transform.position;

        if (transform.lossyScale.x != 0 && transform.lossyScale.y != 0)
        {
            inverseScale = new Vector3(1 / transform.lossyScale.x, 1 / transform.lossyScale.y);
            relOffset = new Vector3(inverseScale.x * -transform.position.x, inverseScale.y * -transform.position.y);
        }

        if (m_ShapePath == null || m_ShapePath.Length == 0)
        {
            // Default shape: the four corners of the bounds, wound counter-clockwise.
            m_ShapePath = new Vector3[]
            {
                relOffset + new Vector3(inverseScale.x * bounds.min.x, inverseScale.y * bounds.min.y),
                relOffset + new Vector3(inverseScale.x * bounds.min.x, inverseScale.y * bounds.max.y),
                relOffset + new Vector3(inverseScale.x * bounds.max.x, inverseScale.y * bounds.max.y),
                relOffset + new Vector3(inverseScale.x * bounds.max.x, inverseScale.y * bounds.min.y),
            };
        }
    }

    protected void OnEnable()
    {
        // Rebuild the mesh on first enable, or when this component was duplicated and
        // still points at the original's mesh (instance id mismatch).
        if (m_Mesh == null || m_InstanceId != GetInstanceID())
        {
            m_Mesh = new Mesh();
            ShadowUtility.GenerateShadowMesh(m_Mesh, m_ShapePath);
            m_InstanceId = GetInstanceID();
        }

        m_ShadowCasterGroup = null;
    }

    protected void OnDisable()
    {
        ShadowCasterGroup2DManager.RemoveFromShadowCasterGroup(this, m_ShadowCasterGroup);
    }

    public void Update()
    {
        Renderer renderer;
        m_HasRenderer = TryGetComponent<Renderer>(out renderer);

        // Regenerate the shadow mesh whenever the shape path hash changed since last frame.
        bool rebuildMesh = LightUtility.CheckForChange(m_ShapePathHash, ref m_PreviousPathHash);
        if (rebuildMesh)
            ShadowUtility.GenerateShadowMesh(m_Mesh, m_ShapePath);

        // Keep group membership in sync: a caster either joins an ancestor composite
        // group or acts as its own group. Order matters — remove from the old group
        // before registering the new one.
        m_PreviousShadowCasterGroup = m_ShadowCasterGroup;
        bool addedToNewGroup = ShadowCasterGroup2DManager.AddToShadowCasterGroup(this, ref m_ShadowCasterGroup);
        if (addedToNewGroup && m_ShadowCasterGroup != null)
        {
            if (m_PreviousShadowCasterGroup == this)
                ShadowCasterGroup2DManager.RemoveGroup(this);

            ShadowCasterGroup2DManager.RemoveFromShadowCasterGroup(this, m_PreviousShadowCasterGroup);
            if (m_ShadowCasterGroup == this)
                ShadowCasterGroup2DManager.AddGroup(this);
        }

        // Re-sort into the manager's list when the user-assigned shadow group id changed.
        if (LightUtility.CheckForChange(m_ShadowGroup, ref m_PreviousShadowGroup))
        {
            ShadowCasterGroup2DManager.RemoveGroup(this);
            ShadowCasterGroup2DManager.AddGroup(this);
        }

        // Toggle manager registration when castsShadows was flipped.
        if (LightUtility.CheckForChange(m_CastsShadows, ref m_PreviousCastsShadows))
        {
            if (m_CastsShadows)
                ShadowCasterGroup2DManager.AddGroup(this);
            else
                ShadowCasterGroup2DManager.RemoveGroup(this);
        }
    }

#if UNITY_EDITOR
    void Reset()
    {
        // Re-run initialization so the inspector Reset restores the default shape and mesh.
        Awake();
        OnEnable();
    }
#endif
}
}

View File

@@ -0,0 +1,31 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace UnityEngine.Experimental.Rendering.Universal
{
/// <summary>
/// Base class for anything that groups <c>ShadowCaster2D</c> instances: either a
/// <c>CompositeShadowCaster2D</c> or an individual caster acting as its own group.
/// </summary>
public abstract class ShadowCasterGroup2D : MonoBehaviour
{
    [SerializeField] internal int m_ShadowGroup = 0;

    // Backing list of registered casters; allocated lazily on first registration.
    List<ShadowCaster2D> m_ShadowCasters;

    /// <summary>Returns the registered casters (null until the first registration).</summary>
    public List<ShadowCaster2D> GetShadowCasters() => m_ShadowCasters;

    /// <summary>Returns the user-assigned shadow group id.</summary>
    public int GetShadowGroup() => m_ShadowGroup;

    /// <summary>Adds a caster to this group.</summary>
    public void RegisterShadowCaster2D(ShadowCaster2D shadowCaster2D)
    {
        if (m_ShadowCasters == null)
            m_ShadowCasters = new List<ShadowCaster2D>();

        m_ShadowCasters.Add(shadowCaster2D);
    }

    /// <summary>Removes a caster from this group (no-op when nothing was registered).</summary>
    public void UnregisterShadowCaster2D(ShadowCaster2D shadowCaster2D)
    {
        if (m_ShadowCasters != null)
            m_ShadowCasters.Remove(shadowCaster2D);
    }
}
}

View File

@@ -0,0 +1,88 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace UnityEngine.Experimental.Rendering.Universal
{
/// <summary>
/// Static registry of active shadow caster groups, consumed by the shadow
/// rendering passes. Groups with equal shadow group ids are kept adjacent.
/// </summary>
internal class ShadowCasterGroup2DManager
{
    static List<ShadowCasterGroup2D> s_ShadowCasterGroups = null;

    public static List<ShadowCasterGroup2D> shadowCasterGroups { get { return s_ShadowCasterGroups; } }

    public static void AddShadowCasterGroupToList(ShadowCasterGroup2D shadowCaster, List<ShadowCasterGroup2D> list)
    {
        // Insert just before the first group sharing this group's id,
        // or append at the end when no such group exists yet.
        var insertAt = 0;
        while (insertAt < list.Count && shadowCaster.GetShadowGroup() != list[insertAt].GetShadowGroup())
            insertAt++;

        list.Insert(insertAt, shadowCaster);
    }

    public static void RemoveShadowCasterGroupFromList(ShadowCasterGroup2D shadowCaster, List<ShadowCasterGroup2D> list)
    {
        list.Remove(shadowCaster);
    }

    // Walks up the hierarchy and returns the composite group closest to the root, if any.
    static CompositeShadowCaster2D FindTopMostCompositeShadowCaster(ShadowCaster2D shadowCaster)
    {
        CompositeShadowCaster2D topMostGroup = null;

        for (var current = shadowCaster.transform.parent; current != null; current = current.parent)
        {
            CompositeShadowCaster2D candidate;
            if (current.TryGetComponent<CompositeShadowCaster2D>(out candidate))
                topMostGroup = candidate;
        }

        return topMostGroup;
    }

    // Assigns the caster to its proper group (topmost composite ancestor, or itself).
    // Returns true when the caster actually moved to a different group.
    public static bool AddToShadowCasterGroup(ShadowCaster2D shadowCaster, ref ShadowCasterGroup2D shadowCasterGroup)
    {
        ShadowCasterGroup2D targetGroup = FindTopMostCompositeShadowCaster(shadowCaster);
        if (targetGroup == null)
            targetGroup = shadowCaster.GetComponent<ShadowCaster2D>();

        if (targetGroup == null || shadowCasterGroup == targetGroup)
            return false;

        targetGroup.RegisterShadowCaster2D(shadowCaster);
        shadowCasterGroup = targetGroup;
        return true;
    }

    public static void RemoveFromShadowCasterGroup(ShadowCaster2D shadowCaster, ShadowCasterGroup2D shadowCasterGroup)
    {
        // Explicit null check (not ?.) so Unity's overloaded == for destroyed objects applies.
        if (shadowCasterGroup != null)
            shadowCasterGroup.UnregisterShadowCaster2D(shadowCaster);
    }

    public static void AddGroup(ShadowCasterGroup2D group)
    {
        if (group == null)
            return;

        if (s_ShadowCasterGroups == null)
            s_ShadowCasterGroups = new List<ShadowCasterGroup2D>();

        AddShadowCasterGroupToList(group, s_ShadowCasterGroups);
    }

    public static void RemoveGroup(ShadowCasterGroup2D group)
    {
        if (group != null && s_ShadowCasterGroups != null)
            RemoveShadowCasterGroupFromList(group, s_ShadowCasterGroups);
    }
}
}

View File

@@ -0,0 +1,181 @@
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;
using Unity.Mathematics;
namespace UnityEngine.Experimental.Rendering.Universal
{
internal static class ShadowRendering
{
private static readonly int k_LightPosID = Shader.PropertyToID("_LightPos");
private static readonly int k_ShadowStencilGroupID = Shader.PropertyToID("_ShadowStencilGroup");
private static readonly int k_ShadowIntensityID = Shader.PropertyToID("_ShadowIntensity");
private static readonly int k_ShadowVolumeIntensityID = Shader.PropertyToID("_ShadowVolumeIntensity");
private static readonly int k_ShadowRadiusID = Shader.PropertyToID("_ShadowRadius");
private static RenderTargetHandle[] m_RenderTargets = null;
public static uint maxTextureCount { get; private set; }
/// <summary>
/// Allocates the pool of shadow render-target handles sized by the renderer's
/// shadow texture budget. Re-allocates only when the requested count changes.
/// </summary>
public static void InitializeBudget(uint maxTextureCount)
{
    if (m_RenderTargets == null || m_RenderTargets.Length != maxTextureCount)
    {
        m_RenderTargets = new RenderTargetHandle[maxTextureCount];
        ShadowRendering.maxTextureCount = maxTextureCount;

        for (int i = 0; i < maxTextureCount; i++)
        {
            unsafe
            {
                // NOTE(review): this is a plain field assignment; the unsafe block looks
                // unnecessary here — presumably left over from an earlier version. Confirm.
                m_RenderTargets[i].id = Shader.PropertyToID($"ShadowTex_{i}");
            }
        }
    }
}
// Allocates the shadow render texture for the pooled handle at shadowIndex.
public static void CreateShadowRenderTexture(IRenderPass2D pass, RenderingData renderingData, CommandBuffer cmdBuffer, int shadowIndex)
{
    CreateShadowRenderTexture(pass, m_RenderTargets[shadowIndex], renderingData, cmdBuffer);
}
// Renders the given light's shadows for one layer into the pooled target at shadowIndex.
public static void PrerenderShadows(IRenderPass2D pass, RenderingData renderingData, CommandBuffer cmdBuffer, int layerToRender, Light2D light, int shadowIndex, float shadowIntensity)
{
    // Render the shadows for this light
    RenderShadows(pass, renderingData, cmdBuffer, layerToRender, light, shadowIntensity, m_RenderTargets[shadowIndex].Identifier());
}
// Binds the pooled shadow texture globally and sets the per-light shadow intensities.
// Intensities are inverted (1 - x) because the shaders multiply by the remaining light.
public static void SetGlobalShadowTexture(CommandBuffer cmdBuffer, Light2D light, int shadowIndex)
{
    cmdBuffer.SetGlobalTexture("_ShadowTex", m_RenderTargets[shadowIndex].Identifier());
    cmdBuffer.SetGlobalFloat(k_ShadowIntensityID, 1 - light.shadowIntensity);
    cmdBuffer.SetGlobalFloat(k_ShadowVolumeIntensityID, 1 - light.shadowVolumeIntensity);
}
// Neutralizes the global shadow intensities (1 == no shadow attenuation).
public static void DisableGlobalShadowTexture(CommandBuffer cmdBuffer)
{
    cmdBuffer.SetGlobalFloat(k_ShadowIntensityID, 1);
    cmdBuffer.SetGlobalFloat(k_ShadowVolumeIntensityID, 1);
}
// Allocates a temporary shadow RT sized as a scaled copy of the camera target,
// using the renderer's light render texture scale (clamped to a sane range).
private static void CreateShadowRenderTexture(IRenderPass2D pass, RenderTargetHandle rtHandle, RenderingData renderingData, CommandBuffer cmdBuffer)
{
    var scale = Mathf.Clamp(pass.rendererData.lightRenderTextureScale, 0.01f, 1.0f);
    var cameraTargetDescriptor = renderingData.cameraData.cameraTargetDescriptor;

    var descriptor = new RenderTextureDescriptor(
        (int)(cameraTargetDescriptor.width * scale),
        (int)(cameraTargetDescriptor.height * scale))
    {
        useMipMap = false,
        autoGenerateMips = false,
        depthBufferBits = 24,  // depth+stencil buffer for the shadow stencil operations
        graphicsFormat = GraphicsFormat.R8G8B8A8_UNorm,
        msaaSamples = 1,
        dimension = TextureDimension.Tex2D
    };

    cmdBuffer.GetTemporaryRT(rtHandle.id, descriptor, FilterMode.Bilinear);
}
// Releases the temporary shadow RT previously allocated for shadowIndex.
public static void ReleaseShadowRenderTexture(CommandBuffer cmdBuffer, int shadowIndex)
{
    cmdBuffer.ReleaseTemporaryRT(m_RenderTargets[shadowIndex].id);
}
// Lazily creates and caches the shadow material for a stencil group index, baking
// the _ShadowStencilGroup value into the cached material.
private static Material GetShadowMaterial(this Renderer2DData rendererData, int index)
{
    // Wraps at 255 although the cache has 256 slots — slot 255 is never used and
    // indices >= 255 alias lower slots. TODO confirm the % 255 is intentional.
    var shadowMaterialIndex = index % 255;
    if (rendererData.shadowMaterials[shadowMaterialIndex] == null)
    {
        rendererData.shadowMaterials[shadowMaterialIndex] = CoreUtils.CreateEngineMaterial(rendererData.shadowGroupShader);
        rendererData.shadowMaterials[shadowMaterialIndex].SetFloat(k_ShadowStencilGroupID, index);
    }

    return rendererData.shadowMaterials[shadowMaterialIndex];
}
// Lazily creates and caches the self-shadow-removal material for a stencil group
// index; mirrors GetShadowMaterial, including the % 255 wrap of the 256-slot cache.
private static Material GetRemoveSelfShadowMaterial(this Renderer2DData rendererData, int index)
{
    var shadowMaterialIndex = index % 255;
    if (rendererData.removeSelfShadowMaterials[shadowMaterialIndex] == null)
    {
        rendererData.removeSelfShadowMaterials[shadowMaterialIndex] = CoreUtils.CreateEngineMaterial(rendererData.removeSelfShadowShader);
        rendererData.removeSelfShadowMaterials[shadowMaterialIndex].SetFloat(k_ShadowStencilGroupID, index);
    }

    return rendererData.removeSelfShadowMaterials[shadowMaterialIndex];
}
// Renders the stencil-based 2D shadow geometry for a single light into 'renderTexture'.
// For each shadow caster group: the shadow meshes are drawn first, then the casters'
// own silhouettes are either removed or kept depending on their selfShadows setting.
// NOTE(review): 'renderingData' and 'shadowIntensity' are unused in this body — kept
// for API compatibility; confirm before removing.
public static void RenderShadows(IRenderPass2D pass, RenderingData renderingData, CommandBuffer cmdBuffer, int layerToRender, Light2D light, float shadowIntensity, RenderTargetIdentifier renderTexture)
{
    cmdBuffer.SetRenderTarget(renderTexture, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare);
    cmdBuffer.ClearRenderTarget(true, true, Color.black); // clear stencil

    // 1.42 ~= sqrt(2): extends the shadow projection beyond the light's bounding sphere.
    var shadowRadius = 1.42f * light.boundingSphere.radius;

    cmdBuffer.SetGlobalVector(k_LightPosID, light.transform.position);
    cmdBuffer.SetGlobalFloat(k_ShadowRadiusID, shadowRadius);

    var shadowMaterial = pass.rendererData.GetShadowMaterial(1);
    var removeSelfShadowMaterial = pass.rendererData.GetRemoveSelfShadowMaterial(1);
    var shadowCasterGroups = ShadowCasterGroup2DManager.shadowCasterGroups;
    if (shadowCasterGroups != null && shadowCasterGroups.Count > 0)
    {
        var previousShadowGroupIndex = -1;
        var incrementingGroupIndex = 0;
        for (var group = 0; group < shadowCasterGroups.Count; group++)
        {
            var shadowCasterGroup = shadowCasterGroups[group];
            var shadowCasters = shadowCasterGroup.GetShadowCasters();

            var shadowGroupIndex = shadowCasterGroup.GetShadowGroup();
            // Every distinct group (and every group with index 0) gets its own stencil material pair.
            if (LightUtility.CheckForChange(shadowGroupIndex, ref previousShadowGroupIndex) || shadowGroupIndex == 0)
            {
                incrementingGroupIndex++;
                shadowMaterial = pass.rendererData.GetShadowMaterial(incrementingGroupIndex);
                removeSelfShadowMaterial = pass.rendererData.GetRemoveSelfShadowMaterial(incrementingGroupIndex);
            }

            if (shadowCasters != null)
            {
                // Draw the shadow casting group first, then draw the silhouettes..
                for (var i = 0; i < shadowCasters.Count; i++)
                {
                    var shadowCaster = shadowCasters[i];

                    if (shadowCaster != null && shadowMaterial != null && shadowCaster.IsShadowedLayer(layerToRender))
                    {
                        if (shadowCaster.castsShadows)
                            cmdBuffer.DrawMesh(shadowCaster.mesh, shadowCaster.transform.localToWorldMatrix, shadowMaterial);
                    }
                }

                // Second pass: remove (or keep) each caster's own silhouette.
                for (var i = 0; i < shadowCasters.Count; i++)
                {
                    var shadowCaster = shadowCasters[i];

                    if (shadowCaster != null && shadowMaterial != null && shadowCaster.IsShadowedLayer(layerToRender))
                    {
                        if (shadowCaster.useRendererSilhouette)
                        {
                            // Use the geometry of the attached Renderer for the silhouette.
                            var renderer = shadowCaster.GetComponent<Renderer>();
                            if (renderer != null)
                            {
                                if (!shadowCaster.selfShadows)
                                    cmdBuffer.DrawRenderer(renderer, removeSelfShadowMaterial);
                                else
                                    cmdBuffer.DrawRenderer(renderer, shadowMaterial, 0, 1);
                            }
                        }
                        else
                        {
                            if (!shadowCaster.selfShadows)
                            {
                                var meshMat = shadowCaster.transform.localToWorldMatrix;
                                cmdBuffer.DrawMesh(shadowCaster.mesh, meshMat, removeSelfShadowMaterial);
                            }
                        }
                    }
                }
            }
        }
    }
}
}
}

View File

@@ -0,0 +1,178 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using System;
using Unity.Collections;
using System.Linq;
using UnityEngine.Experimental.Rendering.Universal.LibTessDotNet;
namespace UnityEngine.Experimental.Rendering.Universal
{
    /// <summary>
    /// Builds the extruded meshes used to render 2D shadows from a closed shadow shape path.
    /// </summary>
    internal class ShadowUtility
    {
        // A triangle edge identified by its two vertex indices, plus the outward tangent
        // used by the shadow shader to extrude the edge away from the light.
        internal struct Edge : IComparable<Edge>
        {
            public int vertexIndex0;
            public int vertexIndex1;
            public Vector4 tangent;
            private bool compareReversed; // This is done so that edge AB can equal edge BA

            // Stores the endpoints and records their ordering so AB and BA compare equal.
            public void AssignVertexIndices(int vi0, int vi1)
            {
                vertexIndex0 = vi0;
                vertexIndex1 = vi1;
                compareReversed = vi0 > vi1;
            }

            // Orders edges by their order-normalized vertex indices so that duplicate
            // (interior) edges become adjacent after sorting.
            public int Compare(Edge a, Edge b)
            {
                int adjustedVertexIndex0A = a.compareReversed ? a.vertexIndex1 : a.vertexIndex0;
                int adjustedVertexIndex1A = a.compareReversed ? a.vertexIndex0 : a.vertexIndex1;
                int adjustedVertexIndex0B = b.compareReversed ? b.vertexIndex1 : b.vertexIndex0;
                int adjustedVertexIndex1B = b.compareReversed ? b.vertexIndex0 : b.vertexIndex1;

                // Sort first by VI0 then by VI1
                int deltaVI0 = adjustedVertexIndex0A - adjustedVertexIndex0B;
                int deltaVI1 = adjustedVertexIndex1A - adjustedVertexIndex1B;

                if (deltaVI0 == 0)
                    return deltaVI1;
                else
                    return deltaVI0;
            }

            public int CompareTo(Edge edgeToCompare)
            {
                return Compare(this, edgeToCompare);
            }
        }

        // Builds an Edge from two triangle-list indices, computing the edge's outward
        // tangent as the cross product of -forward with the (z-flattened) edge direction.
        static Edge CreateEdge(int triangleIndexA, int triangleIndexB, List<Vector3> vertices, List<int> triangles)
        {
            Edge retEdge = new Edge();

            retEdge.AssignVertexIndices(triangles[triangleIndexA], triangles[triangleIndexB]);

            Vector3 vertex0 = vertices[retEdge.vertexIndex0];
            vertex0.z = 0;
            Vector3 vertex1 = vertices[retEdge.vertexIndex1];
            vertex1.z = 0;

            Vector3 edgeDir = Vector3.Normalize(vertex1 - vertex0);
            retEdge.tangent = Vector3.Cross(-Vector3.forward, edgeDir);

            return retEdge;
        }

        // Emits the three edges of every triangle in the triangle list.
        static void PopulateEdgeArray(List<Vector3> vertices, List<int> triangles, List<Edge> edges)
        {
            for (int triangleIndex = 0; triangleIndex < triangles.Count; triangleIndex += 3)
            {
                edges.Add(CreateEdge(triangleIndex, triangleIndex + 1, vertices, triangles));
                edges.Add(CreateEdge(triangleIndex + 1, triangleIndex + 2, vertices, triangles));
                edges.Add(CreateEdge(triangleIndex + 2, triangleIndex, vertices, triangles));
            }
        }

        // In a sorted edge list, a boundary edge appears exactly once; interior edges are
        // shared by two triangles and therefore appear twice in a row.
        static bool IsOutsideEdge(int edgeIndex, List<Edge> edgesToProcess)
        {
            int previousIndex = edgeIndex - 1;
            int nextIndex = edgeIndex + 1;
            int numberOfEdges = edgesToProcess.Count;
            Edge currentEdge = edgesToProcess[edgeIndex];

            return (previousIndex < 0 || (currentEdge.CompareTo(edgesToProcess[edgeIndex - 1]) != 0)) && (nextIndex >= numberOfEdges || (currentEdge.CompareTo(edgesToProcess[edgeIndex + 1]) != 0));
        }

        // Sorts edges so duplicates become adjacent (see IsOutsideEdge).
        static void SortEdges(List<Edge> edgesToProcess)
        {
            edgesToProcess.Sort();
        }

        // For every boundary edge, duplicates one endpoint and appends a triangle whose
        // tangent data lets the shadow shader extrude the edge away from the light.
        static void CreateShadowTriangles(List<Vector3> vertices, List<Color> colors, List<int> triangles, List<Vector4> tangents, List<Edge> edges)
        {
            for (int edgeIndex = 0; edgeIndex < edges.Count; edgeIndex++)
            {
                if (IsOutsideEdge(edgeIndex, edges))
                {
                    Edge edge = edges[edgeIndex];
                    tangents[edge.vertexIndex1] = -edge.tangent;

                    int newVertexIndex = vertices.Count;
                    vertices.Add(vertices[edge.vertexIndex0]);
                    colors.Add(colors[edge.vertexIndex0]);
                    tangents.Add(-edge.tangent);

                    triangles.Add(edge.vertexIndex0);
                    triangles.Add(newVertexIndex);
                    triangles.Add(edge.vertexIndex1);
                }
            }
        }

        // LibTess combine callback: keep the custom data of the first contributing vertex.
        static object InterpCustomVertexData(Vec3 position, object[] data, float[] weights)
        {
            return data[0];
        }

        // Fills the tangent list with zero vectors, one per existing vertex.
        static void InitializeTangents(int tangentsToAdd, List<Vector4> tangents)
        {
            for (int i = 0; i < tangentsToAdd; i++)
                tangents.Add(Vector4.zero);
        }

        /// <summary>
        /// Fills <paramref name="mesh"/> with shadow geometry generated from a closed shape path.
        /// Interior geometry is tessellated with LibTess; outside boundary edges are then
        /// duplicated so the shadow shader can extrude them. Per-vertex extrusion data is
        /// packed into the mesh's color channel.
        /// </summary>
        public static void GenerateShadowMesh(Mesh mesh, Vector3[] shapePath)
        {
            List<Vector3> vertices = new List<Vector3>();
            List<int> triangles = new List<int>();
            List<Vector4> tangents = new List<Vector4>();
            List<Color> extrusion = new List<Color>();

            // Create interior geometry
            int pointCount = shapePath.Length;
            var inputs = new ContourVertex[2 * pointCount];
            for (int i = 0; i < pointCount; i++)
            {
                // Each Color packs (segment start xy, segment end xy) for the extrusion shader.
                Color extrusionData = new Color(shapePath[i].x, shapePath[i].y, shapePath[i].x, shapePath[i].y);
                int nextPoint = (i + 1) % pointCount;
                inputs[2 * i] = new ContourVertex() { Position = new Vec3() { X = shapePath[i].x, Y = shapePath[i].y, Z = 0 }, Data = extrusionData };

                // A midpoint vertex is inserted on every segment, carrying that segment's endpoints.
                extrusionData = new Color(shapePath[i].x, shapePath[i].y, shapePath[nextPoint].x, shapePath[nextPoint].y);
                Vector2 midPoint = 0.5f * (shapePath[i] + shapePath[nextPoint]);
                inputs[2 * i + 1] = new ContourVertex() { Position = new Vec3() { X = midPoint.x, Y = midPoint.y, Z = 0}, Data = extrusionData };
            }

            Tess tessI = new Tess();
            tessI.AddContour(inputs, ContourOrientation.Original);
            tessI.Tessellate(WindingRule.EvenOdd, ElementType.Polygons, 3, InterpCustomVertexData);

            var indicesI = tessI.Elements.Select(i => i).ToArray();
            var verticesI = tessI.Vertices.Select(v => new Vector3(v.Position.X, v.Position.Y, 0)).ToArray();
            var extrusionI = tessI.Vertices.Select(v => new Color(((Color)v.Data).r, ((Color)v.Data).g, ((Color)v.Data).b, ((Color)v.Data).a)).ToArray();

            vertices.AddRange(verticesI);
            triangles.AddRange(indicesI);
            extrusion.AddRange(extrusionI);

            InitializeTangents(vertices.Count, tangents);

            // Find the boundary edges of the tessellated interior and extrude them.
            List<Edge> edges = new List<Edge>();
            PopulateEdgeArray(vertices, triangles, edges);
            SortEdges(edges);
            CreateShadowTriangles(vertices, extrusion, triangles, tangents, edges);

            Color[] finalExtrusion = extrusion.ToArray();
            Vector3[] finalVertices = vertices.ToArray();
            int[] finalTriangles = triangles.ToArray();
            Vector4[] finalTangents = tangents.ToArray();

            mesh.Clear();
            mesh.vertices = finalVertices;
            mesh.triangles = finalTriangles;
            mesh.tangents = finalTangents;
            mesh.colors = finalExtrusion;
        }
    }
}

View File

@@ -0,0 +1,7 @@
using System.Runtime.CompilerServices;

// Grant Unity's test and editor assemblies access to this assembly's internal types.
[assembly: InternalsVisibleTo("UniversalGraphicsTests")]
[assembly: InternalsVisibleTo("Unity.RenderPipelines.Universal.Editor")]
[assembly: InternalsVisibleTo("Unity.RenderPipelines.Universal.Editor.Tests")]
[assembly: InternalsVisibleTo("Unity.RenderPipelines.Universal.Runtime.Tests")]
[assembly: InternalsVisibleTo("Unity.GraphicTests.Performance.Universal.Runtime")]

View File

@@ -0,0 +1,17 @@
namespace UnityEngine.Rendering.Universal
{
    /// <summary>
    /// Helpers for checking whether scene components carry the additional data
    /// components that mark them as set up for the Universal Render Pipeline.
    /// </summary>
    public static class ComponentUtility
    {
        /// <summary> Check if the provided camera is compatible with Universal Render Pipeline </summary>
        /// <param name="camera">The Camera to check</param>
        /// <returns>True if it is compatible, false otherwise</returns>
        public static bool IsUniversalCamera(Camera camera)
        {
            // A camera is URP-ready when it carries the additional camera data component.
            return camera.GetComponent<UniversalAdditionalCameraData>() != null;
        }

        /// <summary> Check if the provided light is compatible with Universal Render Pipeline </summary>
        /// <param name="light">The Light to check</param>
        /// <returns>True if it is compatible, false otherwise</returns>
        public static bool IsUniversalLight(Light light)
        {
            // A light is URP-ready when it carries the additional light data component.
            return light.GetComponent<UniversalAdditionalLightData>() != null;
        }
    }
}

View File

@@ -0,0 +1,35 @@
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!114 &11400000
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: de640fe3d0db1804a85f9fc8f5cadab6, type: 3}
m_Name: ForwardRendererData
m_EditorClassIdentifier:
m_RendererFeatures: []
postProcessData: {fileID: 11400000, guid: 41439944d30ece34e96484bdb6645b55, type: 2}
shaders:
blitPS: {fileID: 4800000, guid: c17132b1f77d20942aa75f8429c0f8bc, type: 3}
copyDepthPS: {fileID: 4800000, guid: d6dae50ee9e1bfa4db75f19f99355220, type: 3}
screenSpaceShadowPS: {fileID: 4800000, guid: 0f854b35a0cf61a429bd5dcfea30eddd,
type: 3}
samplingPS: {fileID: 4800000, guid: 04c410c9937594faa893a11dceb85f7e, type: 3}
m_OpaqueLayerMask:
serializedVersion: 2
m_Bits: 4294967295
m_TransparentLayerMask:
serializedVersion: 2
m_Bits: 4294967295
m_DefaultStencilState:
overrideStencilState: 0
stencilReference: 0
stencilCompareFunction: 8
passOperation: 0
failOperation: 0
zFailOperation: 0

View File

@@ -0,0 +1,78 @@
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!114 &11400000
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 572910c10080c0945a0ef731ccedc739, type: 3}
m_Name: PostProcessData
m_EditorClassIdentifier:
shaders:
stopNanPS: {fileID: 4800000, guid: 1121bb4e615ca3c48b214e79e841e823, type: 3}
subpixelMorphologicalAntialiasingPS: {fileID: 4800000, guid: 63eaba0ebfb82cc43bde059b4a8c65f6,
type: 3}
gaussianDepthOfFieldPS: {fileID: 4800000, guid: 5e7134d6e63e0bc47a1dd2669cedb379,
type: 3}
bokehDepthOfFieldPS: {fileID: 4800000, guid: 2aed67ad60045d54ba3a00c91e2d2631,
type: 3}
cameraMotionBlurPS: {fileID: 4800000, guid: 1edcd131364091c46a17cbff0b1de97a,
type: 3}
paniniProjectionPS: {fileID: 4800000, guid: a15b78cf8ca26ca4fb2090293153c62c,
type: 3}
lutBuilderLdrPS: {fileID: 4800000, guid: 65df88701913c224d95fc554db28381a, type: 3}
lutBuilderHdrPS: {fileID: 4800000, guid: ec9fec698a3456d4fb18cf8bacb7a2bc, type: 3}
bloomPS: {fileID: 4800000, guid: 5f1864addb451f54bae8c86d230f736e, type: 3}
uberPostPS: {fileID: 4800000, guid: e7857e9d0c934dc4f83f270f8447b006, type: 3}
finalPostPassPS: {fileID: 4800000, guid: c49e63ed1bbcb334780a3bd19dfed403, type: 3}
textures:
blueNoise16LTex:
- {fileID: 2800000, guid: 81200413a40918d4d8702e94db29911c, type: 3}
- {fileID: 2800000, guid: d50c5e07c9911a74982bddf7f3075e7b, type: 3}
- {fileID: 2800000, guid: 1134690bf9216164dbc75050e35b7900, type: 3}
- {fileID: 2800000, guid: 7ce2118f74614a94aa8a0cdf2e6062c3, type: 3}
- {fileID: 2800000, guid: 2ca97df9d1801e84a8a8f2c53cb744f0, type: 3}
- {fileID: 2800000, guid: e63eef8f54aa9dc4da9a5ac094b503b5, type: 3}
- {fileID: 2800000, guid: 39451254daebd6d40b52899c1f1c0c1b, type: 3}
- {fileID: 2800000, guid: c94ad916058dff743b0f1c969ddbe660, type: 3}
- {fileID: 2800000, guid: ed5ea7ce59ca8ec4f9f14bf470a30f35, type: 3}
- {fileID: 2800000, guid: 071e954febf155243a6c81e48f452644, type: 3}
- {fileID: 2800000, guid: 96aaab9cc247d0b4c98132159688c1af, type: 3}
- {fileID: 2800000, guid: fc3fa8f108657e14486697c9a84ccfc5, type: 3}
- {fileID: 2800000, guid: bfed3e498947fcb4890b7f40f54d85b9, type: 3}
- {fileID: 2800000, guid: d512512f4af60a442ab3458489412954, type: 3}
- {fileID: 2800000, guid: 47a45908f6db0cb44a0d5e961143afec, type: 3}
- {fileID: 2800000, guid: 4dcc0502f8586f941b5c4a66717205e8, type: 3}
- {fileID: 2800000, guid: 9d92991794bb5864c8085468b97aa067, type: 3}
- {fileID: 2800000, guid: 14381521ff11cb74abe3fe65401c23be, type: 3}
- {fileID: 2800000, guid: d36f0fe53425e08499a2333cf423634c, type: 3}
- {fileID: 2800000, guid: d4044ea2490d63b43aa1765f8efbf8a9, type: 3}
- {fileID: 2800000, guid: c9bd74624d8070f429e3f46d161f9204, type: 3}
- {fileID: 2800000, guid: d5c9b274310e5524ebe32a4e4da3df1f, type: 3}
- {fileID: 2800000, guid: f69770e54f2823f43badf77916acad83, type: 3}
- {fileID: 2800000, guid: 10b6c6d22e73dea46a8ab36b6eebd629, type: 3}
- {fileID: 2800000, guid: a2ec5cbf5a9b64345ad3fab0912ddf7b, type: 3}
- {fileID: 2800000, guid: 1c3c6d69a645b804fa232004b96b7ad3, type: 3}
- {fileID: 2800000, guid: d18a24d7b4ed50f4387993566d9d3ae2, type: 3}
- {fileID: 2800000, guid: c989e1ed85cf7154caa922fec53e6af6, type: 3}
- {fileID: 2800000, guid: ff47e5a0f105eb34883b973e51f4db62, type: 3}
- {fileID: 2800000, guid: fa042edbfc40fbd4bad0ab9d505b1223, type: 3}
- {fileID: 2800000, guid: 896d9004736809c4fb5973b7c12eb8b9, type: 3}
- {fileID: 2800000, guid: 179f794063d2a66478e6e726f84a65bc, type: 3}
filmGrainTex:
- {fileID: 2800000, guid: 654c582f7f8a5a14dbd7d119cbde215d, type: 3}
- {fileID: 2800000, guid: dd77ffd079630404e879388999033049, type: 3}
- {fileID: 2800000, guid: 1097e90e1306e26439701489f391a6c0, type: 3}
- {fileID: 2800000, guid: f0b67500f7fad3b4c9f2b13e8f41ba6e, type: 3}
- {fileID: 2800000, guid: 9930fb4528622b34687b00bbe6883de7, type: 3}
- {fileID: 2800000, guid: bd9e8c758250ef449a4b4bfaad7a2133, type: 3}
- {fileID: 2800000, guid: 510a2f57334933e4a8dbabe4c30204e4, type: 3}
- {fileID: 2800000, guid: b4db8180660810945bf8d55ab44352ad, type: 3}
- {fileID: 2800000, guid: fd2fd78b392986e42a12df2177d3b89c, type: 3}
- {fileID: 2800000, guid: 5cdee82a77d13994f83b8fdabed7c301, type: 3}
smaaAreaTex: {fileID: 2800000, guid: d1f1048909d55cd4fa1126ab998f617e, type: 3}
smaaSearchTex: {fileID: 2800000, guid: 51eee22c2a633ef4aada830eed57c3fd, type: 3}

View File

@@ -0,0 +1,109 @@
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.ProjectWindowCallback;
#endif
using System;
namespace UnityEngine.Rendering.Universal
{
    /// <summary>
    /// ScriptableObject holding the shader and texture resources required by URP post-processing.
    /// Field names are serialized — do not rename without a migration.
    /// </summary>
    [Serializable]
    public class PostProcessData : ScriptableObject
    {
#if UNITY_EDITOR
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1812")]
        internal class CreatePostProcessDataAsset : EndNameEditAction
        {
            // Creates the asset on disk and reloads any null resource references from the package.
            public override void Action(int instanceId, string pathName, string resourceFile)
            {
                var instance = CreateInstance<PostProcessData>();
                AssetDatabase.CreateAsset(instance, pathName);
                ResourceReloader.ReloadAllNullIn(instance, UniversalRenderPipelineAsset.packagePath);
                Selection.activeObject = instance;
            }
        }

        [MenuItem("Assets/Create/Rendering/Universal Render Pipeline/Post-process Data", priority = CoreUtils.assetCreateMenuPriority3)]
        static void CreatePostProcessData()
        {
            ProjectWindowUtil.StartNameEditingIfProjectWindowExists(0, CreateInstance<CreatePostProcessDataAsset>(), "CustomPostProcessData.asset", null, null);
        }

        // Loads the default PostProcessData asset shipped inside the URP package.
        internal static PostProcessData GetDefaultPostProcessData()
        {
            var path = System.IO.Path.Combine(UniversalRenderPipelineAsset.packagePath, "Runtime/Data/PostProcessData.asset");
            return AssetDatabase.LoadAssetAtPath<PostProcessData>(path);
        }
#endif

        // Shaders used by the post-processing passes; paths are package-relative.
        [Serializable, ReloadGroup]
        public sealed class ShaderResources
        {
            [Reload("Shaders/PostProcessing/StopNaN.shader")]
            public Shader stopNanPS;

            [Reload("Shaders/PostProcessing/SubpixelMorphologicalAntialiasing.shader")]
            public Shader subpixelMorphologicalAntialiasingPS;

            [Reload("Shaders/PostProcessing/GaussianDepthOfField.shader")]
            public Shader gaussianDepthOfFieldPS;

            [Reload("Shaders/PostProcessing/BokehDepthOfField.shader")]
            public Shader bokehDepthOfFieldPS;

            [Reload("Shaders/PostProcessing/CameraMotionBlur.shader")]
            public Shader cameraMotionBlurPS;

            [Reload("Shaders/PostProcessing/PaniniProjection.shader")]
            public Shader paniniProjectionPS;

            [Reload("Shaders/PostProcessing/LutBuilderLdr.shader")]
            public Shader lutBuilderLdrPS;

            [Reload("Shaders/PostProcessing/LutBuilderHdr.shader")]
            public Shader lutBuilderHdrPS;

            [Reload("Shaders/PostProcessing/Bloom.shader")]
            public Shader bloomPS;

            [Reload("Shaders/PostProcessing/UberPost.shader")]
            public Shader uberPostPS;

            [Reload("Shaders/PostProcessing/FinalPost.shader")]
            public Shader finalPostPassPS;
        }

        // Pre-baked lookup textures (blue noise, film grain, SMAA tables).
        [Serializable, ReloadGroup]
        public sealed class TextureResources
        {
            // Pre-baked noise
            [Reload("Textures/BlueNoise16/L/LDR_LLL1_{0}.png", 0, 32)]
            public Texture2D[] blueNoise16LTex;

            // Post-processing
            [Reload(new[]
            {
                "Textures/FilmGrain/Thin01.png",
                "Textures/FilmGrain/Thin02.png",
                "Textures/FilmGrain/Medium01.png",
                "Textures/FilmGrain/Medium02.png",
                "Textures/FilmGrain/Medium03.png",
                "Textures/FilmGrain/Medium04.png",
                "Textures/FilmGrain/Medium05.png",
                "Textures/FilmGrain/Medium06.png",
                "Textures/FilmGrain/Large01.png",
                "Textures/FilmGrain/Large02.png"
            })]
            public Texture2D[] filmGrainTex;

            [Reload("Textures/SMAA/AreaTex.tga")]
            public Texture2D smaaAreaTex;

            [Reload("Textures/SMAA/SearchTex.tga")]
            public Texture2D smaaSearchTex;
        }

        public ShaderResources shaders;
        public TextureResources textures;
    }
}

View File

@@ -0,0 +1,15 @@
using UnityEngine.Scripting.APIUpdating;
namespace UnityEngine.Rendering.Universal
{
    /// <summary>
    /// Serializable container for a renderer's stencil-state override settings.
    /// Field names are serialized — do not rename without a migration.
    /// </summary>
    [System.Serializable]
    [MovedFrom("UnityEngine.Rendering.LWRP")] public class StencilStateData
    {
        // When true, the values below replace the default stencil state.
        public bool overrideStencilState = false;
        // Reference value used by the stencil comparison.
        public int stencilReference = 0;
        public CompareFunction stencilCompareFunction = CompareFunction.Always;
        public StencilOp passOperation = StencilOp.Keep;
        public StencilOp failOperation = StencilOp.Keep;
        public StencilOp zFailOperation = StencilOp.Keep;
    }
}

View File

@@ -0,0 +1,35 @@
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!114 &11400000
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 76e49a5b88430df478c504fe5a5c1a62, type: 3}
m_Name: UniversalRenderPipelineEditorResources
m_EditorClassIdentifier:
shaders:
autodeskInteractivePS: {fileID: 4800000, guid: 0e9d5a909a1f7e84882a534d0d11e49f,
type: 3}
autodeskInteractiveTransparentPS: {fileID: 4800000, guid: 5c81372d981403744adbdda4433c9c11,
type: 3}
autodeskInteractiveMaskedPS: {fileID: 4800000, guid: 80aa867ac363ac043847b06ad71604cd,
type: 3}
terrainDetailLitPS: {fileID: 4800000, guid: f6783ab646d374f94b199774402a5144,
type: 3}
terrainDetailGrassPS: {fileID: 4800000, guid: e507fdfead5ca47e8b9a768b51c291a1,
type: 3}
terrainDetailGrassBillboardPS: {fileID: 4800000, guid: 29868e73b638e48ca99a19ea58c48d90,
type: 3}
defaultSpeedTree7PS: {fileID: 4800000, guid: 0f4122b9a743b744abe2fb6a0a88868b,
type: 3}
defaultSpeedTree8PS: {fileID: 4800000, guid: 99134b1f0c27d54469a840832a28fadf,
type: 3}
materials:
lit: {fileID: 2100000, guid: 31321ba15b8f8eb4c954353edc038b1d, type: 2}
particleLit: {fileID: 2100000, guid: e823cd5b5d27c0f4b8256e7c12ee3e6d, type: 2}
terrainLit: {fileID: 2100000, guid: 594ea882c5a793440b60ff72d896021e, type: 2}

View File

@@ -0,0 +1,81 @@
using System;
using UnityEngine.Scripting.APIUpdating;
namespace UnityEngine.Rendering.LWRP
{
    // Kept only so the API updater can rewrite old LWRP references to the URP type below.
    [Obsolete("LWRP -> Universal (UnityUpgradable) -> UnityEngine.Rendering.Universal.UniversalRenderPipelineEditorResources", true)]
    public class LightweightRenderPipelineEditorResources
    {
    }
}
namespace UnityEngine.Rendering.Universal
{
    /// <summary>
    /// ScriptableObject holding the default editor-only shader and material resources for URP.
    /// Field names are serialized — do not rename without a migration.
    /// </summary>
    [MovedFrom("UnityEngine.Rendering.LWRP")] public class UniversalRenderPipelineEditorResources : ScriptableObject
    {
        // Default shaders, reloaded from package-relative paths.
        [Serializable, ReloadGroup]
        public sealed class ShaderResources
        {
            [Reload("Shaders/Autodesk Interactive/Autodesk Interactive.shadergraph")]
            public Shader autodeskInteractivePS;

            [Reload("Shaders/Autodesk Interactive/Autodesk Interactive Transparent.shadergraph")]
            public Shader autodeskInteractiveTransparentPS;

            [Reload("Shaders/Autodesk Interactive/Autodesk Interactive Masked.shadergraph")]
            public Shader autodeskInteractiveMaskedPS;

            [Reload("Shaders/Terrain/TerrainDetailLit.shader")]
            public Shader terrainDetailLitPS;

            [Reload("Shaders/Terrain/WavingGrass.shader")]
            public Shader terrainDetailGrassPS;

            [Reload("Shaders/Terrain/WavingGrassBillboard.shader")]
            public Shader terrainDetailGrassBillboardPS;

            [Reload("Shaders/Nature/SpeedTree7.shader")]
            public Shader defaultSpeedTree7PS;

            [Reload("Shaders/Nature/SpeedTree8.shader")]
            public Shader defaultSpeedTree8PS;
        }

        // Default materials, reloaded from package-relative paths.
        [Serializable, ReloadGroup]
        public sealed class MaterialResources
        {
            [Reload("Runtime/Materials/Lit.mat")]
            public Material lit;

            [Reload("Runtime/Materials/ParticlesLit.mat")]
            public Material particleLit;

            [Reload("Runtime/Materials/TerrainLit.mat")]
            public Material terrainLit;
        }

        public ShaderResources shaders;
        public MaterialResources materials;
    }

#if UNITY_EDITOR
    [UnityEditor.CustomEditor(typeof(UniversalRenderPipelineEditorResources), true)]
    class UniversalRenderPipelineEditorResourcesEditor : UnityEditor.Editor
    {
        public override void OnInspectorGUI()
        {
            DrawDefaultInspector();

            // Add a "Reload All" button in inspector when we are in developer's mode
            if (UnityEditor.EditorPrefs.GetBool("DeveloperMode") && GUILayout.Button("Reload All"))
            {
                var resources = target as UniversalRenderPipelineEditorResources;
                // Null the references first so ReloadAllNullIn repopulates every field.
                resources.materials = null;
                resources.shaders = null;
                ResourceReloader.ReloadAllNullIn(target, UniversalRenderPipelineAsset.packagePath);
            }
        }
    }
#endif
}

View File

@@ -0,0 +1,17 @@
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!114 &11400000
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 307a736764ebbce44b54d72f8467b6fd, type: 3}
m_Name: XRSystemData
m_EditorClassIdentifier:
shaders:
xrOcclusionMeshPS: {fileID: 4800000, guid: 4431b1f1f743fbf4eb310a967890cbea, type: 3}
xrMirrorViewPS: {fileID: 4800000, guid: d5a307c014552314b9f560906d708772, type: 3}

View File

@@ -0,0 +1,45 @@
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.ProjectWindowCallback;
#endif
using System;
namespace UnityEngine.Rendering.Universal
{
    /// <summary>
    /// ScriptableObject holding the shader resources used by URP's XR system
    /// (occlusion mesh and mirror view). Field names are serialized.
    /// </summary>
    [Serializable]
    public class XRSystemData : ScriptableObject
    {
#if UNITY_EDITOR
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1812")]
        internal class CreateXRSystemDataAsset : EndNameEditAction
        {
            // Creates the asset on disk and reloads any null resource references from the package.
            public override void Action(int instanceId, string pathName, string resourceFile)
            {
                var instance = CreateInstance<XRSystemData>();
                AssetDatabase.CreateAsset(instance, pathName);
                ResourceReloader.ReloadAllNullIn(instance, UniversalRenderPipelineAsset.packagePath);
                Selection.activeObject = instance;
            }
        }

        [MenuItem("Assets/Create/Rendering/Universal Render Pipeline/XR System Data", priority = CoreUtils.assetCreateMenuPriority3)]
        static void CreateXRSystemData()
        {
            ProjectWindowUtil.StartNameEditingIfProjectWindowExists(0, CreateInstance<CreateXRSystemDataAsset>(), "CustomXRSystemData.asset", null, null);
        }
#endif

        [Serializable, ReloadGroup]
        public sealed class ShaderResources
        {
            [Reload("Shaders/XR/XROcclusionMesh.shader")]
            public Shader xrOcclusionMeshPS;

            [Reload("Shaders/XR/XRMirrorView.shader")]
            public Shader xrMirrorViewPS;
        }

        public ShaderResources shaders;
    }
}

View File

@@ -0,0 +1,175 @@
using System;
using System.Runtime.InteropServices;
using Unity.Collections;
using UnityEngine.Rendering.Universal.Internal;
namespace UnityEngine.Rendering.Universal
{
    // Singleton owning the NativeArrays and ComputeBuffers shared by the deferred
    // lighting passes. Buffers are recycled across frames once enough frames have
    // elapsed that the GPU can no longer be reading them.
    class DeferredShaderData : IDisposable
    {
        static DeferredShaderData m_Instance = null;

        struct ComputeBufferInfo
        {
            // Frame counter value at the time the buffer was last handed out.
            public uint frameUsed;
            public ComputeBufferType type; // There is no interface to retrieve the type of a ComputeBuffer, so we must save it on our side
        }

        // Precomputed tiles (for each tiler).
        NativeArray<PreTile>[] m_PreTiles = null;
        // Structured buffers and constant buffers are all allocated from this array.
        ComputeBuffer[] m_Buffers = null;
        // We need to store extra info per ComputeBuffer.
        ComputeBufferInfo[] m_BufferInfos;

        // How many buffers have been created so far. This is <= than m_Buffers.Length.
        int m_BufferCount = 0;
        // Remember index of last buffer used. This optimizes the search for available buffer.
        int m_CachedBufferIndex = 0;
        // This counter is allowed to cycle back to 0.
        uint m_FrameIndex = 0;

        DeferredShaderData()
        {
            m_PreTiles = new NativeArray<PreTile>[DeferredConfig.kTilerDepth];
            m_Buffers = new ComputeBuffer[64];
            m_BufferInfos = new ComputeBufferInfo[64];
        }

        // Lazily-created singleton accessor.
        internal static DeferredShaderData instance
        {
            get
            {
                if (m_Instance == null)
                    m_Instance = new DeferredShaderData();

                return m_Instance;
            }
        }

        // Releases all native arrays and ComputeBuffers.
        // NOTE(review): m_Instance is not cleared here, so the same object is reused on the
        // next 'instance' access (buffer slots are null and get re-created) — confirm intended.
        public void Dispose()
        {
            DisposeNativeArrays(ref m_PreTiles);

            for (int i = 0; i < m_Buffers.Length; ++i)
            {
                if (m_Buffers[i] != null)
                {
                    m_Buffers[i].Dispose();
                    m_Buffers[i] = null;
                }
            }
            m_BufferCount = 0;
        }

        // Advances the frame counter that drives buffer recycling.
        internal void ResetBuffers()
        {
            ++m_FrameIndex; // Allowed to cycle back to 0.
        }

        // Returns the precomputed-tile array for the given tiler level, sized for 'count'.
        internal NativeArray<PreTile> GetPreTiles(int level, int count)
        {
            return GetOrUpdateNativeArray<PreTile>(ref m_PreTiles, level, count);
        }

        // Reserves a ComputeBuffer of 'count' elements of T. When asCBuffer is true the element
        // count is padded so the total byte size is a multiple of 16 (constant-buffer alignment).
        internal ComputeBuffer ReserveBuffer<T>(int count, bool asCBuffer) where T : struct
        {
            int stride = Marshal.SizeOf<T>();
            int paddedCount = asCBuffer ? Align(stride * count, 16) / stride : count;
            return GetOrUpdateBuffer(paddedCount, stride, asCBuffer);
        }

        // Returns the cached array at 'level', reallocating it when 'count' exceeds its length.
        NativeArray<T> GetOrUpdateNativeArray<T>(ref NativeArray<T>[] nativeArrays, int level, int count) where T : struct
        {
            if (!nativeArrays[level].IsCreated)
            {
                nativeArrays[level] = new NativeArray<T>(count, Allocator.Persistent);
            }
            else if (count > nativeArrays[level].Length)
            {
                nativeArrays[level].Dispose();
                nativeArrays[level] = new NativeArray<T>(count, Allocator.Persistent);
            }

            return nativeArrays[level];
        }

        // Disposes every created array in the given array-of-arrays.
        void DisposeNativeArrays<T>(ref NativeArray<T>[] nativeArrays) where T : struct
        {
            for (int i = 0; i < nativeArrays.Length; ++i)
            {
                if (nativeArrays[i].IsCreated)
                    nativeArrays[i].Dispose();
            }
        }

        // Finds (or creates) a buffer with matching type/count/stride that was last used at
        // least maxQueuedFrames frames ago, i.e. the GPU is guaranteed to be done with it.
        ComputeBuffer GetOrUpdateBuffer(int count, int stride, bool isConstantBuffer)
        {
            ComputeBufferType type = isConstantBuffer ? ComputeBufferType.Constant : ComputeBufferType.Structured;

#if UNITY_SWITCH // maxQueuedFrames returns -1 on Switch!
            int maxQueuedFrames = 3;
#else
            int maxQueuedFrames = QualitySettings.maxQueuedFrames;
            Assertions.Assert.IsTrue(maxQueuedFrames >= 1, "invalid QualitySettings.maxQueuedFrames");
#endif

            // Search starts just after the last buffer handed out (ring order).
            for (int i = 0; i < m_BufferCount; ++i)
            {
                int bufferIndex = (m_CachedBufferIndex + i + 1) % m_BufferCount;

                if (IsLessCircular(m_BufferInfos[bufferIndex].frameUsed + (uint)maxQueuedFrames, m_FrameIndex)
                    && m_BufferInfos[bufferIndex].type == type && m_Buffers[bufferIndex].count == count && m_Buffers[bufferIndex].stride == stride)
                {
                    m_BufferInfos[bufferIndex].frameUsed = m_FrameIndex;
                    m_CachedBufferIndex = bufferIndex;
                    return m_Buffers[bufferIndex];
                }
            }

            if (m_BufferCount == m_Buffers.Length) // If all buffers used: allocate more space.
            {
                ComputeBuffer[] newBuffers = new ComputeBuffer[m_BufferCount * 2];
                for (int i = 0; i < m_BufferCount; ++i)
                    newBuffers[i] = m_Buffers[i];
                m_Buffers = newBuffers;

                ComputeBufferInfo[] newBufferInfos = new ComputeBufferInfo[m_BufferCount * 2];
                for (int i = 0; i < m_BufferCount; ++i)
                    newBufferInfos[i] = m_BufferInfos[i];
                m_BufferInfos = newBufferInfos;
            }

            // Create new buffer.
            m_Buffers[m_BufferCount] = new ComputeBuffer(count, stride, type, ComputeBufferMode.Immutable);
            m_BufferInfos[m_BufferCount].frameUsed = m_FrameIndex;
            m_BufferInfos[m_BufferCount].type = type;
            m_CachedBufferIndex = m_BufferCount;

            return m_Buffers[m_BufferCount++];
        }

        // NOTE(review): not referenced anywhere in this class — confirm external use or remove.
        void DisposeBuffers(ComputeBuffer[,] buffers)
        {
            for (int i = 0; i < buffers.GetLength(0); ++i)
            {
                for (int j = 0; j < buffers.GetLength(1); ++j)
                {
                    if (buffers[i, j] != null)
                    {
                        buffers[i, j].Dispose();
                        buffers[i, j] = null;
                    }
                }
            }
        }

        // Circular "a < b" comparison on uint frame counters, tolerant of wrap-around.
        static bool IsLessCircular(uint a, uint b)
        {
            return a != b ? (b - a) < 0x80000000 : false;
        }

        // Rounds s up to the next multiple of alignment.
        static int Align(int s, int alignment)
        {
            return ((s + alignment - 1) / alignment) * alignment;
        }
    }
}

View File

@@ -0,0 +1,684 @@
using Unity.Collections;
using Unity.Collections.LowLevel.Unsafe;
using System.Runtime.CompilerServices;
using Unity.Mathematics;
using static Unity.Mathematics.math;
namespace UnityEngine.Rendering.Universal.Internal
{
// This structure is designed to be Burst friendly.
// It can be copied by value.
internal struct DeferredTiler
{
// Precomputed light data
// Precomputed light data
internal struct PrePunctualLight
{
    // view-space position.
    public float3 posVS;
    // Radius in world unit.
    public float radius;
    // Distance between closest bound of the light and the camera. Used for sorting lights front-to-back.
    public float minDist;
    // Projected position of the sphere centre on the screen (near plane).
    public float2 screenPos;
    // Index into renderingData.lightData.visibleLights native array.
    public ushort visLightIndex;
}

// Result of clipping a light volume against a tile frustum.
enum ClipResult
{
    Unknown,
    In,
    Out,
}

// Tile dimensions in pixels.
int m_TilePixelWidth;
int m_TilePixelHeight;
// Tile grid dimensions for the current resolution.
int m_TileXCount;
int m_TileYCount;
// Fixed header size in uint in m_TileHeader.
// Only finest tiler requires to store extra per-tile information (light list depth range, bitmask for 2.5D culling).
int m_TileHeaderSize;
// Indicative average lights per tile. Only used when initializing the size of m_DataTile for the first time.
int m_AvgLightPerTile;
// 0, 1 or 2 (see DeferredConfig.kTilerDepth)
int m_TilerLevel;
// Camera frustum planes, adjusted to account for tile size.
FrustumPlanes m_FrustumPlanes;
// Are we dealing with an orthographic projection.
bool m_IsOrthographic;
// Atomic counters are put in a NativeArray so they can be accessed/shared from jobs.
// [0] maxLightPerTile: Only valid for finest tiler: max light counter per tile. Reset every frame.
// [1] tileDataSize: reset every frame.
// [2] tileDataCapacity: extra amount of memory required by each tiler (depends on number of lights visible). Externally maintained.
[Unity.Collections.LowLevel.Unsafe.NativeDisableContainerSafetyRestriction]
NativeArray<int> m_Counters;
// Store all visible light indices for all tiles.
// (currently) Contains sequential blocks of ushort values (light indices and optionally lightDepthRange), for each tile
// For example for platforms using 16x16px tiles:
// in a finest tiler DeferredLights.m_Tilers[0] ( 16x16px tiles), each tile will use a block of 1 * 1 * 32 = 32 ushort values
// in an intermediate tiler DeferredLights.m_Tilers[1] ( 64x64px tiles), each tile will use a block of 4 * 4 * 32 = 512 ushort values
// in a coarsest tiler DeferredLights.m_Tilers[2] (256x256px tiles), each tile will use a block of 16 * 16 * 32 = 8192 ushort values
[Unity.Collections.LowLevel.Unsafe.NativeDisableContainerSafetyRestriction]
NativeArray<ushort> m_TileData;
// Store tile header (fixed size per tile)
// light offset, light count, optionally additional per-tile "header" values.
[Unity.Collections.LowLevel.Unsafe.NativeDisableContainerSafetyRestriction]
NativeArray<uint> m_TileHeaders;
// Precompute tile data.
[Unity.Collections.LowLevel.Unsafe.NativeDisableContainerSafetyRestriction]
NativeArray<PreTile> m_PreTiles;
// Creates a tiler for the given tile size and hierarchy level. Native containers are
// left uncreated here; they are allocated per-frame in Setup().
public DeferredTiler(int tilePixelWidth, int tilePixelHeight, int avgLightPerTile, int tilerLevel)
{
    m_TilePixelWidth = tilePixelWidth;
    m_TilePixelHeight = tilePixelHeight;
    m_TileXCount = 0;
    m_TileYCount = 0;
    // Finest tiler (at index 0) computes extra tile data stored into the header, so it requires more space. See CullFinalLights() vs CullIntermediateLights().
    // Finest tiler: lightListOffset, lightCount, listDepthRange, listBitMask
    // Coarse tilers: lightListOffset, lightCount
    m_TileHeaderSize = tilerLevel == 0 ? 4 : 2;
    m_AvgLightPerTile = avgLightPerTile;
    m_TilerLevel = tilerLevel;
    m_FrustumPlanes = new FrustumPlanes { left = 0, right = 0, bottom = 0, top = 0, zNear = 0, zFar = 0 };
    m_IsOrthographic = false;
    // Default-constructed NativeArrays are "not created" until Setup() allocates them.
    m_Counters = new NativeArray<int>();
    m_TileData = new NativeArray<ushort>();
    m_TileHeaders = new NativeArray<uint>();
    m_PreTiles = new NativeArray<PreTile>();
}
/// <summary>Hierarchy level of this tiler (0 = finest).</summary>
public int TilerLevel
{
    get { return m_TilerLevel; }
}

/// <summary>Number of tiles along X for the last resolution passed to PrecomputeTiles().</summary>
public int TileXCount
{
    get { return m_TileXCount; }
}

/// <summary>Number of tiles along Y for the last resolution passed to PrecomputeTiles().</summary>
public int TileYCount
{
    get { return m_TileYCount; }
}

/// <summary>Tile width in pixels.</summary>
public int TilePixelWidth
{
    get { return m_TilePixelWidth; }
}

/// <summary>Tile height in pixels.</summary>
public int TilePixelHeight
{
    get { return m_TilePixelHeight; }
}

/// <summary>Number of uints stored per tile in TileHeaders (4 for the finest tiler, 2 otherwise).</summary>
public int TileHeaderSize
{
    get { return m_TileHeaderSize; }
}

/// <summary>Max light count seen in any single tile this frame (finest tiler only); 0 if buffers are not allocated.</summary>
public int MaxLightPerTile
{
    get { return m_Counters.IsCreated ? m_Counters[0] : 0; }
}

/// <summary>Tile data capacity required by the last culling pass; 0 if buffers are not allocated.</summary>
public int TileDataCapacity
{
    get { return m_Counters.IsCreated ? m_Counters[2] : 0; }
}

/// <summary>Raw per-tile light index data; indexed via the offsets stored in TileHeaders.</summary>
public NativeArray<ushort> Tiles
{
    get { return m_TileData; }
}

/// <summary>Raw per-tile header data; see GetTileHeaderOffset() for indexing.</summary>
public NativeArray<uint> TileHeaders
{
    get { return m_TileHeaders; }
}
/// <summary>
/// Reads the light-list offset (into Tiles) and the light count stored in tile (i, j)'s header.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void GetTileOffsetAndCount(int i, int j, out int offset, out int count)
{
    int headerOffset = GetTileHeaderOffset(i, j);
    offset = (int)m_TileHeaders[headerOffset + 0];
    count = (int)m_TileHeaders[headerOffset + 1];
}
/// <summary>
/// Index of the first header entry for tile (i, j) inside the TileHeaders array.
/// Tiles are laid out row-major, m_TileHeaderSize uints per tile.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public int GetTileHeaderOffset(int i, int j)
{
    int tileIndex = j * m_TileXCount + i;
    return tileIndex * m_TileHeaderSize;
}
/// <summary>
/// Allocates the per-frame working buffers (Temp allocator) and resets the counters.
/// Call once per frame before culling; release with OnCameraCleanup().
/// </summary>
/// <param name="tileDataCapacity">Capacity hint for the tile data buffer; values &lt;= 0 fall back to tileCount * m_AvgLightPerTile.</param>
public void Setup(int tileDataCapacity)
{
    if (tileDataCapacity <= 0)
        tileDataCapacity = m_TileXCount * m_TileYCount * m_AvgLightPerTile;

    m_Counters = new NativeArray<int>(3, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
    m_TileData = new NativeArray<ushort>(tileDataCapacity, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
    m_TileHeaders = new NativeArray<uint>(m_TileXCount * m_TileYCount * m_TileHeaderSize, Allocator.Temp, NativeArrayOptions.UninitializedMemory);

    // See the m_Counters field comment for the meaning of each slot.
    m_Counters[0] = 0;
    m_Counters[1] = 0;
    m_Counters[2] = tileDataCapacity;
}
/// <summary>
/// Disposes the buffers allocated in Setup(). Safe to call when nothing was allocated
/// (each array is checked with IsCreated first).
/// </summary>
public void OnCameraCleanup()
{
    if (m_TileHeaders.IsCreated)
        m_TileHeaders.Dispose();
    if (m_TileData.IsCreated)
        m_TileData.Dispose();
    if (m_Counters.IsCreated)
        m_Counters.Dispose();
}
/// <summary>
/// Recomputes the tile grid dimensions for the given render resolution and precalculates,
/// for every tile, the 4 side planes of its sub-frustum (near/far planes are excluded; they
/// are tested elsewhere). Must be called when the projection or render size changes.
/// </summary>
public void PrecomputeTiles(Matrix4x4 proj, bool isOrthographic, int renderWidth, int renderHeight)
{
    // Round up so partially-covered border tiles get a full tile each.
    m_TileXCount = (renderWidth + m_TilePixelWidth - 1) / m_TilePixelWidth;
    m_TileYCount = (renderHeight + m_TilePixelHeight - 1) / m_TilePixelHeight;

    m_PreTiles = DeferredShaderData.instance.GetPreTiles(m_TilerLevel, m_TileXCount * m_TileYCount);

    // Adjust render width and height to account for tile size expanding over the screen (tiles have a fixed pixel size).
    int adjustedRenderWidth = Align(renderWidth, m_TilePixelWidth);
    int adjustedRenderHeight = Align(renderHeight, m_TilePixelHeight);

    // Now adjust the right and bottom clipping planes.
    m_FrustumPlanes = proj.decomposeProjection;
    m_FrustumPlanes.right = m_FrustumPlanes.left + (m_FrustumPlanes.right - m_FrustumPlanes.left) * (adjustedRenderWidth / (float)renderWidth);
    m_FrustumPlanes.bottom = m_FrustumPlanes.top + (m_FrustumPlanes.bottom - m_FrustumPlanes.top) * (adjustedRenderHeight / (float)renderHeight);
    m_IsOrthographic = isOrthographic;

    // Tile size in world units.
    float tileWidthWS = (m_FrustumPlanes.right - m_FrustumPlanes.left) / m_TileXCount;
    float tileHeightWS = (m_FrustumPlanes.top - m_FrustumPlanes.bottom) / m_TileYCount;

    if (!isOrthographic) // perspective
    {
        for (int j = 0; j < m_TileYCount; ++j)
        {
            float tileTop = m_FrustumPlanes.top - tileHeightWS * j;
            float tileBottom = tileTop - tileHeightWS;

            for (int i = 0; i < m_TileXCount; ++i)
            {
                float tileLeft = m_FrustumPlanes.left + tileWidthWS * i;
                float tileRight = tileLeft + tileWidthWS;

                // Camera view space is always OpenGL RH coordinates system.
                // In view space with perspective projection, all planes pass by (0,0,0).
                PreTile preTile;
                preTile.planeLeft = MakePlane(new float3(tileLeft, tileBottom, -m_FrustumPlanes.zNear), new float3(tileLeft, tileTop, -m_FrustumPlanes.zNear));
                preTile.planeRight = MakePlane(new float3(tileRight, tileTop, -m_FrustumPlanes.zNear), new float3(tileRight, tileBottom, -m_FrustumPlanes.zNear));
                preTile.planeBottom = MakePlane(new float3(tileRight, tileBottom, -m_FrustumPlanes.zNear), new float3(tileLeft, tileBottom, -m_FrustumPlanes.zNear));
                preTile.planeTop = MakePlane(new float3(tileLeft, tileTop, -m_FrustumPlanes.zNear), new float3(tileRight, tileTop, -m_FrustumPlanes.zNear));

                m_PreTiles[i + j * m_TileXCount] = preTile;
            }
        }
    }
    else
    {
        for (int j = 0; j < m_TileYCount; ++j)
        {
            float tileTop = m_FrustumPlanes.top - tileHeightWS * j;
            float tileBottom = tileTop - tileHeightWS;

            for (int i = 0; i < m_TileXCount; ++i)
            {
                float tileLeft = m_FrustumPlanes.left + tileWidthWS * i;
                float tileRight = tileLeft + tileWidthWS;

                // Camera view space is always OpenGL RH coordinates system.
                // With an orthographic projection the side planes are parallel to the view axis,
                // so each plane is built from 3 points: two on the near plane and one extruded along -z.
                PreTile preTile;
                preTile.planeLeft = MakePlane(new float3(tileLeft, tileBottom, -m_FrustumPlanes.zNear), new float3(tileLeft, tileBottom, -m_FrustumPlanes.zNear - 1.0f), new float3(tileLeft, tileTop, -m_FrustumPlanes.zNear));
                preTile.planeRight = MakePlane(new float3(tileRight, tileTop, -m_FrustumPlanes.zNear), new float3(tileRight, tileTop, -m_FrustumPlanes.zNear - 1.0f), new float3(tileRight, tileBottom, -m_FrustumPlanes.zNear));
                preTile.planeBottom = MakePlane(new float3(tileRight, tileBottom, -m_FrustumPlanes.zNear), new float3(tileRight, tileBottom, -m_FrustumPlanes.zNear - 1.0f), new float3(tileLeft, tileBottom, -m_FrustumPlanes.zNear));
                preTile.planeTop = MakePlane(new float3(tileLeft, tileTop, -m_FrustumPlanes.zNear), new float3(tileLeft, tileTop, -m_FrustumPlanes.zNear - 1.0f), new float3(tileRight, tileTop, -m_FrustumPlanes.zNear));

                m_PreTiles[i + j * m_TileXCount] = preTile;
            }
        }
    }
}
// This differs from CullIntermediateLights in 3 ways:
// - tile-frustums/light intersection use different algorithm
// - depth range of the light shape intersecting the tile-frustums is output in the tile list header section
// - light indices written out are indexing visible_lights, rather than the array of PrePunctualLights.
//
// Culls the given slice of lights into the finest tiles of the rectangular tile range
// [istart,iend) x [jstart,jend), writing for each tile: light list offset, light count,
// packed f16 depth-remap coefficients (a, b), and the 2.5D depth bitmask.
unsafe public void CullFinalLights(ref NativeArray<PrePunctualLight> punctualLights,
    ref NativeArray<ushort> lightIndices, int lightStartIndex, int lightCount,
    int istart, int iend, int jstart, int jend)
{
    // Interestingly, 2-3% faster when using unsafe arrays.
    PrePunctualLight* _punctualLights = (PrePunctualLight*)NativeArrayUnsafeUtility.GetUnsafeBufferPointerWithoutChecks(punctualLights);
    ushort* _lightIndices = (ushort*)NativeArrayUnsafeUtility.GetUnsafeBufferPointerWithoutChecks(lightIndices);
    uint* _tileHeaders = (uint*)NativeArrayUnsafeUtility.GetUnsafeBufferPointerWithoutChecks(m_TileHeaders);

    if (lightCount == 0)
    {
        // No lights: zero all 4 header entries for every tile in the assigned range and bail out.
        for (int j = jstart; j < jend; ++j)
        for (int i = istart; i < iend; ++i)
        {
            int headerOffset = GetTileHeaderOffset(i, j);
            _tileHeaders[headerOffset + 0] = 0;
            _tileHeaders[headerOffset + 1] = 0;
            _tileHeaders[headerOffset + 2] = 0;
            _tileHeaders[headerOffset + 3] = 0;
        }
        return;
    }

    // Store culled lights in temporary buffer. Additionally store depth range of each light for a given tile too.
    // the depth range is a 32bit mask, but packed into a 16bits value since the range of the light is continuous
    // (only need to store first bit enabled, and count of enabled bits).
    // Layout of "tiles": [0, lightCount) light indices, [lightCount, 2*lightCount) packed depth ranges.
    ushort* tiles = stackalloc ushort[lightCount * 2];
    float2* depthRanges = stackalloc float2[lightCount];

    int maxLightPerTile = 0; // for stats
    int lightEndIndex = lightStartIndex + lightCount;
    float2 tileSize = new float2((m_FrustumPlanes.right - m_FrustumPlanes.left) / m_TileXCount, (m_FrustumPlanes.top - m_FrustumPlanes.bottom) / m_TileYCount);
    float2 tileExtents = tileSize * 0.5f;
    float2 tileExtentsInv = new float2(1.0f / tileExtents.x, 1.0f / tileExtents.y);

    for (int j = jstart; j < jend; ++j)
    {
        float tileYCentre = m_FrustumPlanes.top - (tileExtents.y + j * tileSize.y);

        for (int i = istart; i < iend; ++i)
        {
            float tileXCentre = m_FrustumPlanes.left + tileExtents.x + i * tileSize.x;

            PreTile preTile = m_PreTiles[i + j * m_TileXCount];
            int culledLightCount = 0;

            // For the current tile's light list, min&max depth range (absolute values).
            float listMinDepth = float.MaxValue;
            float listMaxDepth = -float.MaxValue;

            // Duplicate the inner loop twice. Testing for the ortographic case inside the inner loop would cost an extra 8% otherwise.
            // Missing C++ template argument here!
            if (!m_IsOrthographic)
            {
                for (int vi = lightStartIndex; vi < lightEndIndex; ++vi)
                {
                    ushort lightIndex = _lightIndices[vi];
                    PrePunctualLight ppl = _punctualLights[lightIndex];

                    // Offset tileCentre toward the light to calculate a more conservative minMax depth bound,
                    // but it must remains inside the tile and must not pass further than the light centre.
                    float2 tileCentre = new float2(tileXCentre, tileYCentre);
                    float2 dir = ppl.screenPos - tileCentre;
                    float2 d = abs(dir * tileExtentsInv);

                    float sInv = 1.0f / max3(d.x, d.y, 1.0f);
                    float3 tileOffCentre = new float3(tileCentre.x + dir.x * sInv, tileCentre.y + dir.y * sInv, -m_FrustumPlanes.zNear);
                    float3 tileOrigin = new float3(0.0f);

                    float t0, t1;
                    // This is more expensive than Clip() but allow to compute min&max depth range for the part of the light inside the tile.
                    if (!IntersectionLineSphere(ppl.posVS, ppl.radius, tileOrigin, tileOffCentre, out t0, out t1))
                        continue;

                    listMinDepth = listMinDepth < t0 ? listMinDepth : t0;
                    listMaxDepth = listMaxDepth > t1 ? listMaxDepth : t1;
                    depthRanges[culledLightCount] = new float2(t0, t1);
                    // Because this always output to the finest tiles, contrary to CullLights(),
                    // the result are indices into visibleLights, instead of indices into punctualLights.
                    tiles[culledLightCount] = ppl.visLightIndex;
                    ++culledLightCount;
                }
            }
            else
            {
                for (int vi = lightStartIndex; vi < lightEndIndex; ++vi)
                {
                    ushort lightIndex = _lightIndices[vi];
                    PrePunctualLight ppl = _punctualLights[lightIndex];

                    // Offset tileCentre toward the light to calculate a more conservative minMax depth bound,
                    // but it must remains inside the tile and must not pass further than the light centre.
                    float2 tileCentre = new float2(tileXCentre, tileYCentre);
                    float2 dir = ppl.screenPos - tileCentre;
                    float2 d = abs(dir * tileExtentsInv);

                    float sInv = 1.0f / max3(d.x, d.y, 1.0f);
                    // Orthographic: the ray is axis-aligned (origin in the tile plane, direction along -z).
                    float3 tileOffCentre = new float3(0, 0, -m_FrustumPlanes.zNear);
                    float3 tileOrigin = new float3(tileCentre.x + dir.x * sInv, tileCentre.y + dir.y * sInv, 0.0f);

                    float t0, t1;
                    // This is more expensive than Clip() but allow to compute min&max depth range for the part of the light inside the tile.
                    if (!IntersectionLineSphere(ppl.posVS, ppl.radius, tileOrigin, tileOffCentre, out t0, out t1))
                        continue;

                    listMinDepth = listMinDepth < t0 ? listMinDepth : t0;
                    listMaxDepth = listMaxDepth > t1 ? listMaxDepth : t1;
                    depthRanges[culledLightCount] = new float2(t0, t1);
                    // Because this always output to the finest tiles, contrary to CullLights(),
                    // the result are indices into visibleLights, instead of indices into punctualLights.
                    tiles[culledLightCount] = ppl.visLightIndex;
                    ++culledLightCount;
                }
            }

            // Post-multiply by zNear to get actual world unit absolute depth values, then clamp to valid depth range.
            listMinDepth = max2(listMinDepth * m_FrustumPlanes.zNear, m_FrustumPlanes.zNear);
            listMaxDepth = min2(listMaxDepth * m_FrustumPlanes.zNear, m_FrustumPlanes.zFar);

            // Calculate bitmask for 2.5D culling.
            uint bitMask = 0;
            float depthRangeInv = 1.0f / (listMaxDepth - listMinDepth);
            for (int culledLightIndex = 0; culledLightIndex < culledLightCount; ++culledLightIndex)
            {
                float lightMinDepth = max2(depthRanges[culledLightIndex].x * m_FrustumPlanes.zNear, m_FrustumPlanes.zNear);
                float lightMaxDepth = min2(depthRanges[culledLightIndex].y * m_FrustumPlanes.zNear, m_FrustumPlanes.zFar);
                int firstBit = (int)((lightMinDepth - listMinDepth) * 32.0f * depthRangeInv);
                int lastBit = (int)((lightMaxDepth - listMinDepth) * 32.0f * depthRangeInv);
                int bitCount = min(lastBit - firstBit + 1, 32 - firstBit);
                // NOTE(review): if bitCount could reach 0 here (firstBit == 32), the shift amount 32 is
                // masked to 0 by C# semantics and the mask becomes full instead of empty — confirm unreachable.
                bitMask |= (uint)((0xFFFFFFFF >> (32 - bitCount)) << firstBit);

                tiles[culledLightCount + culledLightIndex] = (ushort)((uint)firstBit | (uint)(bitCount << 8));
            }

            // As listMinDepth and listMaxDepth are used to calculate the geometry 2.5D bitmask,
            // we can optimize the shader execution (TileDepthInfo.shader) by refactoring the calculation.
            // int bitIndex = 32.0h * (geoDepth - listMinDepth) / (listMaxDepth - listMinDepth);
            // Equivalent to:
            // a = 32.0 / (listMaxDepth - listMinDepth)
            // b = -listMinDepth * 32.0 / (listMaxDepth - listMinDepth)
            // int bitIndex = geoDepth * a + b;
            float a = 32.0f * depthRangeInv;
            float b = -listMinDepth * a;

            int tileDataSize = culledLightCount * 2;
            int tileOffset = culledLightCount > 0 ? AddTileData(tiles, ref tileDataSize) : 0;

            // Header layout (finest tiler): [0] data offset, [1] light count (0 if AddTileData overflowed),
            // [2] f16(a) | f16(b) << 16, [3] 2.5D bitmask.
            int headerOffset = GetTileHeaderOffset(i, j);
            _tileHeaders[headerOffset + 0] = (uint)tileOffset;
            _tileHeaders[headerOffset + 1] = (uint)(tileDataSize == 0 ? 0 : culledLightCount);
            _tileHeaders[headerOffset + 2] = _f32tof16(a) | (_f32tof16(b) << 16);
            _tileHeaders[headerOffset + 3] = bitMask;

            maxLightPerTile = max(maxLightPerTile, culledLightCount);
        }
    }

    m_Counters[0] = max(m_Counters[0], maxLightPerTile); // TODO make it atomic
}
// TODO: finer culling for spot lights
//
// Culls the given slice of lights into the coarse/intermediate tiles of the rectangular range
// [istart,iend) x [jstart,jend). Writes only a 2-entry header per tile (data offset, light count);
// indices written are into the PrePunctualLights array (unlike CullFinalLights).
unsafe public void CullIntermediateLights(ref NativeArray<PrePunctualLight> punctualLights,
    ref NativeArray<ushort> lightIndices, int lightStartIndex, int lightCount,
    int istart, int iend, int jstart, int jend)
{
    // Interestingly, 2-3% faster when using unsafe arrays.
    PrePunctualLight* _punctualLights = (PrePunctualLight*)NativeArrayUnsafeUtility.GetUnsafeBufferPointerWithoutChecks(punctualLights);
    ushort* _lightIndices = (ushort*)NativeArrayUnsafeUtility.GetUnsafeBufferPointerWithoutChecks(lightIndices);
    uint* _tileHeaders = (uint*)NativeArrayUnsafeUtility.GetUnsafeBufferPointerWithoutChecks(m_TileHeaders);

    if (lightCount == 0)
    {
        // No lights: zero the 2 header entries for every tile in the assigned range.
        for (int j = jstart; j < jend; ++j)
        for (int i = istart; i < iend; ++i)
        {
            int headerOffset = GetTileHeaderOffset(i, j);
            _tileHeaders[headerOffset + 0] = 0;
            _tileHeaders[headerOffset + 1] = 0;
        }
        return;
    }

    // Store culled result in temporary buffer.
    ushort* tiles = stackalloc ushort[lightCount];

    int lightEndIndex = lightStartIndex + lightCount;

    for (int j = jstart; j < jend; ++j)
    {
        for (int i = istart; i < iend; ++i)
        {
            PreTile preTile = m_PreTiles[i + j * m_TileXCount];
            int culledLightCount = 0;

            for (int vi = lightStartIndex; vi < lightEndIndex; ++vi)
            {
                ushort lightIndex = _lightIndices[vi];
                PrePunctualLight ppl = _punctualLights[lightIndex];

                // This is slightly faster than IntersectionLineSphere().
                if (!Clip(ref preTile, ppl.posVS, ppl.radius))
                    continue;

                tiles[culledLightCount] = lightIndex;
                ++culledLightCount;
            }

            // Copy the culled light list.
            // Note: AddTileData sets culledLightCount to 0 on buffer overflow, so the header then reports 0 lights.
            int tileOffset = culledLightCount > 0 ? AddTileData(tiles, ref culledLightCount) : 0;

            int headerOffset = GetTileHeaderOffset(i, j);
            _tileHeaders[headerOffset + 0] = (uint)tileOffset;
            _tileHeaders[headerOffset + 1] = (uint)culledLightCount;
        }
    }
}
/// <summary>
/// Lock-free append of "size" ushorts into m_TileData. Space is reserved with an atomic add on
/// m_Counters[1]; on success returns the offset of the copied data. On overflow nothing is copied,
/// "size" is set to 0 and the required capacity is recorded in m_Counters[2] for the next frame.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
unsafe int AddTileData(ushort* lightData, ref int size)
{
    int* _Counters = (int*)m_Counters.GetUnsafePtr();
    // Reserve the range [tileDataSize - size, tileDataSize) atomically.
    int tileDataSize = System.Threading.Interlocked.Add(ref _Counters[1], size);
    int offset = tileDataSize - size;

    if (tileDataSize <= m_TileData.Length)
    {
        ushort* _TileData = (ushort*)m_TileData.GetUnsafePtr();
        // size * 2: byte count (ushort entries).
        UnsafeUtility.MemCpy(_TileData + offset, lightData, size * 2);
        return offset;
    }
    else
    {
        // Buffer overflow. Ignore data to add.
        // Gracefully increasing the buffer size is possible but costs extra CPU time (see commented code below) due to the needed critical section.
        m_Counters[2] = max(m_Counters[2], tileDataSize); // use an atomic max instead?
        size = 0;
        return 0;
    }

    /*
    lock (this)
    {
        int offset = m_TileDataSize;
        m_TileDataSize += size;

        ushort* _TileData = (ushort*)m_TileData.GetUnsafePtr();
        if (m_TileDataSize > m_TileDataCapacity)
        {
            m_TileDataCapacity = max(m_TileDataSize, m_TileDataCapacity * 2);
            NativeArray<ushort> newTileData = new NativeArray<ushort>(m_TileDataCapacity, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
            ushort* _newTileData = (ushort*)newTileData.GetUnsafePtr();
            UnsafeUtility.MemCpy(_newTileData, _TileData, offset * 2);
            m_TileData.Dispose();
            m_TileData = newTileData;
            _TileData = _newTileData;
        }

        UnsafeUtility.MemCpy(_TileData + offset, lightData, size * 2);
        return offset;
    }
    */
}
// Return parametric intersection between a sphere and a line.
// The intersections points P0 and P1 are:
// P0 = raySource + rayDirection * t0.
// P1 = raySource + rayDirection * t1.
//
// Solves |raySource + t * rayDirection - centre|^2 = radius^2, i.e. the quadratic
// A*t^2 + 2*B*t + C = 0 in "half-B" form: discriminant = B^2 - A*C.
// Returns false (t0 = t1 = 0) when the line misses the sphere (or is tangent).
[MethodImpl(MethodImplOptions.AggressiveInlining)]
unsafe static bool IntersectionLineSphere(float3 centre, float radius, float3 raySource, float3 rayDirection, out float t0, out float t1)
{
    float A = dot(rayDirection, rayDirection); // always >= 0
    float B = dot(raySource - centre, rayDirection);
    // C = |raySource - centre|^2 - radius^2, expanded.
    float C = dot(raySource, raySource)
        + dot(centre, centre)
        - (radius * radius)
        - 2 * dot(raySource, centre);
    float discriminant = (B * B) - A * C;
    if (discriminant > 0)
    {
        float sqrt_discriminant = sqrt(discriminant);
        float A_inv = 1.0f / A;
        t0 = (-B - sqrt_discriminant) * A_inv;
        t1 = (-B + sqrt_discriminant) * A_inv;
        return true;
    }
    else
    {
        t0 = 0.0f; // invalid
        t1 = 0.0f; // invalid
        return false;
    }
}
// Clip a sphere against a 2D tile. Near and far planes are ignored (already tested).
// Returns true if the sphere (posVS, radius) overlaps the tile sub-frustum.
// Each side plane is tested in turn; ClipPartial may return a definitive answer early,
// otherwise a plane the sphere is fully inside of increments insideCount and the sphere
// overlaps only if it is inside all 4 planes.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
static bool Clip(ref PreTile tile, float3 posVS, float radius)
{
    // Simplified clipping code, only deals with 4 clipping planes.
    // zNear and zFar clipping planes are ignored as presumably the light is already visible to the camera frustum.
    float radiusSq = radius * radius;
    int insideCount = 0;
    ClipResult res;

    res = ClipPartial(tile.planeLeft, tile.planeBottom, tile.planeTop, posVS, radius, radiusSq, ref insideCount);
    if (res != ClipResult.Unknown)
        return res == ClipResult.In;

    res = ClipPartial(tile.planeRight, tile.planeBottom, tile.planeTop, posVS, radius, radiusSq, ref insideCount);
    if (res != ClipResult.Unknown)
        return res == ClipResult.In;

    res = ClipPartial(tile.planeTop, tile.planeLeft, tile.planeRight, posVS, radius, radiusSq, ref insideCount);
    if (res != ClipResult.Unknown)
        return res == ClipResult.In;

    res = ClipPartial(tile.planeBottom, tile.planeLeft, tile.planeRight, posVS, radius, radiusSq, ref insideCount);
    if (res != ClipResult.Unknown)
        return res == ClipResult.In;

    return insideCount == 4;
}
// Internal function to clip against 1 plane of a cube, with additional 2 side planes for false-positive detection (normally 4 planes, but near and far planes are ignored).
[MethodImpl(MethodImplOptions.AggressiveInlining)]
static ClipResult ClipPartial(float4 plane, float4 sidePlaneA, float4 sidePlaneB, float3 posVS, float radius, float radiusSq, ref int insideCount)
{
float d = DistanceToPlane(plane, posVS);
if (d + radius <= 0.0f) // completely outside
return ClipResult.Out;
else if (d < 0.0f) // intersection: further check: only need to consider case where more than half the sphere is outside
{
float3 p = posVS - plane.xyz * d;
float rSq = radiusSq - d * d;
if (SignedSq(DistanceToPlane(sidePlaneA, p)) >= -rSq
&& SignedSq(DistanceToPlane(sidePlaneB, p)) >= -rSq)
return ClipResult.In;
}
else // consider as good as completely inside
++insideCount;
return ClipResult.Unknown;
}
/// <summary>
/// Builds a normalized plane through the origin and the two given points
/// (used for perspective tiles, whose side planes all pass through the eye at (0,0,0)).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
static float4 MakePlane(float3 pb, float3 pc)
{
    float3 v0 = pb;
    float3 v1 = pc;
    float3 n = cross(v0, v1);
    n = normalize(n);
    // The planes pass all by the origin.
    return new float4(n.x, n.y, n.z, 0.0f);
}
/// <summary>
/// Builds a normalized plane through the three given points
/// (used for orthographic tiles, whose side planes do not pass through the origin).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
static float4 MakePlane(float3 pa, float3 pb, float3 pc)
{
    float3 v0 = pb - pa;
    float3 v1 = pc - pa;
    float3 n = cross(v0, v1);
    n = normalize(n);
    // w = -n.pa so that DistanceToPlane() is 0 for points on the plane.
    return new float4(n.x, n.y, n.z, -dot(n, pa));
}
/// <summary>
/// Signed distance from point p to the plane (xyz = unit normal, w = offset).
/// Positive on the side the normal points to.
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
static float DistanceToPlane(float4 plane, float3 p)
{
    return plane.x * p.x + plane.y * p.y + plane.z * p.z + plane.w;
}
/// <summary>
/// Squares f while keeping its sign (negative inputs give a negative result).
/// </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
static float SignedSq(float f)
{
    // Branching on the sign measured faster than Mathf.Sign(f) * (f * f).
    float sq = f * f;
    return f < 0.0f ? -sq : sq;
}
// Unity.Mathematics.min() calls Single_IsNan(), which significantly slows down the code
// (up to 20% of CullFinalLights()), hence this NaN-unchecked minimum.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
static float min2(float a, float b)
{
    if (a < b)
        return a;
    return b;
}
// Unity.Mathematics.max() calls Single_IsNan(), which significantly slows down the code
// (up to 20% of CullFinalLights()), hence this NaN-unchecked maximum.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
static float max2(float a, float b)
{
    if (a > b)
        return a;
    return b;
}
// NaN-unchecked three-way maximum (same rationale as max2).
[MethodImpl(MethodImplOptions.AggressiveInlining)]
static float max3(float a, float b, float c)
{
    if (a > b)
        return a > c ? a : c;
    return b > c ? b : c;
}
// This is copy-pasted from Unity.Mathematics.math.f32tof16(), but use min2() function that does not check for NaN (which would consume 10% of the execution time of CullFinalLights()).
// Converts a 32-bit float to its 16-bit half representation, returned in the low 16 bits.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static uint _f32tof16(float x)
{
    const int infinity_32 = 255 << 23;
    const uint msk = 0x7FFFF000u;

    uint ux = asuint(x);
    uint uux = ux & msk;
    uint h = (uint)(asuint(min2(asfloat(uux) * 1.92592994e-34f, 260042752.0f)) + 0x1000) >> 13; // Clamp to signed infinity if overflowed
    h = select(h, select(0x7c00u, 0x7e00u, (int)uux > infinity_32), (int)uux >= infinity_32); // NaN->qNaN and Inf->Inf
    // Re-attach the sign bit (and low mantissa bits masked out above).
    return h | (ux & ~msk) >> 16;
}
// Rounds s up to the next multiple of alignment (alignment must be > 0).
[MethodImpl(MethodImplOptions.AggressiveInlining)]
static int Align(int s, int alignment)
{
    int blocks = (s + alignment - 1) / alignment;
    return blocks * alignment;
}
}
}

View File

@@ -0,0 +1,86 @@
// This file should be used as a container for things on its
// way to being deprecated and removed in future releases
using System;
using System.ComponentModel;
using UnityEngine.Scripting.APIUpdating;
namespace UnityEngine.Rendering.Universal
{
public abstract partial class ScriptableRenderPass
{
    /// <summary>
    /// (Deprecated) This callback will be removed; it simply forwards to
    /// <c>OnCameraCleanup</c>. Override that method instead.
    /// </summary>
    [EditorBrowsable(EditorBrowsableState.Never)]
    public virtual void FrameCleanup(CommandBuffer cmd)
    {
        OnCameraCleanup(cmd);
    }
}
namespace Internal
{
    public partial class AdditionalLightsShadowCasterPass
    {
        // Both ids below are kept only for API compatibility; per the Obsolete messages,
        // the data they identified is now passed through SSBO entries instead.
        [Obsolete("AdditionalLightsShadowCasterPass.m_AdditionalShadowsBufferId was deprecated. Shadow slice matrix is now passed to the GPU using an entry in buffer m_AdditionalLightsWorldToShadow_SSBO", false)]
        public static int m_AdditionalShadowsBufferId;

        [Obsolete("AdditionalLightsShadowCasterPass.m_AdditionalShadowsIndicesId was deprecated. Shadow slice index is now passed to the GPU using last member of an entry in buffer m_AdditionalShadowParams_SSBO", false)]
        public static int m_AdditionalShadowsIndicesId;
    }
}
/// <summary>
/// (Obsolete) Legacy shadow cascade selector, superseded by the integer shadowCascadeCount.
/// </summary>
[Obsolete("This is obsolete, please use shadowCascadeCount instead.", false)]
[MovedFrom("UnityEngine.Rendering.LWRP")] public enum ShadowCascadesOption
{
    NoCascades,
    TwoCascades,
    FourCascades,
}
public partial class UniversalRenderPipelineAsset
{
#pragma warning disable 618 // Obsolete warning
    // Serialized only so old assets can still be migrated; superseded by shadowCascadeCount.
    [Obsolete("This is obsolete, please use shadowCascadeCount instead.", false)]
    [SerializeField] ShadowCascadesOption m_ShadowCascades = ShadowCascadesOption.NoCascades;

    /// <summary>
    /// (Obsolete) Maps the legacy cascade enum onto shadowCascadeCount.
    /// Only counts 1, 2 and 4 are representable; any other count throws
    /// <see cref="InvalidOperationException"/>.
    /// </summary>
    [Obsolete("This is obsolete, please use shadowCascadeCount instead.", false)]
    public ShadowCascadesOption shadowCascadeOption
    {
        get
        {
            switch (shadowCascadeCount)
            {
                case 1: return ShadowCascadesOption.NoCascades;
                case 2: return ShadowCascadesOption.TwoCascades;
                case 4: return ShadowCascadesOption.FourCascades;
                default: throw new InvalidOperationException("Cascade count is not compatible with obsolete API, please use shadowCascadeCount instead.");
            }
        }
        set
        {
            switch (value)
            {
                case ShadowCascadesOption.NoCascades:
                    shadowCascadeCount = 1;
                    break;
                case ShadowCascadesOption.TwoCascades:
                    shadowCascadeCount = 2;
                    break;
                case ShadowCascadesOption.FourCascades:
                    shadowCascadeCount = 4;
                    break;
                default:
                    throw new InvalidOperationException("Cascade count is not compatible with obsolete API, please use shadowCascadeCount instead.");
            }
        }
    }
#pragma warning restore 618 // Obsolete warning
}
[MovedFrom("UnityEngine.Rendering.LWRP")]
public abstract partial class ScriptableRenderer
{
    // Deprecated in 10.x
    /// <summary>
    /// (Obsolete) Old name for cameraDepthTarget; reads the same backing field.
    /// </summary>
    [Obsolete("cameraDepth has been renamed to cameraDepthTarget. (UnityUpgradable) -> cameraDepthTarget")]
    [EditorBrowsable(EditorBrowsableState.Never)]
    public RenderTargetIdentifier cameraDepth
    {
        get => m_CameraDepthTarget;
    }
}
}

View File

@@ -0,0 +1,111 @@
/*
** SGI FREE SOFTWARE LICENSE B (Version 2.0, Sept. 18, 2008)
** Copyright (C) 2011 Silicon Graphics, Inc.
** All Rights Reserved.
**
** Permission is hereby granted, free of charge, to any person obtaining a copy
** of this software and associated documentation files (the "Software"), to deal
** in the Software without restriction, including without limitation the rights
** to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
** of the Software, and to permit persons to whom the Software is furnished to do so,
** subject to the following conditions:
**
** The above copyright notice including the dates of first publication and either this
** permission notice or a reference to http://oss.sgi.com/projects/FreeB/ shall be
** included in all copies or substantial portions of the Software.
**
** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
** INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
** PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL SILICON GRAPHICS, INC.
** BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
** OR OTHER DEALINGS IN THE SOFTWARE.
**
** Except as contained in this notice, the name of Silicon Graphics, Inc. shall not
** be used in advertising or otherwise to promote the sale, use or other dealings in
** this Software without prior written authorization from Silicon Graphics, Inc.
*/
/*
** Original Author: Eric Veach, July 1994.
** libtess2: Mikko Mononen, http://code.google.com/p/libtess2/.
** LibTessDotNet: Remi Gillig, https://github.com/speps/LibTessDotNet
*/
using UnityEngine.Scripting.APIUpdating;
namespace UnityEngine.Experimental.Rendering.Universal
{
namespace LibTessDotNet
{
/// <summary>
/// Sorted circular doubly-linked list keyed by a user-supplied "less or equal"
/// predicate. A sentinel head node (with a null key) marks both ends, so the
/// smallest element is always head.Next.
/// </summary>
internal class Dict<TValue> where TValue : class
{
    public class Node
    {
        internal TValue _key;
        internal Node _prev, _next;

        public TValue Key { get { return _key; } }
        public Node Prev { get { return _prev; } }
        public Node Next { get { return _next; } }
    }

    public delegate bool LessOrEqual(TValue lhs, TValue rhs);

    private LessOrEqual _leq;
    Node _head;

    public Dict(LessOrEqual leq)
    {
        _leq = leq;

        // Sentinel node: null key, linked to itself on both sides.
        var sentinel = new Node { _key = null };
        sentinel._prev = sentinel;
        sentinel._next = sentinel;
        _head = sentinel;
    }

    /// <summary>Inserts key at its sorted position, scanning backward from the end.</summary>
    public Node Insert(TValue key)
    {
        return InsertBefore(_head, key);
    }

    /// <summary>Inserts key at its sorted position, scanning backward from node.</summary>
    public Node InsertBefore(Node node, TValue key)
    {
        // Walk backward past every element strictly greater than key.
        var cursor = node._prev;
        while (cursor._key != null && !_leq(cursor._key, key))
        {
            cursor = cursor._prev;
        }

        // Splice the new node in right after the cursor.
        var fresh = new Node { _key = key };
        fresh._next = cursor._next;
        cursor._next._prev = fresh;
        fresh._prev = cursor;
        cursor._next = fresh;

        return fresh;
    }

    /// <summary>Returns the first node whose key is not less than key (the sentinel if none).</summary>
    public Node Find(TValue key)
    {
        var walker = _head._next;
        while (walker._key != null && !_leq(key, walker._key))
        {
            walker = walker._next;
        }
        return walker;
    }

    /// <summary>Smallest element, or the sentinel when the dict is empty.</summary>
    public Node Min()
    {
        return _head._next;
    }

    /// <summary>Unlinks node from the list in O(1).</summary>
    public void Remove(Node node)
    {
        node._next._prev = node._prev;
        node._prev._next = node._next;
    }
}
}
}

View File

@@ -0,0 +1,301 @@
/*
** SGI FREE SOFTWARE LICENSE B (Version 2.0, Sept. 18, 2008)
** Copyright (C) 2011 Silicon Graphics, Inc.
** All Rights Reserved.
**
** Permission is hereby granted, free of charge, to any person obtaining a copy
** of this software and associated documentation files (the "Software"), to deal
** in the Software without restriction, including without limitation the rights
** to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
** of the Software, and to permit persons to whom the Software is furnished to do so,
** subject to the following conditions:
**
** The above copyright notice including the dates of first publication and either this
** permission notice or a reference to http://oss.sgi.com/projects/FreeB/ shall be
** included in all copies or substantial portions of the Software.
**
** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
** INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
** PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL SILICON GRAPHICS, INC.
** BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
** OR OTHER DEALINGS IN THE SOFTWARE.
**
** Except as contained in this notice, the name of Silicon Graphics, Inc. shall not
** be used in advertising or otherwise to promote the sale, use or other dealings in
** this Software without prior written authorization from Silicon Graphics, Inc.
*/
/*
** Original Author: Eric Veach, July 1994.
** libtess2: Mikko Mononen, http://code.google.com/p/libtess2/.
** LibTessDotNet: Remi Gillig, https://github.com/speps/LibTessDotNet
*/
using System;
using System.Diagnostics;
namespace UnityEngine.Experimental.Rendering.Universal
{
using Real = System.Single;
namespace LibTessDotNet
{
internal static class Geom
{
/// <summary>
/// Decides whether a region with winding number n counts as "inside" under the
/// given fill rule. Throws for an unrecognized rule value.
/// </summary>
public static bool IsWindingInside(WindingRule rule, int n)
{
    if (rule == WindingRule.EvenOdd)
        return (n & 1) == 1;
    if (rule == WindingRule.NonZero)
        return n != 0;
    if (rule == WindingRule.Positive)
        return n > 0;
    if (rule == WindingRule.Negative)
        return n < 0;
    if (rule == WindingRule.AbsGeqTwo)
        return n >= 2 || n <= -2;
    throw new Exception("Wrong winding rule");
}
// True when u, v, w are in counter-clockwise order (twice the signed triangle area is non-negative).
public static bool VertCCW(MeshUtils.Vertex u, MeshUtils.Vertex v, MeshUtils.Vertex w)
{
    return (u._s * (v._t - w._t) + v._s * (w._t - u._t) + w._s * (u._t - v._t)) >= 0.0f;
}

// Exact coordinate equality of two vertices.
public static bool VertEq(MeshUtils.Vertex lhs, MeshUtils.Vertex rhs)
{
    return lhs._s == rhs._s && lhs._t == rhs._t;
}

// Lexicographic order on (s, t): primary key s, ties broken by t.
public static bool VertLeq(MeshUtils.Vertex lhs, MeshUtils.Vertex rhs)
{
    return (lhs._s < rhs._s) || (lhs._s == rhs._s && lhs._t <= rhs._t);
}
/// <summary>
/// Given three vertices u,v,w such that VertLeq(u,v) && VertLeq(v,w),
/// evaluates the t-coord of the edge uw at the s-coord of the vertex v.
/// Returns v->t - (uw)(v->s), ie. the signed distance from uw to v.
/// If uw is vertical (and thus passes thru v), the result is zero.
///
/// The calculation is extremely accurate and stable, even when v
/// is very close to u or w. In particular if we set v->t = 0 and
/// let r be the negated result (this evaluates (uw)(v->s)), then
/// r is guaranteed to satisfy MIN(u->t,w->t) <= r <= MAX(u->t,w->t).
/// </summary>
public static Real EdgeEval(MeshUtils.Vertex u, MeshUtils.Vertex v, MeshUtils.Vertex w)
{
    Debug.Assert(VertLeq(u, v) && VertLeq(v, w));

    var gapL = v._s - u._s;
    var gapR = w._s - v._s;

    if (gapL + gapR > 0.0f)
    {
        // Interpolate from the nearer endpoint for numerical stability.
        if (gapL < gapR)
        {
            return (v._t - u._t) + (u._t - w._t) * (gapL / (gapL + gapR));
        }
        else
        {
            return (v._t - w._t) + (w._t - u._t) * (gapR / (gapL + gapR));
        }
    }
    /* vertical line */
    return 0;
}
/// <summary>
/// Returns a number whose sign matches EdgeEval(u,v,w) but which
/// is cheaper to evaluate. Returns > 0, == 0 , or < 0
/// as v is above, on, or below the edge uw.
/// </summary>
public static Real EdgeSign(MeshUtils.Vertex u, MeshUtils.Vertex v, MeshUtils.Vertex w)
{
    Debug.Assert(VertLeq(u, v) && VertLeq(v, w));

    var gapL = v._s - u._s;
    var gapR = w._s - v._s;

    if (gapL + gapR > 0.0f)
    {
        // Same quantity as EdgeEval scaled by (gapL + gapR): sign preserved, no division.
        return (v._t - w._t) * gapL + (v._t - u._t) * gapR;
    }
    /* vertical line */
    return 0;
}
public static bool TransLeq(MeshUtils.Vertex lhs, MeshUtils.Vertex rhs)
{
return (lhs._t < rhs._t) || (lhs._t == rhs._t && lhs._s <= rhs._s);
}
/// <summary>
/// Transposed counterpart of EdgeEval: evaluates the s-coord of edge uw at
/// the t-coord of v, given TransLeq(u,v) &amp;&amp; TransLeq(v,w). Returns the
/// signed s-distance from uw to v, or zero if uw has no t-extent.
/// </summary>
public static Real TransEval(MeshUtils.Vertex u, MeshUtils.Vertex v, MeshUtils.Vertex w)
{
    Debug.Assert(TransLeq(u, v) && TransLeq(v, w));

    var spanL = v._t - u._t;
    var spanR = w._t - v._t;
    var total = spanL + spanR;

    if (total > 0.0f)
    {
        // Interpolate from the nearer endpoint for better numerical stability.
        return spanL < spanR
            ? (v._s - u._s) + (u._s - w._s) * (spanL / total)
            : (v._s - w._s) + (w._s - u._s) * (spanR / total);
    }

    // Horizontal line (zero t-extent).
    return 0;
}
/// <summary>
/// Cheaper sign-only counterpart of TransEval: returns a value whose sign
/// matches TransEval(u,v,w), given TransLeq(u,v) &amp;&amp; TransLeq(v,w).
/// </summary>
public static Real TransSign(MeshUtils.Vertex u, MeshUtils.Vertex v, MeshUtils.Vertex w)
{
    Debug.Assert(TransLeq(u, v) && TransLeq(v, w));

    var spanL = v._t - u._t;
    var spanR = w._t - v._t;

    if (spanL + spanR > 0.0f)
    {
        return (v._s - w._s) * spanL + (v._s - u._s) * spanR;
    }

    // Horizontal line: sign is undefined, report zero.
    return 0;
}
/// <summary>
/// True when the edge's destination precedes its origin in VertLeq order.
/// </summary>
public static bool EdgeGoesLeft(MeshUtils.Edge e) => VertLeq(e._Dst, e._Org);
/// <summary>
/// True when the edge's origin precedes its destination in VertLeq order.
/// </summary>
public static bool EdgeGoesRight(MeshUtils.Edge e) => VertLeq(e._Org, e._Dst);
/// <summary>
/// Manhattan (L1) distance between the two vertices in (s, t) space.
/// </summary>
public static Real VertL1dist(MeshUtils.Vertex u, MeshUtils.Vertex v)
{
    var ds = Math.Abs(u._s - v._s);
    var dt = Math.Abs(u._t - v._t);
    return ds + dt;
}
/// <summary>
/// Accumulates eSrc's winding numbers onto eDst, on both half-edges of the pair.
/// </summary>
public static void AddWinding(MeshUtils.Edge eDst, MeshUtils.Edge eSrc)
{
    var dstSym = eDst._Sym;
    eDst._winding += eSrc._winding;
    dstSym._winding += eSrc._Sym._winding;
}
/// <summary>
/// Weighted combination of x and y with non-negative weights a and b
/// (negative weights are clamped to zero). When both weights are zero
/// the midpoint is returned. The result always lies between x and y.
/// </summary>
public static Real Interpolate(Real a, Real x, Real b, Real y)
{
    a = a < 0.0f ? 0.0f : a;
    b = b < 0.0f ? 0.0f : b;

    if (a <= b)
    {
        // b dominates; guard the degenerate all-zero case with the midpoint.
        return b == 0.0f ? (x + y) / 2.0f : x + (y - x) * (a / (a + b));
    }

    return y + (x - y) * (b / (a + b));
}
// Exchange the two vertex references in place.
static void Swap(ref MeshUtils.Vertex a, ref MeshUtils.Vertex b)
{
    var held = a;
    a = b;
    b = held;
}
/// <summary>
/// Given edges (o1,d1) and (o2,d2), compute their point of intersection.
/// The computed point is guaranteed to lie in the intersection of the
/// bounding rectangles defined by each edge.
/// The result is written into v's _s and _t fields; the input vertices are
/// not modified (the local parameter references may be swapped for ordering).
/// </summary>
public static void EdgeIntersect(MeshUtils.Vertex o1, MeshUtils.Vertex d1, MeshUtils.Vertex o2, MeshUtils.Vertex d2, MeshUtils.Vertex v)
{
    // This is certainly not the most efficient way to find the intersection
    // of two line segments, but it is very numerically stable.
    //
    // Strategy: find the two middle vertices in the VertLeq ordering,
    // and interpolate the intersection s-value from these. Then repeat
    // using the TransLeq ordering to find the intersection t-value.

    // Normalize so each edge goes "right" and edge 1 starts first.
    if (!VertLeq(o1, d1)) { Swap(ref o1, ref d1); }
    if (!VertLeq(o2, d2)) { Swap(ref o2, ref d2); }
    if (!VertLeq(o1, o2)) { Swap(ref o1, ref o2); Swap(ref d1, ref d2); }

    if (!VertLeq(o2, d1))
    {
        // Technically, no intersection -- do our best
        v._s = (o2._s + d1._s) / 2.0f;
    }
    else if (VertLeq(d1, d2))
    {
        // Interpolate between o2 and d1
        var z1 = EdgeEval(o1, o2, d1);
        var z2 = EdgeEval(o2, d1, d2);
        // Flip both signed distances together so the weights are non-negative
        // on balance; Interpolate clamps any remaining negative weight.
        if (z1 + z2 < 0.0f)
        {
            z1 = -z1;
            z2 = -z2;
        }
        v._s = Interpolate(z1, o2._s, z2, d1._s);
    }
    else
    {
        // Interpolate between o2 and d2
        var z1 = EdgeSign(o1, o2, d1);
        var z2 = -EdgeSign(o1, d2, d1);
        if (z1 + z2 < 0.0f)
        {
            z1 = -z1;
            z2 = -z2;
        }
        v._s = Interpolate(z1, o2._s, z2, d2._s);
    }

    // Now repeat the process for t

    if (!TransLeq(o1, d1)) { Swap(ref o1, ref d1); }
    if (!TransLeq(o2, d2)) { Swap(ref o2, ref d2); }
    if (!TransLeq(o1, o2)) { Swap(ref o1, ref o2); Swap(ref d1, ref d2); }

    if (!TransLeq(o2, d1))
    {
        // Technically, no intersection -- do our best
        v._t = (o2._t + d1._t) / 2.0f;
    }
    else if (TransLeq(d1, d2))
    {
        // Interpolate between o2 and d1
        var z1 = TransEval(o1, o2, d1);
        var z2 = TransEval(o2, d1, d2);
        if (z1 + z2 < 0.0f)
        {
            z1 = -z1;
            z2 = -z2;
        }
        v._t = Interpolate(z1, o2._t, z2, d1._t);
    }
    else
    {
        // Interpolate between o2 and d2
        var z1 = TransSign(o1, o2, d1);
        var z2 = -TransSign(o1, d2, d1);
        if (z1 + z2 < 0.0f)
        {
            z1 = -z1;
            z2 = -z2;
        }
        v._t = Interpolate(z1, o2._t, z2, d2._t);
    }
}
}
}
}

View File

@@ -0,0 +1,25 @@
** SGI FREE SOFTWARE LICENSE B (Version 2.0, Sept. 18, 2008)
** Copyright (C) 2011 Silicon Graphics, Inc.
** All Rights Reserved.
**
** Permission is hereby granted, free of charge, to any person obtaining a copy
** of this software and associated documentation files (the "Software"), to deal
** in the Software without restriction, including without limitation the rights
** to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
** of the Software, and to permit persons to whom the Software is furnished to do so,
** subject to the following conditions:
**
** The above copyright notice including the dates of first publication and either this
** permission notice or a reference to http://oss.sgi.com/projects/FreeB/ shall be
** included in all copies or substantial portions of the Software.
**
** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
** INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
** PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL SILICON GRAPHICS, INC.
** BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
** OR OTHER DEALINGS IN THE SOFTWARE.
**
** Except as contained in this notice, the name of Silicon Graphics, Inc. shall not
** be used in advertising or otherwise to promote the sale, use or other dealings in
** this Software without prior written authorization from Silicon Graphics, Inc.

View File

@@ -0,0 +1,503 @@
/*
** SGI FREE SOFTWARE LICENSE B (Version 2.0, Sept. 18, 2008)
** Copyright (C) 2011 Silicon Graphics, Inc.
** All Rights Reserved.
**
** Permission is hereby granted, free of charge, to any person obtaining a copy
** of this software and associated documentation files (the "Software"), to deal
** in the Software without restriction, including without limitation the rights
** to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
** of the Software, and to permit persons to whom the Software is furnished to do so,
** subject to the following conditions:
**
** The above copyright notice including the dates of first publication and either this
** permission notice or a reference to http://oss.sgi.com/projects/FreeB/ shall be
** included in all copies or substantial portions of the Software.
**
** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
** INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
** PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL SILICON GRAPHICS, INC.
** BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
** OR OTHER DEALINGS IN THE SOFTWARE.
**
** Except as contained in this notice, the name of Silicon Graphics, Inc. shall not
** be used in advertising or otherwise to promote the sale, use or other dealings in
** this Software without prior written authorization from Silicon Graphics, Inc.
*/
/*
** Original Author: Eric Veach, July 1994.
** libtess2: Mikko Mononen, http://code.google.com/p/libtess2/.
** LibTessDotNet: Remi Gillig, https://github.com/speps/LibTessDotNet
*/
using System;
using System.Diagnostics;
namespace UnityEngine.Experimental.Rendering.Universal
{
namespace LibTessDotNet
{
/// <summary>
/// Half-edge mesh used by the tessellator. Vertices, faces and edge pairs
/// each live in a circular doubly-linked list anchored by a dummy head node
/// (_vHead, _fHead, _eHead/_eHeadSym); iteration starts at head._next and
/// stops when the head is reached again. Instances are pooled via
/// MeshUtils.Pooled.
/// </summary>
internal class Mesh : MeshUtils.Pooled<Mesh>
{
    // Sentinel list heads; they are never real mesh elements.
    internal MeshUtils.Vertex _vHead;
    internal MeshUtils.Face _fHead;
    internal MeshUtils.Edge _eHead, _eHeadSym;

    public Mesh()
    {
        // Allocate the sentinels and make every list a one-element cycle.
        var v = _vHead = MeshUtils.Vertex.Create();
        var f = _fHead = MeshUtils.Face.Create();

        var pair = MeshUtils.EdgePair.Create();
        var e = _eHead = pair._e;
        var eSym = _eHeadSym = pair._eSym;

        v._next = v._prev = v;
        v._anEdge = null;

        f._next = f._prev = f;
        f._anEdge = null;
        f._trail = null;
        f._marked = false;
        f._inside = false;

        e._next = e;
        e._Sym = eSym;
        e._Onext = null;
        e._Lnext = null;
        e._Org = null;
        e._Lface = null;
        e._winding = 0;
        e._activeRegion = null;

        eSym._next = eSym;
        eSym._Sym = e;
        eSym._Onext = null;
        eSym._Lnext = null;
        eSym._Org = null;
        eSym._Lface = null;
        eSym._winding = 0;
        eSym._activeRegion = null;
    }

    /// <summary>
    /// Drops the list heads so a pooled instance holds no stale references.
    /// </summary>
    public override void Reset()
    {
        _vHead = null;
        _fHead = null;
        _eHead = _eHeadSym = null;
    }

    /// <summary>
    /// Returns every remaining face, vertex and edge to its object pool.
    /// Called by Free() before Reset().
    /// </summary>
    public override void OnFree()
    {
        for (MeshUtils.Face f = _fHead._next, fNext = _fHead; f != _fHead; f = fNext)
        {
            fNext = f._next;
            f.Free();
        }
        for (MeshUtils.Vertex v = _vHead._next, vNext = _vHead; v != _vHead; v = vNext)
        {
            vNext = v._next;
            v.Free();
        }
        for (MeshUtils.Edge e = _eHead._next, eNext = _eHead; e != _eHead; e = eNext)
        {
            eNext = e._next;
            e.Free();
        }
    }

    /// <summary>
    /// Creates one edge, two vertices and a loop (face).
    /// The loop consists of the two new half-edges.
    /// </summary>
    public MeshUtils.Edge MakeEdge()
    {
        var e = MeshUtils.MakeEdge(_eHead);

        MeshUtils.MakeVertex(e, _vHead);
        MeshUtils.MakeVertex(e._Sym, _vHead);
        MeshUtils.MakeFace(e, _fHead);

        return e;
    }

    /// <summary>
    /// Splice is the basic operation for changing the
    /// mesh connectivity and topology. It changes the mesh so that
    ///     eOrg->Onext = OLD( eDst->Onext )
    ///     eDst->Onext = OLD( eOrg->Onext )
    /// where OLD(...) means the value before the meshSplice operation.
    ///
    /// This can have two effects on the vertex structure:
    ///  - if eOrg->Org != eDst->Org, the two vertices are merged together
    ///  - if eOrg->Org == eDst->Org, the origin is split into two vertices
    /// In both cases, eDst->Org is changed and eOrg->Org is untouched.
    ///
    /// Similarly (and independently) for the face structure,
    ///  - if eOrg->Lface == eDst->Lface, one loop is split into two
    ///  - if eOrg->Lface != eDst->Lface, two distinct loops are joined into one
    /// In both cases, eDst->Lface is changed and eOrg->Lface is unaffected.
    ///
    /// Some special cases:
    /// If eDst == eOrg, the operation has no effect.
    /// If eDst == eOrg->Lnext, the new face will have a single edge.
    /// If eDst == eOrg->Lprev, the old face will have a single edge.
    /// If eDst == eOrg->Onext, the new vertex will have a single edge.
    /// If eDst == eOrg->Oprev, the old vertex will have a single edge.
    /// </summary>
    public void Splice(MeshUtils.Edge eOrg, MeshUtils.Edge eDst)
    {
        if (eOrg == eDst)
        {
            return;
        }

        bool joiningVertices = false;
        if (eDst._Org != eOrg._Org)
        {
            // We are merging two disjoint vertices -- destroy eDst->Org
            joiningVertices = true;
            MeshUtils.KillVertex(eDst._Org, eOrg._Org);
        }
        bool joiningLoops = false;
        if (eDst._Lface != eOrg._Lface)
        {
            // We are connecting two disjoint loops -- destroy eDst->Lface
            joiningLoops = true;
            MeshUtils.KillFace(eDst._Lface, eOrg._Lface);
        }

        // Change the edge structure
        MeshUtils.Splice(eDst, eOrg);

        if (!joiningVertices)
        {
            // We split one vertex into two -- the new vertex is eDst->Org.
            // Make sure the old vertex points to a valid half-edge.
            MeshUtils.MakeVertex(eDst, eOrg._Org);
            eOrg._Org._anEdge = eOrg;
        }
        if (!joiningLoops)
        {
            // We split one loop into two -- the new loop is eDst->Lface.
            // Make sure the old face points to a valid half-edge.
            MeshUtils.MakeFace(eDst, eOrg._Lface);
            eOrg._Lface._anEdge = eOrg;
        }
    }

    /// <summary>
    /// Removes the edge eDel. There are several cases:
    /// if (eDel->Lface != eDel->Rface), we join two loops into one; the loop
    /// eDel->Lface is deleted. Otherwise, we are splitting one loop into two;
    /// the newly created loop will contain eDel->Dst. If the deletion of eDel
    /// would create isolated vertices, those are deleted as well.
    /// </summary>
    public void Delete(MeshUtils.Edge eDel)
    {
        var eDelSym = eDel._Sym;

        // First step: disconnect the origin vertex eDel->Org. We make all
        // changes to get a consistent mesh in this "intermediate" state.

        bool joiningLoops = false;
        if (eDel._Lface != eDel._Rface)
        {
            // We are joining two loops into one -- remove the left face
            joiningLoops = true;
            MeshUtils.KillFace(eDel._Lface, eDel._Rface);
        }

        if (eDel._Onext == eDel)
        {
            // eDel is the only edge at its origin: the vertex becomes isolated.
            MeshUtils.KillVertex(eDel._Org, null);
        }
        else
        {
            // Make sure that eDel->Org and eDel->Rface point to valid half-edges
            eDel._Rface._anEdge = eDel._Oprev;
            eDel._Org._anEdge = eDel._Onext;

            MeshUtils.Splice(eDel, eDel._Oprev);

            if (!joiningLoops)
            {
                // We are splitting one loop into two -- create a new loop for eDel.
                MeshUtils.MakeFace(eDel, eDel._Lface);
            }
        }

        // Claim: the mesh is now in a consistent state, except that eDel->Org
        // may have been deleted. Now we disconnect eDel->Dst.

        if (eDelSym._Onext == eDelSym)
        {
            MeshUtils.KillVertex(eDelSym._Org, null);
            MeshUtils.KillFace(eDelSym._Lface, null);
        }
        else
        {
            // Make sure that eDel->Dst and eDel->Lface point to valid half-edges
            eDel._Lface._anEdge = eDelSym._Oprev;
            eDelSym._Org._anEdge = eDelSym._Onext;
            MeshUtils.Splice(eDelSym, eDelSym._Oprev);
        }

        // Any isolated vertices or faces have already been freed.
        MeshUtils.KillEdge(eDel);
    }

    /// <summary>
    /// Creates a new edge such that eNew == eOrg.Lnext and eNew.Dst is a newly created vertex.
    /// eOrg and eNew will have the same left face.
    /// </summary>
    public MeshUtils.Edge AddEdgeVertex(MeshUtils.Edge eOrg)
    {
        var eNew = MeshUtils.MakeEdge(eOrg);
        var eNewSym = eNew._Sym;

        // Connect the new edge appropriately
        MeshUtils.Splice(eNew, eOrg._Lnext);

        // Set vertex and face information
        eNew._Org = eOrg._Dst;
        MeshUtils.MakeVertex(eNewSym, eNew._Org);
        eNew._Lface = eNewSym._Lface = eOrg._Lface;

        return eNew;
    }

    /// <summary>
    /// Splits eOrg into two edges eOrg and eNew such that eNew == eOrg.Lnext.
    /// The new vertex is eOrg.Dst == eNew.Org.
    /// eOrg and eNew will have the same left face.
    /// </summary>
    public MeshUtils.Edge SplitEdge(MeshUtils.Edge eOrg)
    {
        var eTmp = AddEdgeVertex(eOrg);
        var eNew = eTmp._Sym;

        // Disconnect eOrg from eOrg->Dst and connect it to eNew->Org
        MeshUtils.Splice(eOrg._Sym, eOrg._Sym._Oprev);
        MeshUtils.Splice(eOrg._Sym, eNew);

        // Set the vertex and face information
        eOrg._Dst = eNew._Org;
        eNew._Dst._anEdge = eNew._Sym; // may have pointed to eOrg->Sym
        eNew._Rface = eOrg._Rface;
        eNew._winding = eOrg._winding; // copy old winding information
        eNew._Sym._winding = eOrg._Sym._winding;

        return eNew;
    }

    /// <summary>
    /// Creates a new edge from eOrg->Dst to eDst->Org, and returns the corresponding half-edge eNew.
    /// If eOrg->Lface == eDst->Lface, this splits one loop into two,
    /// and the newly created loop is eNew->Lface. Otherwise, two disjoint
    /// loops are merged into one, and the loop eDst->Lface is destroyed.
    ///
    /// If (eOrg == eDst), the new face will have only two edges.
    /// If (eOrg->Lnext == eDst), the old face is reduced to a single edge.
    /// If (eOrg->Lnext->Lnext == eDst), the old face is reduced to two edges.
    /// </summary>
    public MeshUtils.Edge Connect(MeshUtils.Edge eOrg, MeshUtils.Edge eDst)
    {
        var eNew = MeshUtils.MakeEdge(eOrg);
        var eNewSym = eNew._Sym;

        bool joiningLoops = false;
        if (eDst._Lface != eOrg._Lface)
        {
            // We are connecting two disjoint loops -- destroy eDst->Lface
            joiningLoops = true;
            MeshUtils.KillFace(eDst._Lface, eOrg._Lface);
        }

        // Connect the new edge appropriately
        MeshUtils.Splice(eNew, eOrg._Lnext);
        MeshUtils.Splice(eNewSym, eDst);

        // Set the vertex and face information
        eNew._Org = eOrg._Dst;
        eNewSym._Org = eDst._Org;
        eNew._Lface = eNewSym._Lface = eOrg._Lface;

        // Make sure the old face points to a valid half-edge
        eOrg._Lface._anEdge = eNewSym;

        if (!joiningLoops)
        {
            MeshUtils.MakeFace(eNew, eOrg._Lface);
        }

        return eNew;
    }

    /// <summary>
    /// Destroys a face and removes it from the global face list. All edges of
    /// fZap will have a NULL pointer as their left face. Any edges which
    /// also have a NULL pointer as their right face are deleted entirely
    /// (along with any isolated vertices this produces).
    /// An entire mesh can be deleted by zapping its faces, one at a time,
    /// in any order. Zapped faces cannot be used in further mesh operations!
    /// </summary>
    public void ZapFace(MeshUtils.Face fZap)
    {
        var eStart = fZap._anEdge;

        // walk around face, deleting edges whose right face is also NULL
        var eNext = eStart._Lnext;
        MeshUtils.Edge e, eSym;
        do
        {
            e = eNext;
            eNext = e._Lnext;

            e._Lface = null;
            if (e._Rface == null)
            {
                // delete the edge -- see TESSmeshDelete above

                if (e._Onext == e)
                {
                    MeshUtils.KillVertex(e._Org, null);
                }
                else
                {
                    // Make sure that e._Org points to a valid half-edge
                    e._Org._anEdge = e._Onext;
                    MeshUtils.Splice(e, e._Oprev);
                }
                eSym = e._Sym;
                if (eSym._Onext == eSym)
                {
                    MeshUtils.KillVertex(eSym._Org, null);
                }
                else
                {
                    // Make sure that eSym._Org points to a valid half-edge
                    eSym._Org._anEdge = eSym._Onext;
                    MeshUtils.Splice(eSym, eSym._Oprev);
                }
                MeshUtils.KillEdge(e);
            }
        }
        while (e != eStart);

        /* delete from circular doubly-linked list */
        var fPrev = fZap._prev;
        var fNext = fZap._next;
        fNext._prev = fPrev;
        fPrev._next = fNext;

        fZap.Free();
    }

    /// <summary>
    /// Merges pairs of adjacent interior faces when the merged polygon would
    /// stay convex and have at most maxVertsPerFace vertices. Used to emit
    /// polygons larger than triangles.
    /// </summary>
    public void MergeConvexFaces(int maxVertsPerFace)
    {
        for (var f = _fHead._next; f != _fHead; f = f._next)
        {
            // Skip faces which are outside the result
            if (!f._inside)
            {
                continue;
            }

            var eCur = f._anEdge;
            var vStart = eCur._Org;

            while (true)
            {
                var eNext = eCur._Lnext;
                var eSym = eCur._Sym;

                if (eSym != null && eSym._Lface != null && eSym._Lface._inside)
                {
                    // Try to merge the neighbour faces if the resulting polygons
                    // does not exceed maximum number of vertices.
                    int curNv = f.VertsCount;
                    int symNv = eSym._Lface.VertsCount;
                    if ((curNv + symNv - 2) <= maxVertsPerFace)
                    {
                        // Merge if the resulting poly is convex.
                        if (Geom.VertCCW(eCur._Lprev._Org, eCur._Org, eSym._Lnext._Lnext._Org) &&
                            Geom.VertCCW(eSym._Lprev._Org, eSym._Org, eCur._Lnext._Lnext._Org))
                        {
                            // Delete the shared edge; eCur is now stale, so
                            // restart the step from the neighbour's next edge.
                            eNext = eSym._Lnext;
                            Delete(eSym);
                            eCur = null;
                        }
                    }
                }

                if (eCur != null && eCur._Lnext._Org == vStart)
                    break;

                // Continue to next edge.
                eCur = eNext;
            }
        }
    }

    /// <summary>
    /// Walks all three lists and asserts the half-edge invariants
    /// (Sym/Onext/Lnext consistency, correct Org/Lface back-pointers, and
    /// well-formed sentinels). Compiled out of non-DEBUG builds.
    /// </summary>
    [Conditional("DEBUG")]
    public void Check()
    {
        MeshUtils.Edge e;

        MeshUtils.Face fPrev = _fHead, f;
        for (fPrev = _fHead; (f = fPrev._next) != _fHead; fPrev = f)
        {
            e = f._anEdge;
            do
            {
                Debug.Assert(e._Sym != e);
                Debug.Assert(e._Sym._Sym == e);
                Debug.Assert(e._Lnext._Onext._Sym == e);
                Debug.Assert(e._Onext._Sym._Lnext == e);
                Debug.Assert(e._Lface == f);
                e = e._Lnext;
            }
            while (e != f._anEdge);
        }
        Debug.Assert(f._prev == fPrev && f._anEdge == null);

        MeshUtils.Vertex vPrev = _vHead, v;
        for (vPrev = _vHead; (v = vPrev._next) != _vHead; vPrev = v)
        {
            Debug.Assert(v._prev == vPrev);
            e = v._anEdge;
            do
            {
                Debug.Assert(e._Sym != e);
                Debug.Assert(e._Sym._Sym == e);
                Debug.Assert(e._Lnext._Onext._Sym == e);
                Debug.Assert(e._Onext._Sym._Lnext == e);
                Debug.Assert(e._Org == v);
                e = e._Onext;
            }
            while (e != v._anEdge);
        }
        Debug.Assert(v._prev == vPrev && v._anEdge == null);

        MeshUtils.Edge ePrev = _eHead;
        for (ePrev = _eHead; (e = ePrev._next) != _eHead; ePrev = e)
        {
            Debug.Assert(e._Sym._next == ePrev._Sym);
            Debug.Assert(e._Sym != e);
            Debug.Assert(e._Sym._Sym == e);
            Debug.Assert(e._Org != null);
            Debug.Assert(e._Dst != null);
            Debug.Assert(e._Lnext._Onext._Sym == e);
            Debug.Assert(e._Onext._Sym._Lnext == e);
        }
        Debug.Assert(e._Sym._next == ePrev._Sym
            && e._Sym == _eHeadSym
            && e._Sym._Sym == e
            && e._Org == null && e._Dst == null
            && e._Lface == null && e._Rface == null);
    }
}
}
}

View File

@@ -0,0 +1,475 @@
/*
** SGI FREE SOFTWARE LICENSE B (Version 2.0, Sept. 18, 2008)
** Copyright (C) 2011 Silicon Graphics, Inc.
** All Rights Reserved.
**
** Permission is hereby granted, free of charge, to any person obtaining a copy
** of this software and associated documentation files (the "Software"), to deal
** in the Software without restriction, including without limitation the rights
** to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
** of the Software, and to permit persons to whom the Software is furnished to do so,
** subject to the following conditions:
**
** The above copyright notice including the dates of first publication and either this
** permission notice or a reference to http://oss.sgi.com/projects/FreeB/ shall be
** included in all copies or substantial portions of the Software.
**
** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
** INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
** PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL SILICON GRAPHICS, INC.
** BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
** OR OTHER DEALINGS IN THE SOFTWARE.
**
** Except as contained in this notice, the name of Silicon Graphics, Inc. shall not
** be used in advertising or otherwise to promote the sale, use or other dealings in
** this Software without prior written authorization from Silicon Graphics, Inc.
*/
/*
** Original Author: Eric Veach, July 1994.
** libtess2: Mikko Mononen, http://code.google.com/p/libtess2/.
** LibTessDotNet: Remi Gillig, https://github.com/speps/LibTessDotNet
*/
using System;
using System.Collections.Generic;
using System.Diagnostics;
using UnityEngine.Scripting.APIUpdating;
namespace UnityEngine.Experimental.Rendering.Universal
{
using Real = System.Single;
namespace LibTessDotNet
{
/// <summary>
/// Minimal 3-component single-precision vector with the by-ref static
/// operations the tessellator needs. Mutable value type.
/// </summary>
internal struct Vec3
{
    public readonly static Vec3 Zero = new Vec3();

    public Real X, Y, Z;

    /// <summary>Component access by index: 0 = X, 1 = Y, 2 = Z.</summary>
    public Real this[int index]
    {
        get
        {
            switch (index)
            {
                case 0: return X;
                case 1: return Y;
                case 2: return Z;
                default: throw new IndexOutOfRangeException();
            }
        }
        set
        {
            switch (index)
            {
                case 0: X = value; break;
                case 1: Y = value; break;
                case 2: Z = value; break;
                default: throw new IndexOutOfRangeException();
            }
        }
    }

    /// <summary>result = lhs - rhs, component-wise.</summary>
    public static void Sub(ref Vec3 lhs, ref Vec3 rhs, out Vec3 result)
    {
        result.X = lhs.X - rhs.X;
        result.Y = lhs.Y - rhs.Y;
        result.Z = lhs.Z - rhs.Z;
    }

    /// <summary>Negates v in place.</summary>
    public static void Neg(ref Vec3 v)
    {
        v.X = -v.X;
        v.Y = -v.Y;
        v.Z = -v.Z;
    }

    /// <summary>dot = u . v.</summary>
    public static void Dot(ref Vec3 u, ref Vec3 v, out Real dot)
    {
        dot = u.X * v.X + u.Y * v.Y + u.Z * v.Z;
    }

    /// <summary>
    /// Scales v to unit length in place. A zero vector yields infinities
    /// (division by zero), matching the original behavior.
    /// </summary>
    public static void Normalize(ref Vec3 v)
    {
        var lenSq = v.X * v.X + v.Y * v.Y + v.Z * v.Z;
        Debug.Assert(lenSq >= 0.0f);
        var scale = 1.0f / (Real)Math.Sqrt(lenSq);
        v.X *= scale;
        v.Y *= scale;
        v.Z *= scale;
    }

    /// <summary>
    /// Index (0, 1 or 2) of the component with the largest absolute value.
    /// </summary>
    public static int LongAxis(ref Vec3 v)
    {
        int axis = 0;
        if (Math.Abs(v.Y) > Math.Abs(v.X)) axis = 1;
        if (Math.Abs(v.Z) > Math.Abs(axis == 0 ? v.X : v.Y)) axis = 2;
        return axis;
    }

    public override string ToString()
    {
        return string.Format("{0}, {1}, {2}", X, Y, Z);
    }
}
/// <summary>
/// Low-level half-edge primitives and the pooled element types (Vertex,
/// Face, Edge) used by the tessellator's Mesh class. All lists here are
/// circular and doubly-linked, anchored by sentinel head nodes owned by Mesh.
/// </summary>
internal static class MeshUtils
{
    // Marker for "no index assigned" (all bits set, i.e. -1).
    public const int Undef = ~0;

    /// <summary>
    /// Simple free-list object pool. Create() reuses a previously freed
    /// instance when available; Free() runs OnFree() then Reset() and pushes
    /// the instance back onto a static per-T stack. NOTE(review): the pool
    /// stack is static and unsynchronized — confirm single-threaded use.
    /// </summary>
    public abstract class Pooled<T> where T : Pooled<T>, new()
    {
        private static Stack<T> _stack;

        public abstract void Reset();
        public virtual void OnFree() {}

        public static T Create()
        {
            if (_stack != null && _stack.Count > 0)
            {
                return _stack.Pop();
            }
            return new T();
        }

        public void Free()
        {
            OnFree();
            Reset();
            if (_stack == null)
            {
                _stack = new Stack<T>();
            }
            _stack.Push((T)this);
        }
    }

    /// <summary>
    /// Mesh vertex. _prev/_next link the global vertex list, _anEdge is one
    /// half-edge with this vertex as its origin, and (_s, _t) are the 2D
    /// coordinates used by Geom's predicates.
    /// </summary>
    public class Vertex : Pooled<Vertex>
    {
        internal Vertex _prev, _next;
        internal Edge _anEdge;

        internal Vec3 _coords;          // original 3D position
        internal Real _s, _t;           // 2D coordinates used during the sweep
        internal PQHandle _pqHandle;    // handle into the event priority queue
        internal int _n;                // output index
        internal object _data;          // client-supplied per-vertex data

        public override void Reset()
        {
            _prev = _next = null;
            _anEdge = null;
            _coords = Vec3.Zero;
            _s = 0;
            _t = 0;
            _pqHandle = new PQHandle();
            _n = 0;
            _data = null;
        }
    }

    /// <summary>
    /// Mesh face. _prev/_next link the global face list, _anEdge is one
    /// half-edge with this face on its left, and _inside marks faces that
    /// belong to the tessellation result.
    /// </summary>
    public class Face : Pooled<Face>
    {
        internal Face _prev, _next;
        internal Edge _anEdge;

        internal Face _trail;
        internal int _n;
        internal bool _marked, _inside;

        // Number of vertices on this face, counted by walking the Lnext loop.
        internal int VertsCount
        {
            get
            {
                int n = 0;
                var eCur = _anEdge;
                do
                {
                    n++;
                    eCur = eCur._Lnext;
                }
                while (eCur != _anEdge);
                return n;
            }
        }

        public override void Reset()
        {
            _prev = _next = null;
            _anEdge = null;
            _trail = null;
            _n = 0;
            _marked = false;
            _inside = false;
        }
    }

    /// <summary>
    /// A pair of symmetric half-edges allocated together.
    /// NOTE(review): EdgePair is a struct, so each Edge._pair holds a COPY;
    /// _e's copy is taken before _eSym is assigned and therefore has a null
    /// _eSym — EnsureFirst relies only on eSym's fully-populated copy.
    /// </summary>
    public struct EdgePair
    {
        internal Edge _e, _eSym;

        public static EdgePair Create()
        {
            var pair = new MeshUtils.EdgePair();
            pair._e = MeshUtils.Edge.Create();
            pair._e._pair = pair;
            pair._eSym = MeshUtils.Edge.Create();
            pair._eSym._pair = pair;
            return pair;
        }

        public void Reset()
        {
            _e = _eSym = null;
        }
    }

    /// <summary>
    /// Half-edge. _Sym is the opposite half-edge, _Onext the next edge CCW
    /// around the origin, _Lnext the next edge CCW around the left face.
    /// The remaining directions are derived from those three.
    /// </summary>
    public class Edge : Pooled<Edge>
    {
        internal EdgePair _pair;
        internal Edge _next, _Sym, _Onext, _Lnext;
        internal Vertex _Org;
        internal Face _Lface;
        internal Tess.ActiveRegion _activeRegion;
        internal int _winding;

        // Derived accessors, all expressed through _Sym/_Onext/_Lnext.
        internal Face _Rface { get { return _Sym._Lface; } set { _Sym._Lface = value; } }
        internal Vertex _Dst { get { return _Sym._Org; } set { _Sym._Org = value; } }

        internal Edge _Oprev { get { return _Sym._Lnext; } set { _Sym._Lnext = value; } }
        internal Edge _Lprev { get { return _Onext._Sym; } set { _Onext._Sym = value; } }
        internal Edge _Dprev { get { return _Lnext._Sym; } set { _Lnext._Sym = value; } }
        internal Edge _Rprev { get { return _Sym._Onext; } set { _Sym._Onext = value; } }
        internal Edge _Dnext { get { return _Rprev._Sym; } set { _Rprev._Sym = value; } }
        internal Edge _Rnext { get { return _Oprev._Sym; } set { _Oprev._Sym = value; } }

        // Normalizes e to the primary half-edge of its pair.
        internal static void EnsureFirst(ref Edge e)
        {
            if (e == e._pair._eSym)
            {
                e = e._Sym;
            }
        }

        public override void Reset()
        {
            _pair.Reset();
            _next = _Sym = _Onext = _Lnext = null;
            _Org = null;
            _Lface = null;
            _activeRegion = null;
            _winding = 0;
        }
    }

    /// <summary>
    /// MakeEdge creates a new pair of half-edges which form their own loop.
    /// No vertex or face structures are allocated, but these must be assigned
    /// before the current edge operation is completed.
    /// </summary>
    public static Edge MakeEdge(Edge eNext)
    {
        Debug.Assert(eNext != null);

        var pair = EdgePair.Create();
        var e = pair._e;
        var eSym = pair._eSym;

        // Make sure eNext points to the first edge of the edge pair
        Edge.EnsureFirst(ref eNext);

        // Insert in circular doubly-linked list before eNext.
        // Note that the prev pointer is stored in Sym->next.
        var ePrev = eNext._Sym._next;
        eSym._next = ePrev;
        ePrev._Sym._next = e;
        e._next = eNext;
        eNext._Sym._next = eSym;

        e._Sym = eSym;
        e._Onext = e;
        e._Lnext = eSym;
        e._Org = null;
        e._Lface = null;
        e._winding = 0;
        e._activeRegion = null;

        eSym._Sym = e;
        eSym._Onext = eSym;
        eSym._Lnext = e;
        eSym._Org = null;
        eSym._Lface = null;
        eSym._winding = 0;
        eSym._activeRegion = null;

        return e;
    }

    /// <summary>
    /// Splice( a, b ) is best described by the Guibas/Stolfi paper or the
    /// CS348a notes (see Mesh.cs). Basically it modifies the mesh so that
    /// a->Onext and b->Onext are exchanged. This can have various effects
    /// depending on whether a and b belong to different face or vertex rings.
    /// For more explanation see Mesh.Splice().
    /// </summary>
    public static void Splice(Edge a, Edge b)
    {
        var aOnext = a._Onext;
        var bOnext = b._Onext;

        // Fix the Lnext pointers of the symmetric edges, then swap Onext.
        aOnext._Sym._Lnext = b;
        bOnext._Sym._Lnext = a;
        a._Onext = bOnext;
        b._Onext = aOnext;
    }

    /// <summary>
    /// MakeVertex( eOrig, vNext ) attaches a new vertex and makes it the
    /// origin of all edges in the vertex loop to which eOrig belongs. "vNext" gives
    /// a place to insert the new vertex in the global vertex list. We insert
    /// the new vertex *before* vNext so that algorithms which walk the vertex
    /// list will not see the newly created vertices.
    /// </summary>
    public static void MakeVertex(Edge eOrig, Vertex vNext)
    {
        var vNew = MeshUtils.Vertex.Create();

        // insert in circular doubly-linked list before vNext
        var vPrev = vNext._prev;
        vNew._prev = vPrev;
        vPrev._next = vNew;
        vNew._next = vNext;
        vNext._prev = vNew;

        vNew._anEdge = eOrig;
        // leave coords, s, t undefined

        // fix other edges on this vertex loop
        var e = eOrig;
        do
        {
            e._Org = vNew;
            e = e._Onext;
        }
        while (e != eOrig);
    }

    /// <summary>
    /// MakeFace( eOrig, fNext ) attaches a new face and makes it the left
    /// face of all edges in the face loop to which eOrig belongs. "fNext" gives
    /// a place to insert the new face in the global face list. We insert
    /// the new face *before* fNext so that algorithms which walk the face
    /// list will not see the newly created faces.
    /// </summary>
    public static void MakeFace(Edge eOrig, Face fNext)
    {
        var fNew = MeshUtils.Face.Create();

        // insert in circular doubly-linked list before fNext
        var fPrev = fNext._prev;
        fNew._prev = fPrev;
        fPrev._next = fNew;
        fNew._next = fNext;
        fNext._prev = fNew;

        fNew._anEdge = eOrig;
        fNew._trail = null;
        fNew._marked = false;

        // The new face is marked "inside" if the old one was. This is a
        // convenience for the common case where a face has been split in two.
        fNew._inside = fNext._inside;

        // fix other edges on this face loop
        var e = eOrig;
        do
        {
            e._Lface = fNew;
            e = e._Lnext;
        }
        while (e != eOrig);
    }

    /// <summary>
    /// KillEdge( eDel ) destroys an edge (the half-edges eDel and eDel->Sym),
    /// and removes from the global edge list.
    /// </summary>
    public static void KillEdge(Edge eDel)
    {
        // Half-edges are allocated in pairs, see EdgePair above
        Edge.EnsureFirst(ref eDel);

        // delete from circular doubly-linked list
        var eNext = eDel._next;
        var ePrev = eDel._Sym._next;
        eNext._Sym._next = ePrev;
        ePrev._Sym._next = eNext;

        eDel.Free();
    }

    /// <summary>
    /// KillVertex( vDel ) destroys a vertex and removes it from the global
    /// vertex list. It updates the vertex loop to point to a given new vertex.
    /// </summary>
    public static void KillVertex(Vertex vDel, Vertex newOrg)
    {
        var eStart = vDel._anEdge;

        // change the origin of all affected edges
        var e = eStart;
        do
        {
            e._Org = newOrg;
            e = e._Onext;
        }
        while (e != eStart);

        // delete from circular doubly-linked list
        var vPrev = vDel._prev;
        var vNext = vDel._next;
        vNext._prev = vPrev;
        vPrev._next = vNext;

        vDel.Free();
    }

    /// <summary>
    /// KillFace( fDel ) destroys a face and removes it from the global face
    /// list. It updates the face loop to point to a given new face.
    /// </summary>
    public static void KillFace(Face fDel, Face newLFace)
    {
        var eStart = fDel._anEdge;

        // change the left face of all affected edges
        var e = eStart;
        do
        {
            e._Lface = newLFace;
            e = e._Lnext;
        }
        while (e != eStart);

        // delete from circular doubly-linked list
        var fPrev = fDel._prev;
        var fNext = fDel._next;
        fNext._prev = fPrev;
        fPrev._next = fNext;

        fDel.Free();
    }

    /// <summary>
    /// Return signed area of face.
    /// Computed as a sum of trapezoid terms over the (s, t) coordinates of
    /// the face's edge loop.
    /// </summary>
    public static Real FaceArea(Face f)
    {
        Real area = 0;
        var e = f._anEdge;
        do
        {
            area += (e._Org._s - e._Dst._s) * (e._Org._t + e._Dst._t);
            e = e._Lnext;
        }
        while (e != f._anEdge);
        return area;
    }
}
}
}

View File

@@ -0,0 +1,244 @@
/*
** SGI FREE SOFTWARE LICENSE B (Version 2.0, Sept. 18, 2008)
** Copyright (C) 2011 Silicon Graphics, Inc.
** All Rights Reserved.
**
** Permission is hereby granted, free of charge, to any person obtaining a copy
** of this software and associated documentation files (the "Software"), to deal
** in the Software without restriction, including without limitation the rights
** to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
** of the Software, and to permit persons to whom the Software is furnished to do so,
** subject to the following conditions:
**
** The above copyright notice including the dates of first publication and either this
** permission notice or a reference to http://oss.sgi.com/projects/FreeB/ shall be
** included in all copies or substantial portions of the Software.
**
** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
** INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
** PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL SILICON GRAPHICS, INC.
** BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
** OR OTHER DEALINGS IN THE SOFTWARE.
**
** Except as contained in this notice, the name of Silicon Graphics, Inc. shall not
** be used in advertising or otherwise to promote the sale, use or other dealings in
** this Software without prior written authorization from Silicon Graphics, Inc.
*/
/*
** Original Author: Eric Veach, July 1994.
** libtess2: Mikko Mononen, http://code.google.com/p/libtess2/.
** LibTessDotNet: Remi Gillig, https://github.com/speps/LibTessDotNet
*/
using System;
using System.Diagnostics;
namespace UnityEngine.Experimental.Rendering.Universal
{
namespace LibTessDotNet
{
/// <summary>
/// Opaque handle returned by PriorityHeap.Insert; identifies a stored
/// element so it can later be removed without a search.
/// </summary>
internal struct PQHandle
{
    // Sentinel meaning "no valid handle".
    public static readonly int Invalid = 0x0fffffff;

    // Index into PriorityHeap's handle-slot array.
    internal int _handle;
}
internal class PriorityHeap<TValue> where TValue : class
{
/// <summary>
/// Ordering predicate: true when lhs orders no later than rhs
/// (the heap extracts the minimum under this relation).
/// </summary>
public delegate bool LessOrEqual(TValue lhs, TValue rhs);

/// <summary>
/// Storage slot addressed by a PQHandle. While the slot is live, _node is
/// the 1-based heap-node index holding it; while the slot is on the free
/// list, _key is null and _node links to the next free slot.
/// </summary>
protected class HandleElem
{
    internal TValue _key;
    internal int _node;
}

private LessOrEqual _leq;           // caller-supplied ordering predicate
private int[] _nodes;               // 1-based binary heap of handle indices
private HandleElem[] _handles;      // handle slots addressed by PQHandle._handle
private int _size, _max;            // current element count / current capacity
private int _freeList;              // head of the recycled-slot chain (0 = empty)
private bool _initialized;          // true once Init() has heapified

// True when the heap contains no elements.
public bool Empty { get { return _size == 0; } }
/// <summary>
/// Creates an empty heap with room for initialSize elements, ordered by leq.
/// Call Init() after bulk insertion to establish the heap property.
/// </summary>
public PriorityHeap(int initialSize, LessOrEqual leq)
{
    _leq = leq;
    _size = 0;
    _max = initialSize;
    _freeList = 0;
    _initialized = false;

    // Arrays are 1-based: slot 0 is never used.
    _nodes = new int[initialSize + 1];
    _handles = new HandleElem[initialSize + 1];
    _nodes[1] = 1;
    _handles[1] = new HandleElem { _key = null };
}
// Sifts the entry at heap node 'curr' downwards, swapping with the smaller
// child (per _leq) until the heap order is restored, and keeps each moved
// handle's _node back-pointer in sync.
private void FloatDown(int curr)
{
    int child;
    int hCurr, hChild;

    hCurr = _nodes[curr];
    while (true)
    {
        // Children of curr are at 2*curr and 2*curr+1; pick the smaller one.
        child = curr << 1;
        if (child < _size && _leq(_handles[_nodes[child + 1]]._key, _handles[_nodes[child]]._key))
        {
            ++child;
        }

        Debug.Assert(child <= _max);

        hChild = _nodes[child];
        if (child > _size || _leq(_handles[hCurr]._key, _handles[hChild]._key))
        {
            // Heap order holds here; park the entry and stop.
            _nodes[curr] = hCurr;
            _handles[hCurr]._node = curr;
            break;
        }

        // Promote the smaller child and continue from its old slot.
        _nodes[curr] = hChild;
        _handles[hChild]._node = curr;
        curr = child;
    }
}
private void FloatUp(int curr)
{
int parent;
int hCurr, hParent;
hCurr = _nodes[curr];
while (true)
{
parent = curr >> 1;
hParent = _nodes[parent];
if (parent == 0 || _leq(_handles[hParent]._key, _handles[hCurr]._key))
{
_nodes[curr] = hCurr;
_handles[hCurr]._node = curr;
break;
}
_nodes[curr] = hParent;
_handles[hParent]._node = curr;
curr = parent;
}
}
public void Init()
{
for (int i = _size; i >= 1; --i)
{
FloatDown(i);
}
_initialized = true;
}
public PQHandle Insert(TValue value)
{
int curr = ++_size;
if ((curr * 2) > _max)
{
_max <<= 1;
Array.Resize(ref _nodes, _max + 1);
Array.Resize(ref _handles, _max + 1);
}
int free;
if (_freeList == 0)
{
free = curr;
}
else
{
free = _freeList;
_freeList = _handles[free]._node;
}
_nodes[curr] = free;
if (_handles[free] == null)
{
_handles[free] = new HandleElem { _key = value, _node = curr };
}
else
{
_handles[free]._node = curr;
_handles[free]._key = value;
}
if (_initialized)
{
FloatUp(curr);
}
Debug.Assert(free != PQHandle.Invalid);
return new PQHandle { _handle = free };
}
public TValue ExtractMin()
{
Debug.Assert(_initialized);
int hMin = _nodes[1];
TValue min = _handles[hMin]._key;
if (_size > 0)
{
_nodes[1] = _nodes[_size];
_handles[_nodes[1]]._node = 1;
_handles[hMin]._key = null;
_handles[hMin]._node = _freeList;
_freeList = hMin;
if (--_size > 0)
{
FloatDown(1);
}
}
return min;
}
public TValue Minimum()
{
Debug.Assert(_initialized);
return _handles[_nodes[1]]._key;
}
public void Remove(PQHandle handle)
{
Debug.Assert(_initialized);
int hCurr = handle._handle;
Debug.Assert(hCurr >= 1 && hCurr <= _max && _handles[hCurr]._key != null);
int curr = _handles[hCurr]._node;
_nodes[curr] = _nodes[_size];
_handles[_nodes[curr]]._node = curr;
if (curr <= --_size)
{
if (curr <= 1 || _leq(_handles[_nodes[curr >> 1]]._key, _handles[_nodes[curr]]._key))
{
FloatDown(curr);
}
else
{
FloatUp(curr);
}
}
_handles[hCurr]._key = null;
_handles[hCurr]._node = _freeList;
_freeList = hCurr;
}
}
}
}

View File

@@ -0,0 +1,233 @@
/*
** SGI FREE SOFTWARE LICENSE B (Version 2.0, Sept. 18, 2008)
** Copyright (C) 2011 Silicon Graphics, Inc.
** All Rights Reserved.
**
** Permission is hereby granted, free of charge, to any person obtaining a copy
** of this software and associated documentation files (the "Software"), to deal
** in the Software without restriction, including without limitation the rights
** to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
** of the Software, and to permit persons to whom the Software is furnished to do so,
** subject to the following conditions:
**
** The above copyright notice including the dates of first publication and either this
** permission notice or a reference to http://oss.sgi.com/projects/FreeB/ shall be
** included in all copies or substantial portions of the Software.
**
** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
** INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
** PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL SILICON GRAPHICS, INC.
** BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
** OR OTHER DEALINGS IN THE SOFTWARE.
**
** Except as contained in this notice, the name of Silicon Graphics, Inc. shall not
** be used in advertising or otherwise to promote the sale, use or other dealings in
** this Software without prior written authorization from Silicon Graphics, Inc.
*/
/*
** Original Author: Eric Veach, July 1994.
** libtess2: Mikko Mononen, http://code.google.com/p/libtess2/.
** LibTessDotNet: Remi Gillig, https://github.com/speps/LibTessDotNet
*/
using System;
using System.Collections.Generic;
using System.Diagnostics;
namespace UnityEngine.Experimental.Rendering.Universal
{
    namespace LibTessDotNet
    {
        /// <summary>
        /// Two-phase priority queue (port of libtess2's pq): values inserted
        /// before Init() are batch-sorted into _order (cheaper than heapifying
        /// one by one), while values inserted after Init() go into a regular
        /// PriorityHeap. Queries consult both and return the smaller minimum.
        /// Handles for pre-Init entries are encoded as negative numbers so
        /// Remove() can tell the two stores apart.
        /// </summary>
        internal class PriorityQueue<TValue> where TValue : class
        {
            private PriorityHeap<TValue>.LessOrEqual _leq;
            private PriorityHeap<TValue> _heap;   // post-Init insertions
            private TValue[] _keys;               // pre-Init insertions (removed entries become null)
            private int[] _order;                 // indices into _keys, sorted descending (minimum at the tail)
            private int _size, _max;
            private bool _initialized;

            public bool Empty { get { return _size == 0 && _heap.Empty; } }

            public PriorityQueue(int initialSize, PriorityHeap<TValue>.LessOrEqual leq)
            {
                _leq = leq;
                _heap = new PriorityHeap<TValue>(initialSize, leq);
                _keys = new TValue[initialSize];
                _size = 0;
                _max = initialSize;
                _initialized = false;
            }

            // Explicit stack frame for the iterative quicksort in Init().
            class StackItem
            {
                internal int p, r;
            };

            static void Swap(ref int a, ref int b)
            {
                int tmp = a;
                a = b;
                b = tmp;
            }

            /// <summary>
            /// Sorts all pre-Init entries. Uses an iterative quicksort with a
            /// fixed-seed LCG for pivot selection (deterministic across runs),
            /// falling back to insertion sort for partitions of &lt;= 10 elements.
            /// The result in _order is descending, so the minimum sits at
            /// _order[_size - 1]. Also heapifies the secondary heap.
            /// </summary>
            public void Init()
            {
                var stack = new Stack<StackItem>();
                int p, r, i, j, piv;
                uint seed = 2016473283;
                p = 0;
                r = _size - 1;
                _order = new int[_size + 1];
                // Start with the identity permutation.
                for (piv = 0, i = p; i <= r; ++piv, ++i)
                {
                    _order[i] = piv;
                }
                stack.Push(new StackItem { p = p, r = r });
                while (stack.Count > 0)
                {
                    var top = stack.Pop();
                    p = top.p;
                    r = top.r;
                    while (r > p + 10)
                    {
                        // Pick a pseudo-random pivot and move it to the front.
                        seed = seed * 1539415821 + 1;
                        i = p + (int)(seed % (r - p + 1));
                        piv = _order[i];
                        _order[i] = _order[p];
                        _order[p] = piv;
                        i = p - 1;
                        j = r + 1;
                        // Hoare partition; note the comparisons are inverted so
                        // larger keys end up first (descending order).
                        do
                        {
                            do { ++i; } while (!_leq(_keys[_order[i]], _keys[piv]));
                            do { --j; } while (!_leq(_keys[piv], _keys[_order[j]]));
                            Swap(ref _order[i], ref _order[j]);
                        }
                        while (i < j);
                        Swap(ref _order[i], ref _order[j]); // undo the last swap
                        // Recurse on the larger half via the stack, loop on the smaller.
                        if (i - p < r - j)
                        {
                            stack.Push(new StackItem { p = j + 1, r = r });
                            r = i - 1;
                        }
                        else
                        {
                            stack.Push(new StackItem { p = p, r = i - 1 });
                            p = j + 1;
                        }
                    }
                    // Insertion sort for the remaining small partition.
                    for (i = p + 1; i <= r; ++i)
                    {
                        piv = _order[i];
                        for (j = i; j > p && !_leq(_keys[piv], _keys[_order[j - 1]]); --j)
                        {
                            _order[j] = _order[j - 1];
                        }
                        _order[j] = piv;
                    }
                }
#if DEBUG
                // Verify the descending invariant: each entry <= its predecessor.
                p = 0;
                r = _size - 1;
                for (i = p; i < r; ++i)
                {
                    Debug.Assert(_leq(_keys[_order[i + 1]], _keys[_order[i]]), "Wrong sort");
                }
#endif
                _max = _size;
                _initialized = true;
                _heap.Init();
            }

            /// <summary>
            /// Inserts a value. Before Init() it is appended to _keys and gets a
            /// negative handle (-(index + 1)); afterwards it is delegated to the heap.
            /// </summary>
            public PQHandle Insert(TValue value)
            {
                if (_initialized)
                {
                    return _heap.Insert(value);
                }
                int curr = _size;
                if (++_size >= _max)
                {
                    _max <<= 1;
                    Array.Resize(ref _keys, _max);
                }
                _keys[curr] = value;
                return new PQHandle { _handle = -(curr + 1) };
            }

            /// <summary>
            /// Removes and returns the overall minimum, comparing the sorted
            /// array's tail against the heap's minimum.
            /// </summary>
            public TValue ExtractMin()
            {
                Debug.Assert(_initialized);
                if (_size == 0)
                {
                    return _heap.ExtractMin();
                }
                TValue sortMin = _keys[_order[_size - 1]];
                if (!_heap.Empty)
                {
                    TValue heapMin = _heap.Minimum();
                    if (_leq(heapMin, sortMin))
                        return _heap.ExtractMin();
                }
                // Pop the tail entry and skip over any entries nulled by Remove().
                do
                {
                    --_size;
                }
                while (_size > 0 && _keys[_order[_size - 1]] == null);
                return sortMin;
            }

            /// <summary>Peeks at the overall minimum without removing it.</summary>
            public TValue Minimum()
            {
                Debug.Assert(_initialized);
                if (_size == 0)
                {
                    return _heap.Minimum();
                }
                TValue sortMin = _keys[_order[_size - 1]];
                if (!_heap.Empty)
                {
                    TValue heapMin = _heap.Minimum();
                    if (_leq(heapMin, sortMin))
                        return heapMin;
                }
                return sortMin;
            }

            /// <summary>
            /// Removes the element for the given handle. Negative handles address
            /// the sorted array (the slot is nulled lazily and trimmed from the
            /// tail); non-negative handles are forwarded to the heap.
            /// </summary>
            public void Remove(PQHandle handle)
            {
                Debug.Assert(_initialized);
                int curr = handle._handle;
                if (curr >= 0)
                {
                    _heap.Remove(handle);
                    return;
                }
                curr = -(curr + 1);
                Debug.Assert(curr < _max && _keys[curr] != null);
                _keys[curr] = null;
                while (_size > 0 && _keys[_order[_size - 1]] == null)
                {
                    --_size;
                }
            }
        }
    }
}

View File

@@ -0,0 +1,752 @@
/*
** SGI FREE SOFTWARE LICENSE B (Version 2.0, Sept. 18, 2008)
** Copyright (C) 2011 Silicon Graphics, Inc.
** All Rights Reserved.
**
** Permission is hereby granted, free of charge, to any person obtaining a copy
** of this software and associated documentation files (the "Software"), to deal
** in the Software without restriction, including without limitation the rights
** to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
** of the Software, and to permit persons to whom the Software is furnished to do so,
** subject to the following conditions:
**
** The above copyright notice including the dates of first publication and either this
** permission notice or a reference to http://oss.sgi.com/projects/FreeB/ shall be
** included in all copies or substantial portions of the Software.
**
** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
** INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
** PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL SILICON GRAPHICS, INC.
** BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
** OR OTHER DEALINGS IN THE SOFTWARE.
**
** Except as contained in this notice, the name of Silicon Graphics, Inc. shall not
** be used in advertising or otherwise to promote the sale, use or other dealings in
** this Software without prior written authorization from Silicon Graphics, Inc.
*/
/*
** Original Author: Eric Veach, July 1994.
** libtess2: Mikko Mononen, http://code.google.com/p/libtess2/.
** LibTessDotNet: Remi Gillig, https://github.com/speps/LibTessDotNet
*/
using System;
using System.Diagnostics;
namespace UnityEngine.Experimental.Rendering.Universal
{
    using Real = System.Single;
    namespace LibTessDotNet
    {
        // Fill rule used to decide which regions of the planar arrangement are
        // "inside" the polygon (see SGI tessellator documentation).
        internal enum WindingRule
        {
            EvenOdd,
            NonZero,
            Positive,
            Negative,
            AbsGeqTwo
        }

        // Shape of the tessellation output (see OutputPolymesh/OutputContours).
        internal enum ElementType
        {
            Polygons,
            ConnectedPolygons,
            BoundaryContours
        }

        // Optional per-contour orientation override applied by AddContour.
        internal enum ContourOrientation
        {
            Original,
            Clockwise,
            CounterClockwise
        }

        // Input/output vertex: a 3D position plus an opaque per-vertex payload
        // that is carried through tessellation (and merged via CombineCallback).
        internal struct ContourVertex
        {
            public Vec3 Position;
            public object Data;
            public override string ToString()
            {
                return string.Format("{0}, {1}", Position, Data);
            }
        }

        // Called when vertices must be merged (intersections); returns the new
        // vertex's Data given the source vertices' data and interpolation weights.
        internal delegate object CombineCallback(Vec3 position, object[] data, Real[] weights);

        /// <summary>
        /// Polygon tessellator (LibTessDotNet port of libtess2). Feed contours
        /// via AddContour, then call Tessellate; results are exposed through
        /// Vertices/Elements. This partial class holds projection, monotone
        /// tessellation and output; the sweep (ComputeInterior) lives in
        /// another part of the partial class.
        /// </summary>
        internal partial class Tess
        {
            private Mesh _mesh;                       // half-edge mesh built from the contours
            private Vec3 _normal;                     // polygon normal (computed if left zero)
            private Vec3 _sUnit;                      // sweep-plane basis vectors
            private Vec3 _tUnit;
            private Real _bminX, _bminY, _bmaxX, _bmaxY; // bounds in sweep (s,t) space
            private WindingRule _windingRule;
            private Dict<ActiveRegion> _dict;         // sweep state (used by ComputeInterior)
            private PriorityQueue<MeshUtils.Vertex> _pq;
            private MeshUtils.Vertex _event;
            private CombineCallback _combineCallback;
            private ContourVertex[] _vertices;        // tessellation output
            private int _vertexCount;
            private int[] _elements;
            private int _elementCount;

            public Vec3 Normal { get { return _normal; } set { _normal = value; } }

            public Real SUnitX = 1;
            public Real SUnitY = 0;
#if DOUBLE
            public Real SentinelCoord = 4e150;
#else
            public Real SentinelCoord = 4e30f;
#endif
            /// <summary>
            /// If true, will remove empty (zero area) polygons.
            /// </summary>
            public bool NoEmptyPolygons = false;
            /// <summary>
            /// If true, will use pooling to reduce GC (compare performance with/without, can vary wildly).
            /// </summary>
            public bool UsePooling = false;

            public ContourVertex[] Vertices { get { return _vertices; } }
            public int VertexCount { get { return _vertexCount; } }
            public int[] Elements { get { return _elements; } }
            public int ElementCount { get { return _elementCount; } }

            public Tess()
            {
                _normal = Vec3.Zero;
                _bminX = _bminY = _bmaxX = _bmaxY = 0;
                _windingRule = WindingRule.EvenOdd;
                _mesh = null;
                _vertices = null;
                _vertexCount = 0;
                _elements = null;
                _elementCount = 0;
            }

            // Estimates a polygon normal from the mesh vertices: picks the two
            // extreme vertices along the widest axis, then the third vertex
            // maximizing the triangle area with them (libtess heuristic).
            private void ComputeNormal(ref Vec3 norm)
            {
                var v = _mesh._vHead._next;

                var minVal = new Real[3] { v._coords.X, v._coords.Y, v._coords.Z };
                var minVert = new MeshUtils.Vertex[3] { v, v, v };
                var maxVal = new Real[3] { v._coords.X, v._coords.Y, v._coords.Z };
                var maxVert = new MeshUtils.Vertex[3] { v, v, v };

                // Track per-axis extreme vertices.
                for (; v != _mesh._vHead; v = v._next)
                {
                    if (v._coords.X < minVal[0]) { minVal[0] = v._coords.X; minVert[0] = v; }
                    if (v._coords.Y < minVal[1]) { minVal[1] = v._coords.Y; minVert[1] = v; }
                    if (v._coords.Z < minVal[2]) { minVal[2] = v._coords.Z; minVert[2] = v; }
                    if (v._coords.X > maxVal[0]) { maxVal[0] = v._coords.X; maxVert[0] = v; }
                    if (v._coords.Y > maxVal[1]) { maxVal[1] = v._coords.Y; maxVert[1] = v; }
                    if (v._coords.Z > maxVal[2]) { maxVal[2] = v._coords.Z; maxVert[2] = v; }
                }

                // Find two vertices separated by at least 1/sqrt(3) of the maximum
                // distance between any two vertices
                int i = 0;
                if (maxVal[1] - minVal[1] > maxVal[0] - minVal[0]) { i = 1; }
                if (maxVal[2] - minVal[2] > maxVal[i] - minVal[i]) { i = 2; }
                if (minVal[i] >= maxVal[i])
                {
                    // All vertices are the same -- normal doesn't matter
                    norm = new Vec3 { X = 0, Y = 0, Z = 1 };
                    return;
                }

                // Look for a third vertex which forms the triangle with maximum area
                // (Length of normal == twice the triangle area)
                Real maxLen2 = 0, tLen2;
                var v1 = minVert[i];
                var v2 = maxVert[i];
                Vec3 d1, d2, tNorm;
                Vec3.Sub(ref v1._coords, ref v2._coords, out d1);
                for (v = _mesh._vHead._next; v != _mesh._vHead; v = v._next)
                {
                    Vec3.Sub(ref v._coords, ref v2._coords, out d2);
                    tNorm.X = d1.Y * d2.Z - d1.Z * d2.Y;
                    tNorm.Y = d1.Z * d2.X - d1.X * d2.Z;
                    tNorm.Z = d1.X * d2.Y - d1.Y * d2.X;
                    tLen2 = tNorm.X * tNorm.X + tNorm.Y * tNorm.Y + tNorm.Z * tNorm.Z;
                    if (tLen2 > maxLen2)
                    {
                        maxLen2 = tLen2;
                        norm = tNorm;
                    }
                }

                if (maxLen2 <= 0.0f)
                {
                    // All points lie on a single line -- any decent normal will do
                    norm = Vec3.Zero;
                    i = Vec3.LongAxis(ref d1);
                    norm[i] = 1;
                }
            }

            private void CheckOrientation()
            {
                // When we compute the normal automatically, we choose the orientation
                // so that the sum of the signed areas of all contours is non-negative.
                Real area = 0.0f;
                for (var f = _mesh._fHead._next; f != _mesh._fHead; f = f._next)
                {
                    if (f._anEdge._winding <= 0)
                    {
                        continue;
                    }
                    area += MeshUtils.FaceArea(f);
                }
                if (area < 0.0f)
                {
                    // Reverse the orientation by flipping all the t-coordinates
                    for (var v = _mesh._vHead._next; v != _mesh._vHead; v = v._next)
                    {
                        v._t = -v._t;
                    }
                    Vec3.Neg(ref _tUnit);
                }
            }

            // Chooses a sweep plane perpendicular to the polygon normal (computing
            // the normal first if the caller left it zero) and projects every
            // vertex to (s, t) coordinates on that plane; also records ST bounds.
            private void ProjectPolygon()
            {
                var norm = _normal;

                bool computedNormal = false;
                if (norm.X == 0.0f && norm.Y == 0.0f && norm.Z == 0.0f)
                {
                    ComputeNormal(ref norm);
                    _normal = norm;
                    computedNormal = true;
                }

                // Build the (s, t) basis around the normal's dominant axis.
                int i = Vec3.LongAxis(ref norm);

                _sUnit[i] = 0;
                _sUnit[(i + 1) % 3] = SUnitX;
                _sUnit[(i + 2) % 3] = SUnitY;

                _tUnit[i] = 0;
                _tUnit[(i + 1) % 3] = norm[i] > 0.0f ? -SUnitY : SUnitY;
                _tUnit[(i + 2) % 3] = norm[i] > 0.0f ? SUnitX : -SUnitX;

                // Project the vertices onto the sweep plane
                for (var v = _mesh._vHead._next; v != _mesh._vHead; v = v._next)
                {
                    Vec3.Dot(ref v._coords, ref _sUnit, out v._s);
                    Vec3.Dot(ref v._coords, ref _tUnit, out v._t);
                }
                if (computedNormal)
                {
                    CheckOrientation();
                }

                // Compute ST bounds.
                bool first = true;
                for (var v = _mesh._vHead._next; v != _mesh._vHead; v = v._next)
                {
                    if (first)
                    {
                        _bminX = _bmaxX = v._s;
                        _bminY = _bmaxY = v._t;
                        first = false;
                    }
                    else
                    {
                        if (v._s < _bminX) _bminX = v._s;
                        if (v._s > _bmaxX) _bmaxX = v._s;
                        if (v._t < _bminY) _bminY = v._t;
                        if (v._t > _bmaxY) _bmaxY = v._t;
                    }
                }
            }

            /// <summary>
            /// TessellateMonoRegion( face ) tessellates a monotone region
            /// (what else would it do??) The region must consist of a single
            /// loop of half-edges (see mesh.h) oriented CCW. "Monotone" in this
            /// case means that any vertical line intersects the interior of the
            /// region in a single interval.
            ///
            /// Tessellation consists of adding interior edges (actually pairs of
            /// half-edges), to split the region into non-overlapping triangles.
            ///
            /// The basic idea is explained in Preparata and Shamos (which I don't
            /// have handy right now), although their implementation is more
            /// complicated than this one. The are two edge chains, an upper chain
            /// and a lower chain. We process all vertices from both chains in order,
            /// from right to left.
            ///
            /// The algorithm ensures that the following invariant holds after each
            /// vertex is processed: the untessellated region consists of two
            /// chains, where one chain (say the upper) is a single edge, and
            /// the other chain is concave. The left vertex of the single edge
            /// is always to the left of all vertices in the concave chain.
            ///
            /// Each step consists of adding the rightmost unprocessed vertex to one
            /// of the two chains, and forming a fan of triangles from the rightmost
            /// of two chain endpoints. Determining whether we can add each triangle
            /// to the fan is a simple orientation test. By making the fan as large
            /// as possible, we restore the invariant (check it yourself).
            /// </summary>
            private void TessellateMonoRegion(MeshUtils.Face face)
            {
                // All edges are oriented CCW around the boundary of the region.
                // First, find the half-edge whose origin vertex is rightmost.
                // Since the sweep goes from left to right, face->anEdge should
                // be close to the edge we want.
                var up = face._anEdge;
                Debug.Assert(up._Lnext != up && up._Lnext._Lnext != up);

                while (Geom.VertLeq(up._Dst, up._Org)) up = up._Lprev;
                while (Geom.VertLeq(up._Org, up._Dst)) up = up._Lnext;

                var lo = up._Lprev;

                while (up._Lnext != lo)
                {
                    if (Geom.VertLeq(up._Dst, lo._Org))
                    {
                        // up.Dst is on the left. It is safe to form triangles from lo.Org.
                        // The EdgeGoesLeft test guarantees progress even when some triangles
                        // are CW, given that the upper and lower chains are truly monotone.
                        while (lo._Lnext != up && (Geom.EdgeGoesLeft(lo._Lnext)
                            || Geom.EdgeSign(lo._Org, lo._Dst, lo._Lnext._Dst) <= 0.0f))
                        {
                            lo = _mesh.Connect(lo._Lnext, lo)._Sym;
                        }
                        lo = lo._Lprev;
                    }
                    else
                    {
                        // lo.Org is on the left. We can make CCW triangles from up.Dst.
                        while (lo._Lnext != up && (Geom.EdgeGoesRight(up._Lprev)
                            || Geom.EdgeSign(up._Dst, up._Org, up._Lprev._Org) >= 0.0f))
                        {
                            up = _mesh.Connect(up, up._Lprev)._Sym;
                        }
                        up = up._Lnext;
                    }
                }

                // Now lo.Org == up.Dst == the leftmost vertex. The remaining region
                // can be tessellated in a fan from this leftmost vertex.
                Debug.Assert(lo._Lnext != up);
                while (lo._Lnext._Lnext != up)
                {
                    lo = _mesh.Connect(lo._Lnext, lo)._Sym;
                }
            }

            /// <summary>
            /// TessellateInterior( mesh ) tessellates each region of
            /// the mesh which is marked "inside" the polygon. Each such region
            /// must be monotone.
            /// </summary>
            private void TessellateInterior()
            {
                MeshUtils.Face f, next;
                for (f = _mesh._fHead._next; f != _mesh._fHead; f = next)
                {
                    // Make sure we don't try to tessellate the new triangles.
                    next = f._next;
                    if (f._inside)
                    {
                        TessellateMonoRegion(f);
                    }
                }
            }

            /// <summary>
            /// DiscardExterior zaps (ie. sets to null) all faces
            /// which are not marked "inside" the polygon. Since further mesh operations
            /// on NULL faces are not allowed, the main purpose is to clean up the
            /// mesh so that exterior loops are not represented in the data structure.
            /// </summary>
            private void DiscardExterior()
            {
                MeshUtils.Face f, next;

                for (f = _mesh._fHead._next; f != _mesh._fHead; f = next)
                {
                    // Since f will be destroyed, save its next pointer.
                    next = f._next;
                    if (!f._inside)
                    {
                        _mesh.ZapFace(f);
                    }
                }
            }

            /// <summary>
            /// SetWindingNumber( value, keepOnlyBoundary ) resets the
            /// winding numbers on all edges so that regions marked "inside" the
            /// polygon have a winding number of "value", and regions outside
            /// have a winding number of 0.
            ///
            /// If keepOnlyBoundary is TRUE, it also deletes all edges which do not
            /// separate an interior region from an exterior one.
            /// </summary>
            private void SetWindingNumber(int value, bool keepOnlyBoundary)
            {
                MeshUtils.Edge e, eNext;

                for (e = _mesh._eHead._next; e != _mesh._eHead; e = eNext)
                {
                    eNext = e._next;
                    if (e._Rface._inside != e._Lface._inside)
                    {

                        /* This is a boundary edge (one side is interior, one is exterior). */
                        e._winding = (e._Lface._inside) ? value : -value;
                    }
                    else
                    {

                        /* Both regions are interior, or both are exterior. */
                        if (!keepOnlyBoundary)
                        {
                            e._winding = 0;
                        }
                        else
                        {
                            _mesh.Delete(e);
                        }
                    }
                }
            }

            // Returns the output index of the face across the given edge, or
            // Undef when there is no interior neighbour (used for the
            // ConnectedPolygons element layout).
            private int GetNeighbourFace(MeshUtils.Edge edge)
            {
                if (edge._Rface == null)
                    return MeshUtils.Undef;
                if (!edge._Rface._inside)
                    return MeshUtils.Undef;
                return edge._Rface._n;
            }

            // Writes the Polygons/ConnectedPolygons output: assigns sequential IDs
            // to the interior vertices and faces, then fills _vertices and
            // _elements (polySize indices per face, padded with Undef; for
            // ConnectedPolygons, followed by polySize neighbour-face indices).
            private void OutputPolymesh(ElementType elementType, int polySize)
            {
                MeshUtils.Vertex v;
                MeshUtils.Face f;
                MeshUtils.Edge edge;
                int maxFaceCount = 0;
                int maxVertexCount = 0;
                int faceVerts, i;

                if (polySize < 3)
                {
                    polySize = 3;
                }
                // Assume that the input data is triangles now.
                // Try to merge as many polygons as possible
                if (polySize > 3)
                {
                    _mesh.MergeConvexFaces(polySize);
                }

                // Mark unused
                for (v = _mesh._vHead._next; v != _mesh._vHead; v = v._next)
                    v._n = MeshUtils.Undef;

                // Create unique IDs for all vertices and faces.
                for (f = _mesh._fHead._next; f != _mesh._fHead; f = f._next)
                {
                    f._n = MeshUtils.Undef;
                    if (!f._inside) continue;

                    if (NoEmptyPolygons)
                    {
                        var area = MeshUtils.FaceArea(f);
                        if (Math.Abs(area) < Real.Epsilon)
                        {
                            continue;
                        }
                    }

                    edge = f._anEdge;
                    faceVerts = 0;
                    do
                    {
                        v = edge._Org;
                        if (v._n == MeshUtils.Undef)
                        {
                            v._n = maxVertexCount;
                            maxVertexCount++;
                        }
                        faceVerts++;
                        edge = edge._Lnext;
                    }
                    while (edge != f._anEdge);

                    Debug.Assert(faceVerts <= polySize);

                    f._n = maxFaceCount;
                    ++maxFaceCount;
                }

                _elementCount = maxFaceCount;
                if (elementType == ElementType.ConnectedPolygons)
                    maxFaceCount *= 2;  // room for the neighbour-face block per polygon
                _elements = new int[maxFaceCount * polySize];

                _vertexCount = maxVertexCount;
                _vertices = new ContourVertex[_vertexCount];

                // Output vertices.
                for (v = _mesh._vHead._next; v != _mesh._vHead; v = v._next)
                {
                    if (v._n != MeshUtils.Undef)
                    {
                        // Store coordinate
                        _vertices[v._n].Position = v._coords;
                        _vertices[v._n].Data = v._data;
                    }
                }

                // Output indices.
                int elementIndex = 0;
                for (f = _mesh._fHead._next; f != _mesh._fHead; f = f._next)
                {
                    if (!f._inside) continue;

                    if (NoEmptyPolygons)
                    {
                        var area = MeshUtils.FaceArea(f);
                        if (Math.Abs(area) < Real.Epsilon)
                        {
                            continue;
                        }
                    }

                    // Store polygon
                    edge = f._anEdge;
                    faceVerts = 0;
                    do
                    {
                        v = edge._Org;
                        _elements[elementIndex++] = v._n;
                        faceVerts++;
                        edge = edge._Lnext;
                    }
                    while (edge != f._anEdge);
                    // Fill unused.
                    for (i = faceVerts; i < polySize; ++i)
                    {
                        _elements[elementIndex++] = MeshUtils.Undef;
                    }

                    // Store polygon connectivity
                    if (elementType == ElementType.ConnectedPolygons)
                    {
                        edge = f._anEdge;
                        do
                        {
                            _elements[elementIndex++] = GetNeighbourFace(edge);
                            edge = edge._Lnext;
                        }
                        while (edge != f._anEdge);
                        // Fill unused.
                        for (i = faceVerts; i < polySize; ++i)
                        {
                            _elements[elementIndex++] = MeshUtils.Undef;
                        }
                    }
                }
            }

            // Writes the BoundaryContours output: _vertices holds the contour
            // vertices in loop order; _elements holds (startVertex, vertexCount)
            // pairs, one per interior face.
            private void OutputContours()
            {
                MeshUtils.Face f;
                MeshUtils.Edge edge, start;
                int startVert = 0;
                int vertCount = 0;

                _vertexCount = 0;
                _elementCount = 0;

                // First pass: count vertices and contours.
                for (f = _mesh._fHead._next; f != _mesh._fHead; f = f._next)
                {
                    if (!f._inside) continue;

                    start = edge = f._anEdge;
                    do
                    {
                        ++_vertexCount;
                        edge = edge._Lnext;
                    }
                    while (edge != start);

                    ++_elementCount;
                }

                _elements = new int[_elementCount * 2];
                _vertices = new ContourVertex[_vertexCount];

                int vertIndex = 0;
                int elementIndex = 0;

                startVert = 0;

                // Second pass: emit the data.
                for (f = _mesh._fHead._next; f != _mesh._fHead; f = f._next)
                {
                    if (!f._inside) continue;

                    vertCount = 0;
                    start = edge = f._anEdge;
                    do
                    {
                        _vertices[vertIndex].Position = edge._Org._coords;
                        _vertices[vertIndex].Data = edge._Org._data;
                        ++vertIndex;
                        ++vertCount;
                        edge = edge._Lnext;
                    }
                    while (edge != start);

                    _elements[elementIndex++] = startVert;
                    _elements[elementIndex++] = vertCount;

                    startVert += vertCount;
                }
            }

            // Shoelace formula on the XY plane; positive for CCW contours.
            private Real SignedArea(ContourVertex[] vertices)
            {
                Real area = 0.0f;

                for (int i = 0; i < vertices.Length; i++)
                {
                    var v0 = vertices[i];
                    var v1 = vertices[(i + 1) % vertices.Length];

                    area += v0.Position.X * v1.Position.Y;
                    area -= v0.Position.Y * v1.Position.X;
                }

                return 0.5f * area;
            }

            /// <summary>Adds a closed contour, keeping its original orientation.</summary>
            public void AddContour(ContourVertex[] vertices)
            {
                AddContour(vertices, ContourOrientation.Original);
            }

            /// <summary>
            /// Adds a closed contour to the mesh, optionally reversing the vertex
            /// order so the contour matches the requested orientation (decided
            /// from the XY signed area).
            /// </summary>
            public void AddContour(ContourVertex[] vertices, ContourOrientation forceOrientation)
            {
                if (_mesh == null)
                {
                    _mesh = new Mesh();
                }

                bool reverse = false;
                if (forceOrientation != ContourOrientation.Original)
                {
                    var area = SignedArea(vertices);
                    reverse = (forceOrientation == ContourOrientation.Clockwise && area < 0.0f) || (forceOrientation == ContourOrientation.CounterClockwise && area > 0.0f);
                }

                MeshUtils.Edge e = null;
                for (int i = 0; i < vertices.Length; ++i)
                {
                    if (e == null)
                    {
                        e = _mesh.MakeEdge();
                        _mesh.Splice(e, e._Sym);
                    }
                    else
                    {
                        // Create a new vertex and edge which immediately follow e
                        // in the ordering around the left face.
                        _mesh.SplitEdge(e);
                        e = e._Lnext;
                    }

                    int index = reverse ? vertices.Length - 1 - i : i;
                    // The new vertex is now e._Org.
                    e._Org._coords = vertices[index].Position;
                    e._Org._data = vertices[index].Data;

                    // The winding of an edge says how the winding number changes as we
                    // cross from the edge's right face to its left face. We add the
                    // vertices in such an order that a CCW contour will add +1 to
                    // the winding number of the region inside the contour.
                    e._winding = 1;
                    e._Sym._winding = -1;
                }
            }

            /// <summary>Tessellates the added contours without a combine callback.</summary>
            public void Tessellate(WindingRule windingRule, ElementType elementType, int polySize)
            {
                Tessellate(windingRule, elementType, polySize, null);
            }

            /// <summary>
            /// Runs the full tessellation pipeline over the contours added so far:
            /// project to the sweep plane, compute interior regions per the winding
            /// rule, then either tessellate them or extract boundary contours.
            /// Results are available through Vertices/Elements; the mesh is released
            /// afterwards, so contours must be re-added before tessellating again.
            /// </summary>
            public void Tessellate(WindingRule windingRule, ElementType elementType, int polySize, CombineCallback combineCallback)
            {
                _normal = Vec3.Zero;
                _vertices = null;
                _elements = null;

                _windingRule = windingRule;
                _combineCallback = combineCallback;

                if (_mesh == null)
                {
                    return;
                }

                // Determine the polygon normal and project vertices onto the plane
                // of the polygon.
                ProjectPolygon();

                // ComputeInterior computes the planar arrangement specified
                // by the given contours, and further subdivides this arrangement
                // into regions. Each region is marked "inside" if it belongs
                // to the polygon, according to the rule given by windingRule.
                // Each interior region is guaranteed be monotone.
                ComputeInterior();

                // If the user wants only the boundary contours, we throw away all edges
                // except those which separate the interior from the exterior.
                // Otherwise we tessellate all the regions marked "inside".
                if (elementType == ElementType.BoundaryContours)
                {
                    SetWindingNumber(1, true);
                }
                else
                {
                    TessellateInterior();
                }

                _mesh.Check();

                if (elementType == ElementType.BoundaryContours)
                {
                    OutputContours();
                }
                else
                {
                    OutputPolymesh(elementType, polySize);
                }

                if (UsePooling)
                {
                    _mesh.Free();
                }
                _mesh = null;
            }
        }
    }
}

View File

@@ -0,0 +1,263 @@
using UnityEngine.Experimental.GlobalIllumination;
using Unity.Collections;
namespace UnityEngine.Rendering.Universal.Internal
{
/// <summary>
/// Computes and submits lighting data to the GPU.
/// </summary>
public class ForwardLights
{
        // Shader property IDs for the light uniforms; resolved once in the
        // ForwardLights constructor via Shader.PropertyToID.
        static class LightConstantBuffer
        {
            public static int _MainLightPosition;   // DeferredLights.LightConstantBuffer also refers to the same ShaderPropertyID - TODO: move this definition to a common location shared by other UniversalRP classes
            public static int _MainLightColor;      // DeferredLights.LightConstantBuffer also refers to the same ShaderPropertyID - TODO: move this definition to a common location shared by other UniversalRP classes
            public static int _MainLightOcclusionProbesChannel;    // Deferred?

            public static int _AdditionalLightsCount;
            public static int _AdditionalLightsPosition;
            public static int _AdditionalLightsColor;
            public static int _AdditionalLightsAttenuation;
            public static int _AdditionalLightsSpotDir;
            public static int _AdditionalLightOcclusionProbeChannel;
        }

        // Property IDs for the structured-buffer path (only set when
        // RenderingUtils.useStructuredBuffer is true — see the constructor).
        int m_AdditionalLightsBufferId;
        int m_AdditionalLightsIndicesId;

        const string k_SetupLightConstants = "Setup Light Constants";
        private static readonly ProfilingSampler m_ProfilingSampler = new ProfilingSampler(k_SetupLightConstants);
        // Mixed-lighting mode detected while iterating visible lights
        // (set in InitializeLightConstants, reset per-frame in SetupShaderLightConstants).
        MixedLightingSetup m_MixedLightingSetup;

        // CPU-side staging arrays for the uniform-array (non structured buffer) path.
        Vector4[] m_AdditionalLightPositions;
        Vector4[] m_AdditionalLightColors;
        Vector4[] m_AdditionalLightAttenuations;
        Vector4[] m_AdditionalLightSpotDirections;
        Vector4[] m_AdditionalLightOcclusionProbeChannels;

        bool m_UseStructuredBuffer;
        /// <summary>
        /// Resolves all shader property IDs once and allocates the CPU-side
        /// staging arrays. Two data paths exist: a structured buffer path
        /// (when RenderingUtils.useStructuredBuffer) and a uniform-array path
        /// sized by UniversalRenderPipeline.maxVisibleAdditionalLights.
        /// </summary>
        public ForwardLights()
        {
            m_UseStructuredBuffer = RenderingUtils.useStructuredBuffer;

            LightConstantBuffer._MainLightPosition = Shader.PropertyToID("_MainLightPosition");
            LightConstantBuffer._MainLightColor = Shader.PropertyToID("_MainLightColor");
            LightConstantBuffer._MainLightOcclusionProbesChannel = Shader.PropertyToID("_MainLightOcclusionProbes");
            LightConstantBuffer._AdditionalLightsCount = Shader.PropertyToID("_AdditionalLightsCount");

            if (m_UseStructuredBuffer)
            {
                // Structured-buffer path: lights are uploaded as a buffer, no staging arrays needed.
                m_AdditionalLightsBufferId = Shader.PropertyToID("_AdditionalLightsBuffer");
                m_AdditionalLightsIndicesId = Shader.PropertyToID("_AdditionalLightsIndices");
            }
            else
            {
                // Uniform-array path: preallocate fixed-size staging arrays.
                LightConstantBuffer._AdditionalLightsPosition = Shader.PropertyToID("_AdditionalLightsPosition");
                LightConstantBuffer._AdditionalLightsColor = Shader.PropertyToID("_AdditionalLightsColor");
                LightConstantBuffer._AdditionalLightsAttenuation = Shader.PropertyToID("_AdditionalLightsAttenuation");
                LightConstantBuffer._AdditionalLightsSpotDir = Shader.PropertyToID("_AdditionalLightsSpotDir");
                LightConstantBuffer._AdditionalLightOcclusionProbeChannel = Shader.PropertyToID("_AdditionalLightsOcclusionProbes");

                int maxLights = UniversalRenderPipeline.maxVisibleAdditionalLights;
                m_AdditionalLightPositions = new Vector4[maxLights];
                m_AdditionalLightColors = new Vector4[maxLights];
                m_AdditionalLightAttenuations = new Vector4[maxLights];
                m_AdditionalLightSpotDirections = new Vector4[maxLights];
                m_AdditionalLightOcclusionProbeChannels = new Vector4[maxLights];
            }
        }
        /// <summary>
        /// Uploads all per-frame light constants and toggles the lighting shader
        /// keywords (per-vertex vs per-pixel additional lights, shadowmask /
        /// subtractive mixed lighting) on a pooled command buffer, then submits it.
        /// Note: the mixed-lighting keywords depend on m_MixedLightingSetup, which
        /// SetupShaderLightConstants resolves while iterating the visible lights.
        /// </summary>
        public void Setup(ScriptableRenderContext context, ref RenderingData renderingData)
        {
            int additionalLightsCount = renderingData.lightData.additionalLightsCount;
            bool additionalLightsPerVertex = renderingData.lightData.shadeAdditionalLightsPerVertex;
            CommandBuffer cmd = CommandBufferPool.Get();
            using (new ProfilingScope(cmd, m_ProfilingSampler))
            {
                SetupShaderLightConstants(cmd, ref renderingData);

                CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.AdditionalLightsVertex,
                    additionalLightsCount > 0 && additionalLightsPerVertex);
                CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.AdditionalLightsPixel,
                    additionalLightsCount > 0 && !additionalLightsPerVertex);

                bool isShadowMask = renderingData.lightData.supportsMixedLighting && m_MixedLightingSetup == MixedLightingSetup.ShadowMask;
                bool isShadowMaskAlways = isShadowMask && QualitySettings.shadowmaskMode == ShadowmaskMode.Shadowmask;
                bool isSubtractive = renderingData.lightData.supportsMixedLighting && m_MixedLightingSetup == MixedLightingSetup.Subtractive;
                CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.LightmapShadowMixing, isSubtractive || isShadowMaskAlways);
                CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.ShadowsShadowMask, isShadowMask);
                CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.MixedLightingSubtractive, isSubtractive); // Backward compatibility
            }
            context.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);
        }
        /// <summary>
        /// Fills the shader constants for one visible light (delegating to
        /// UniversalRenderPipeline.InitializeLightConstants_Common) and, as a
        /// side effect, latches the first mixed-lighting mode encountered into
        /// m_MixedLightingSetup (only for mixed-baked lights that cast shadows).
        /// </summary>
        void InitializeLightConstants(NativeArray<VisibleLight> lights, int lightIndex, out Vector4 lightPos, out Vector4 lightColor, out Vector4 lightAttenuation, out Vector4 lightSpotDir, out Vector4 lightOcclusionProbeChannel)
        {
            UniversalRenderPipeline.InitializeLightConstants_Common(lights, lightIndex, out lightPos, out lightColor, out lightAttenuation, out lightSpotDir, out lightOcclusionProbeChannel);

            // When no lights are visible, main light will be set to -1.
            // In this case we initialize it to default values and return
            if (lightIndex < 0)
                return;

            VisibleLight lightData = lights[lightIndex];
            Light light = lightData.light;
            if (light == null)
                return;

            if (light.bakingOutput.lightmapBakeType == LightmapBakeType.Mixed &&
                lightData.light.shadows != LightShadows.None &&
                m_MixedLightingSetup == MixedLightingSetup.None)
            {
                // Latch only the first mode found; later lights do not override it.
                switch (light.bakingOutput.mixedLightingMode)
                {
                    case MixedLightingMode.Subtractive:
                        m_MixedLightingSetup = MixedLightingSetup.Subtractive;
                        break;
                    case MixedLightingMode.Shadowmask:
                        m_MixedLightingSetup = MixedLightingSetup.ShadowMask;
                        break;
                }
            }
        }
        // Resets the per-frame mixed-lighting state, then uploads the main light
        // and all additional light constants.
        void SetupShaderLightConstants(CommandBuffer cmd, ref RenderingData renderingData)
        {
            m_MixedLightingSetup = MixedLightingSetup.None;

            // Main light has an optimized shader path for main light. This will benefit games that only care about a single light.
            // Universal pipeline also supports only a single shadow light, if available it will be the main light.
            SetupMainLightConstants(cmd, ref renderingData.lightData);
            SetupAdditionalLightConstants(cmd, ref renderingData);
        }
void SetupMainLightConstants(CommandBuffer cmd, ref LightData lightData)
{
Vector4 lightPos, lightColor, lightAttenuation, lightSpotDir, lightOcclusionChannel;
InitializeLightConstants(lightData.visibleLights, lightData.mainLightIndex, out lightPos, out lightColor, out lightAttenuation, out lightSpotDir, out lightOcclusionChannel);
cmd.SetGlobalVector(LightConstantBuffer._MainLightPosition, lightPos);
cmd.SetGlobalVector(LightConstantBuffer._MainLightColor, lightColor);
cmd.SetGlobalVector(LightConstantBuffer._MainLightOcclusionProbesChannel, lightOcclusionChannel);
}
/// <summary>
/// Uploads shader data for all visible additional (non-main) lights and sets
/// _AdditionalLightsCount. Depending on platform support the data goes through a
/// structured buffer (m_UseStructuredBuffer) or through fixed-size global vector arrays.
/// </summary>
/// <param name="cmd">Command buffer the global shader values are recorded into.</param>
/// <param name="renderingData">Per-frame rendering state (visible lights, cull results).</param>
void SetupAdditionalLightConstants(CommandBuffer cmd, ref RenderingData renderingData)
{
    ref LightData lightData = ref renderingData.lightData;
    var cullResults = renderingData.cullResults;
    var lights = lightData.visibleLights;
    int maxAdditionalLightsCount = UniversalRenderPipeline.maxVisibleAdditionalLights;
    // Also remaps per-object light indices; returns how many additional lights survived.
    int additionalLightsCount = SetupPerObjectLightIndices(cullResults, ref lightData);
    if (additionalLightsCount > 0)
    {
        if (m_UseStructuredBuffer)
        {
            NativeArray<ShaderInput.LightData> additionalLightsData = new NativeArray<ShaderInput.LightData>(additionalLightsCount, Allocator.Temp);
            for (int i = 0, lightIter = 0; i < lights.Length && lightIter < maxAdditionalLightsCount; ++i)
            {
                // The main light has its own dedicated constants; skip it here.
                // (Fix: removed an unused `VisibleLight light = lights[i];` local.)
                if (lightData.mainLightIndex != i)
                {
                    ShaderInput.LightData data;
                    InitializeLightConstants(lights, i,
                        out data.position, out data.color, out data.attenuation,
                        out data.spotDirection, out data.occlusionProbeChannels);
                    additionalLightsData[lightIter] = data;
                    lightIter++;
                }
            }

            var lightDataBuffer = ShaderData.instance.GetLightDataBuffer(additionalLightsCount);
            lightDataBuffer.SetData(additionalLightsData);

            int lightIndices = cullResults.lightAndReflectionProbeIndexCount;
            var lightIndicesBuffer = ShaderData.instance.GetLightIndicesBuffer(lightIndices);

            cmd.SetGlobalBuffer(m_AdditionalLightsBufferId, lightDataBuffer);
            cmd.SetGlobalBuffer(m_AdditionalLightsIndicesId, lightIndicesBuffer);

            additionalLightsData.Dispose();
        }
        else
        {
            // Uniform-array path: write straight into the cached member arrays.
            for (int i = 0, lightIter = 0; i < lights.Length && lightIter < maxAdditionalLightsCount; ++i)
            {
                // The main light has its own dedicated constants; skip it here.
                if (lightData.mainLightIndex != i)
                {
                    InitializeLightConstants(lights, i, out m_AdditionalLightPositions[lightIter],
                        out m_AdditionalLightColors[lightIter],
                        out m_AdditionalLightAttenuations[lightIter],
                        out m_AdditionalLightSpotDirections[lightIter],
                        out m_AdditionalLightOcclusionProbeChannels[lightIter]);
                    lightIter++;
                }
            }

            cmd.SetGlobalVectorArray(LightConstantBuffer._AdditionalLightsPosition, m_AdditionalLightPositions);
            cmd.SetGlobalVectorArray(LightConstantBuffer._AdditionalLightsColor, m_AdditionalLightColors);
            cmd.SetGlobalVectorArray(LightConstantBuffer._AdditionalLightsAttenuation, m_AdditionalLightAttenuations);
            cmd.SetGlobalVectorArray(LightConstantBuffer._AdditionalLightsSpotDir, m_AdditionalLightSpotDirections);
            cmd.SetGlobalVectorArray(LightConstantBuffer._AdditionalLightOcclusionProbeChannel, m_AdditionalLightOcclusionProbeChannels);
        }

        // x = per-object additional light cap; shaders only read the x component.
        cmd.SetGlobalVector(LightConstantBuffer._AdditionalLightsCount, new Vector4(lightData.maxPerObjectAdditionalLightsCount,
            0.0f, 0.0f, 0.0f));
    }
    else
    {
        cmd.SetGlobalVector(LightConstantBuffer._AdditionalLightsCount, Vector4.zero);
    }
}
/// <summary>
/// Remaps the per-object light index map so the main light (handled globally by the
/// pipeline) is excluded and remaining additional lights are compacted, then pushes
/// the map back to the culling results. Also fills the structured light/probe index
/// buffer when structured buffers are in use.
/// </summary>
/// <param name="cullResults">Culling results whose light index map is rewritten.</param>
/// <param name="lightData">Per-frame light data (visible lights, main light index).</param>
/// <returns>The number of additional lights that fit into the global light buffer.</returns>
int SetupPerObjectLightIndices(CullingResults cullResults, ref LightData lightData)
{
    if (lightData.additionalLightsCount == 0)
        return lightData.additionalLightsCount;

    var visibleLights = lightData.visibleLights;
    var perObjectLightIndexMap = cullResults.GetLightIndexMap(Allocator.Temp);
    int globalDirectionalLightsCount = 0;
    int additionalLightsCount = 0;

    // Disable all directional lights from the perobject light indices.
    // Pipeline handles main light globally and there's no support for additional directional lights atm.
    // (Fix: removed an unused `VisibleLight light = visibleLights[i];` local.)
    for (int i = 0; i < visibleLights.Length; ++i)
    {
        if (additionalLightsCount >= UniversalRenderPipeline.maxVisibleAdditionalLights)
            break;

        if (i == lightData.mainLightIndex)
        {
            // Main light is excluded from per-object lists entirely.
            perObjectLightIndexMap[i] = -1;
            ++globalDirectionalLightsCount;
        }
        else
        {
            // Shift remaining indices down to compensate for the excluded light(s).
            perObjectLightIndexMap[i] -= globalDirectionalLightsCount;
            ++additionalLightsCount;
        }
    }

    // Disable all remaining lights we cannot fit into the global light buffer.
    for (int i = globalDirectionalLightsCount + additionalLightsCount; i < perObjectLightIndexMap.Length; ++i)
        perObjectLightIndexMap[i] = -1;

    cullResults.SetLightIndexMap(perObjectLightIndexMap);

    if (m_UseStructuredBuffer && additionalLightsCount > 0)
    {
        int lightAndReflectionProbeIndices = cullResults.lightAndReflectionProbeIndexCount;
        Assertions.Assert.IsTrue(lightAndReflectionProbeIndices > 0, "Pipelines configures additional lights but per-object light and probe indices count is zero.");
        cullResults.FillLightAndReflectionProbeIndices(ShaderData.instance.GetLightIndicesBuffer(lightAndReflectionProbeIndices));
    }

    perObjectLightIndexMap.Dispose();
    return additionalLightsCount;
}
}
}

View File

@@ -0,0 +1,796 @@
using UnityEngine.Rendering.Universal.Internal;
using System.Reflection;
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// Rendering modes for Universal renderer.
/// </summary>
public enum RenderingMode
{
    /// <summary>Render all objects and lighting in one pass, with a hard limit on the number of lights that can be applied on an object.</summary>
    Forward,
    /// <summary>Render all objects first in a g-buffer pass, then apply all lighting in a separate pass using deferred shading.
    /// Note: the constructor disables MSAA and several legacy GL device types when this mode is selected.</summary>
    Deferred
};
/// <summary>
/// Default renderer for Universal RP.
/// This renderer is supported on all Universal RP supported platforms.
/// It uses a classic forward rendering strategy with per-object light culling.
/// </summary>
public sealed class ForwardRenderer : ScriptableRenderer
{
// Bit depth used for the camera depth/stencil buffer.
const int k_DepthStencilBufferBits = 32;

// Cached profiling samplers so none are allocated per frame.
private static class Profiling
{
    private const string k_Name = nameof(ForwardRenderer);
    public static readonly ProfilingSampler createCameraRenderTarget = new ProfilingSampler($"{k_Name}.{nameof(CreateCameraRenderTarget)}");
}

// Rendering mode setup from UI.
internal RenderingMode renderingMode { get { return m_RenderingMode; } }

// Actual rendering mode, which may be different (ex: wireframe rendering, hardware not capable of deferred rendering).
internal RenderingMode actualRenderingMode { get { return GL.wireframe || m_DeferredLights == null || !m_DeferredLights.IsRuntimeSupportedThisFrame() ? RenderingMode.Forward : this.renderingMode; } }

// Whether the deferred path encodes g-buffer normals with higher precision (false when deferred is unused).
internal bool accurateGbufferNormals { get { return m_DeferredLights != null ? m_DeferredLights.AccurateGbufferNormals : false; } }

// Built-in render passes scheduled by this renderer.
DepthOnlyPass m_DepthPrepass;
DepthNormalOnlyPass m_DepthNormalPrepass;
MainLightShadowCasterPass m_MainLightShadowCasterPass;
AdditionalLightsShadowCasterPass m_AdditionalLightsShadowCasterPass;
GBufferPass m_GBufferPass;
CopyDepthPass m_GBufferCopyDepthPass;
TileDepthRangePass m_TileDepthRangePass;
TileDepthRangePass m_TileDepthRangeExtraPass; // TODO use subpass API to hide this pass
DeferredPass m_DeferredPass;
DrawObjectsPass m_RenderOpaqueForwardOnlyPass;
DrawObjectsPass m_RenderOpaqueForwardPass;
DrawSkyboxPass m_DrawSkyboxPass;
CopyDepthPass m_CopyDepthPass;
CopyColorPass m_CopyColorPass;
TransparentSettingsPass m_TransparentSettingsPass;
DrawObjectsPass m_RenderTransparentForwardPass;
InvokeOnRenderObjectCallbackPass m_OnRenderObjectCallbackPass;
FinalBlitPass m_FinalBlitPass;
CapturePass m_CapturePass;
#if ENABLE_VR && ENABLE_XR_MODULE
XROcclusionMeshPass m_XROcclusionMeshPass;
CopyDepthPass m_XRCopyDepthPass;
#endif
#if UNITY_EDITOR
SceneViewDepthCopyPass m_SceneViewDepthCopyPass;
#endif

// Render target handles. The "active" handles point at either the intermediate
// textures below or at the camera target, decided per camera in Setup().
RenderTargetHandle m_ActiveCameraColorAttachment;
RenderTargetHandle m_ActiveCameraDepthAttachment;
RenderTargetHandle m_CameraColorAttachment;
RenderTargetHandle m_CameraDepthAttachment;
RenderTargetHandle m_DepthTexture;
RenderTargetHandle m_NormalsTexture;
RenderTargetHandle[] m_GBufferHandles;
RenderTargetHandle m_OpaqueColor;
// For tiled-deferred shading.
RenderTargetHandle m_DepthInfoTexture;
RenderTargetHandle m_TileDepthInfoTexture;

ForwardLights m_ForwardLights;
DeferredLights m_DeferredLights;
RenderingMode m_RenderingMode;
StencilState m_DefaultStencilState;

// Materials used in URP Scriptable Render Passes; created in the constructor, destroyed in Dispose().
Material m_BlitMaterial = null;
Material m_CopyDepthMaterial = null;
Material m_SamplingMaterial = null;
Material m_TileDepthInfoMaterial = null;
Material m_TileDeferredMaterial = null;
Material m_StencilDeferredMaterial = null;

// Post-processing pass bundle and accessors forwarding to it.
PostProcessPasses m_PostProcessPasses;
internal ColorGradingLutPass colorGradingLutPass { get => m_PostProcessPasses.colorGradingLutPass; }
internal PostProcessPass postProcessPass { get => m_PostProcessPasses.postProcessPass; }
internal PostProcessPass finalPostProcessPass { get => m_PostProcessPasses.finalPostProcessPass; }
internal RenderTargetHandle afterPostProcessColor { get => m_PostProcessPasses.afterPostProcessColor; }
internal RenderTargetHandle colorGradingLut { get => m_PostProcessPasses.colorGradingLut; }
// Creates the renderer from its serialized data: engine materials, default stencil
// state, all built-in render passes (ordered by RenderPassEvent), render target
// handles, and the supported-features descriptor.
public ForwardRenderer(ForwardRendererData data) : base(data)
{
#if ENABLE_VR && ENABLE_XR_MODULE
    UniversalRenderPipeline.m_XRSystem.InitializeXRSystemData(data.xrSystemData);
#endif
    // Materials owned by this renderer; released in Dispose().
    m_BlitMaterial = CoreUtils.CreateEngineMaterial(data.shaders.blitPS);
    m_CopyDepthMaterial = CoreUtils.CreateEngineMaterial(data.shaders.copyDepthPS);
    m_SamplingMaterial = CoreUtils.CreateEngineMaterial(data.shaders.samplingPS);
    //m_TileDepthInfoMaterial = CoreUtils.CreateEngineMaterial(data.shaders.tileDepthInfoPS);
    //m_TileDeferredMaterial = CoreUtils.CreateEngineMaterial(data.shaders.tileDeferredPS);
    m_StencilDeferredMaterial = CoreUtils.CreateEngineMaterial(data.shaders.stencilDeferredPS);

    // Build the default stencil state from the serialized override settings.
    StencilStateData stencilData = data.defaultStencilState;
    m_DefaultStencilState = StencilState.defaultValue;
    m_DefaultStencilState.enabled = stencilData.overrideStencilState;
    m_DefaultStencilState.SetCompareFunction(stencilData.stencilCompareFunction);
    m_DefaultStencilState.SetPassOperation(stencilData.passOperation);
    m_DefaultStencilState.SetFailOperation(stencilData.failOperation);
    m_DefaultStencilState.SetZFailOperation(stencilData.zFailOperation);

    m_ForwardLights = new ForwardLights();
    //m_DeferredLights.LightCulling = data.lightCulling;
    this.m_RenderingMode = data.renderingMode;

    // Note: Since all custom render passes inject first and we have stable sort,
    // we inject the builtin passes in the before events.
    m_MainLightShadowCasterPass = new MainLightShadowCasterPass(RenderPassEvent.BeforeRenderingShadows);
    m_AdditionalLightsShadowCasterPass = new AdditionalLightsShadowCasterPass(RenderPassEvent.BeforeRenderingShadows);
#if ENABLE_VR && ENABLE_XR_MODULE
    m_XROcclusionMeshPass = new XROcclusionMeshPass(RenderPassEvent.BeforeRenderingOpaques);
    // Schedule XR copydepth right after m_FinalBlitPass(AfterRendering + 1)
    m_XRCopyDepthPass = new CopyDepthPass(RenderPassEvent.AfterRendering + 2, m_CopyDepthMaterial);
#endif
    m_DepthPrepass = new DepthOnlyPass(RenderPassEvent.BeforeRenderingPrePasses, RenderQueueRange.opaque, data.opaqueLayerMask);
    m_DepthNormalPrepass = new DepthNormalOnlyPass(RenderPassEvent.BeforeRenderingPrePasses, RenderQueueRange.opaque, data.opaqueLayerMask);

    // Deferred-only passes; only instantiated when the serialized mode asks for deferred.
    if (this.renderingMode == RenderingMode.Deferred)
    {
        m_DeferredLights = new DeferredLights(m_TileDepthInfoMaterial, m_TileDeferredMaterial, m_StencilDeferredMaterial);
        m_DeferredLights.AccurateGbufferNormals = data.accurateGbufferNormals;
        //m_DeferredLights.TiledDeferredShading = data.tiledDeferredShading;
        m_DeferredLights.TiledDeferredShading = false;

        m_GBufferPass = new GBufferPass(RenderPassEvent.BeforeRenderingOpaques, RenderQueueRange.opaque, data.opaqueLayerMask, m_DefaultStencilState, stencilData.stencilReference, m_DeferredLights);
        // Forward-only pass only runs if deferred renderer is enabled.
        // It allows specific materials to be rendered in a forward-like pass.
        // We render both gbuffer pass and forward-only pass before the deferred lighting pass so we can minimize copies of depth buffer and
        // benefits from some depth rejection.
        // - If a material can be rendered either forward or deferred, then it should declare a UniversalForward and a UniversalGBuffer pass.
        // - If a material cannot be lit in deferred (unlit, bakedLit, special material such as hair, skin shader), then it should declare UniversalForwardOnly pass
        // - Legacy materials have unnamed pass, which is implicitly renamed as SRPDefaultUnlit. In that case, they are considered forward-only too.
        // To declare a material with unnamed pass and UniversalForward/UniversalForwardOnly pass is an ERROR, as the material will be rendered twice.
        StencilState forwardOnlyStencilState = DeferredLights.OverwriteStencil(m_DefaultStencilState, (int)StencilUsage.MaterialMask);
        ShaderTagId[] forwardOnlyShaderTagIds = new ShaderTagId[]
        {
            new ShaderTagId("UniversalForwardOnly"),
            new ShaderTagId("SRPDefaultUnlit"), // Legacy shaders (do not have a gbuffer pass) are considered forward-only for backward compatibility
            new ShaderTagId("LightweightForward") // Legacy shaders (do not have a gbuffer pass) are considered forward-only for backward compatibility
        };
        int forwardOnlyStencilRef = stencilData.stencilReference | (int)StencilUsage.MaterialUnlit;
        m_RenderOpaqueForwardOnlyPass = new DrawObjectsPass("Render Opaques Forward Only", forwardOnlyShaderTagIds, true, RenderPassEvent.BeforeRenderingOpaques + 1, RenderQueueRange.opaque, data.opaqueLayerMask, forwardOnlyStencilState, forwardOnlyStencilRef);
        m_GBufferCopyDepthPass = new CopyDepthPass(RenderPassEvent.BeforeRenderingOpaques + 2, m_CopyDepthMaterial);
        m_TileDepthRangePass = new TileDepthRangePass(RenderPassEvent.BeforeRenderingOpaques + 3, m_DeferredLights, 0);
        m_TileDepthRangeExtraPass = new TileDepthRangePass(RenderPassEvent.BeforeRenderingOpaques + 4, m_DeferredLights, 1);
        m_DeferredPass = new DeferredPass(RenderPassEvent.BeforeRenderingOpaques + 5, m_DeferredLights);
    }

    // Always create this pass even in deferred because we use it for wireframe rendering in the Editor or offscreen depth texture rendering.
    m_RenderOpaqueForwardPass = new DrawObjectsPass(URPProfileId.DrawOpaqueObjects, true, RenderPassEvent.BeforeRenderingOpaques, RenderQueueRange.opaque, data.opaqueLayerMask, m_DefaultStencilState, stencilData.stencilReference);

    m_CopyDepthPass = new CopyDepthPass(RenderPassEvent.AfterRenderingSkybox, m_CopyDepthMaterial);
    m_DrawSkyboxPass = new DrawSkyboxPass(RenderPassEvent.BeforeRenderingSkybox);
    m_CopyColorPass = new CopyColorPass(RenderPassEvent.AfterRenderingSkybox, m_SamplingMaterial, m_BlitMaterial);
#if ADAPTIVE_PERFORMANCE_2_1_0_OR_NEWER
    if (!UniversalRenderPipeline.asset.useAdaptivePerformance || AdaptivePerformance.AdaptivePerformanceRenderSettings.SkipTransparentObjects == false)
#endif
    {
        m_TransparentSettingsPass = new TransparentSettingsPass(RenderPassEvent.BeforeRenderingTransparents, data.shadowTransparentReceive);
        m_RenderTransparentForwardPass = new DrawObjectsPass(URPProfileId.DrawTransparentObjects, false, RenderPassEvent.BeforeRenderingTransparents, RenderQueueRange.transparent, data.transparentLayerMask, m_DefaultStencilState, stencilData.stencilReference);
    }
    m_OnRenderObjectCallbackPass = new InvokeOnRenderObjectCallbackPass(RenderPassEvent.BeforeRenderingPostProcessing);

    m_PostProcessPasses = new PostProcessPasses(data.postProcessData, m_BlitMaterial);

    m_CapturePass = new CapturePass(RenderPassEvent.AfterRendering);
    m_FinalBlitPass = new FinalBlitPass(RenderPassEvent.AfterRendering + 1, m_BlitMaterial);

#if UNITY_EDITOR
    m_SceneViewDepthCopyPass = new SceneViewDepthCopyPass(RenderPassEvent.AfterRendering + 9, m_CopyDepthMaterial);
#endif

    // RenderTexture format depends on camera and pipeline (HDR, non HDR, etc)
    // Samples (MSAA) depend on camera and pipeline
    m_CameraColorAttachment.Init("_CameraColorTexture");
    m_CameraDepthAttachment.Init("_CameraDepthAttachment");
    m_DepthTexture.Init("_CameraDepthTexture");
    m_NormalsTexture.Init("_CameraNormalsTexture");
    if (this.renderingMode == RenderingMode.Deferred)
    {
        m_GBufferHandles = new RenderTargetHandle[(int)DeferredLights.GBufferHandles.Count];
        m_GBufferHandles[(int)DeferredLights.GBufferHandles.DepthAsColor].Init("_GBufferDepthAsColor");
        m_GBufferHandles[(int)DeferredLights.GBufferHandles.Albedo].Init("_GBuffer0");
        m_GBufferHandles[(int)DeferredLights.GBufferHandles.SpecularMetallic].Init("_GBuffer1");
        m_GBufferHandles[(int)DeferredLights.GBufferHandles.NormalSmoothness].Init("_GBuffer2");
        m_GBufferHandles[(int)DeferredLights.GBufferHandles.Lighting] = new RenderTargetHandle();
        m_GBufferHandles[(int)DeferredLights.GBufferHandles.ShadowMask].Init("_GBuffer4");
    }
    m_OpaqueColor.Init("_CameraOpaqueTexture");
    m_DepthInfoTexture.Init("_DepthInfoTexture");
    m_TileDepthInfoTexture.Init("_TileDepthInfoTexture");

    supportedRenderingFeatures = new RenderingFeatures()
    {
        cameraStacking = true,
    };

    if (this.renderingMode == RenderingMode.Deferred)
    {
        // Deferred rendering does not support MSAA.
        this.supportedRenderingFeatures.msaa = false;

        // Avoid legacy platforms: use vulkan instead.
        unsupportedGraphicsDeviceTypes = new GraphicsDeviceType[]
        {
            GraphicsDeviceType.OpenGLCore,
            GraphicsDeviceType.OpenGLES2,
            GraphicsDeviceType.OpenGLES3
        };
    }
}
/// <inheritdoc />
protected override void Dispose(bool disposing)
{
m_PostProcessPasses.Dispose();
CoreUtils.Destroy(m_BlitMaterial);
CoreUtils.Destroy(m_CopyDepthMaterial);
CoreUtils.Destroy(m_SamplingMaterial);
CoreUtils.Destroy(m_TileDepthInfoMaterial);
CoreUtils.Destroy(m_TileDeferredMaterial);
CoreUtils.Destroy(m_StencilDeferredMaterial);
}
/// <inheritdoc />
public override void Setup(ScriptableRenderContext context, ref RenderingData renderingData)
{
#if ADAPTIVE_PERFORMANCE_2_1_0_OR_NEWER
bool needTransparencyPass = !UniversalRenderPipeline.asset.useAdaptivePerformance || !AdaptivePerformance.AdaptivePerformanceRenderSettings.SkipTransparentObjects;
#endif
Camera camera = renderingData.cameraData.camera;
ref CameraData cameraData = ref renderingData.cameraData;
RenderTextureDescriptor cameraTargetDescriptor = renderingData.cameraData.cameraTargetDescriptor;
// Special path for depth only offscreen cameras. Only write opaques + transparents.
bool isOffscreenDepthTexture = cameraData.targetTexture != null && cameraData.targetTexture.format == RenderTextureFormat.Depth;
if (isOffscreenDepthTexture)
{
ConfigureCameraTarget(BuiltinRenderTextureType.CameraTarget, BuiltinRenderTextureType.CameraTarget);
AddRenderPasses(ref renderingData);
EnqueuePass(m_RenderOpaqueForwardPass);
// TODO: Do we need to inject transparents and skybox when rendering depth only camera? They don't write to depth.
EnqueuePass(m_DrawSkyboxPass);
#if ADAPTIVE_PERFORMANCE_2_1_0_OR_NEWER
if (!needTransparencyPass)
return;
#endif
EnqueuePass(m_RenderTransparentForwardPass);
return;
}
if (m_DeferredLights != null)
m_DeferredLights.ResolveMixedLightingMode(ref renderingData);
// Assign the camera color target early in case it is needed during AddRenderPasses.
bool isPreviewCamera = cameraData.isPreviewCamera;
var createColorTexture = rendererFeatures.Count != 0 && !isPreviewCamera;
if (createColorTexture)
{
m_ActiveCameraColorAttachment = m_CameraColorAttachment;
var activeColorRenderTargetId = m_ActiveCameraColorAttachment.Identifier();
#if ENABLE_VR && ENABLE_XR_MODULE
if (cameraData.xr.enabled) activeColorRenderTargetId = new RenderTargetIdentifier(activeColorRenderTargetId, 0, CubemapFace.Unknown, -1);
#endif
ConfigureCameraColorTarget(activeColorRenderTargetId);
}
// Add render passes and gather the input requirements
isCameraColorTargetValid = true;
AddRenderPasses(ref renderingData);
isCameraColorTargetValid = false;
RenderPassInputSummary renderPassInputs = GetRenderPassInputs(ref renderingData);
// Should apply post-processing after rendering this camera?
bool applyPostProcessing = cameraData.postProcessEnabled && m_PostProcessPasses.isCreated;
// There's at least a camera in the camera stack that applies post-processing
bool anyPostProcessing = renderingData.postProcessingEnabled && m_PostProcessPasses.isCreated;
// TODO: We could cache and generate the LUT before rendering the stack
bool generateColorGradingLUT = cameraData.postProcessEnabled && m_PostProcessPasses.isCreated;
bool isSceneViewCamera = cameraData.isSceneViewCamera;
bool requiresDepthTexture = cameraData.requiresDepthTexture || renderPassInputs.requiresDepthTexture || this.actualRenderingMode == RenderingMode.Deferred;
bool mainLightShadows = m_MainLightShadowCasterPass.Setup(ref renderingData);
bool additionalLightShadows = m_AdditionalLightsShadowCasterPass.Setup(ref renderingData);
bool transparentsNeedSettingsPass = m_TransparentSettingsPass.Setup(ref renderingData);
// Depth prepass is generated in the following cases:
// - If game or offscreen camera requires it we check if we can copy the depth from the rendering opaques pass and use that instead.
// - Scene or preview cameras always require a depth texture. We do a depth pre-pass to simplify it and it shouldn't matter much for editor.
// - Render passes require it
bool requiresDepthPrepass = requiresDepthTexture && !CanCopyDepth(ref renderingData.cameraData);
requiresDepthPrepass |= isSceneViewCamera;
requiresDepthPrepass |= isPreviewCamera;
requiresDepthPrepass |= renderPassInputs.requiresDepthPrepass;
requiresDepthPrepass |= renderPassInputs.requiresNormalsTexture;
// The copying of depth should normally happen after rendering opaques.
// But if we only require it for post processing or the scene camera then we do it after rendering transparent objects
m_CopyDepthPass.renderPassEvent = (!requiresDepthTexture && (applyPostProcessing || isSceneViewCamera)) ? RenderPassEvent.AfterRenderingTransparents : RenderPassEvent.AfterRenderingOpaques;
createColorTexture |= RequiresIntermediateColorTexture(ref cameraData);
createColorTexture |= renderPassInputs.requiresColorTexture;
createColorTexture &= !isPreviewCamera;
// If camera requires depth and there's no depth pre-pass we create a depth texture that can be read later by effect requiring it.
// When deferred renderer is enabled, we must always create a depth texture and CANNOT use BuiltinRenderTextureType.CameraTarget. This is to get
// around a bug where during gbuffer pass (MRT pass), the camera depth attachment is correctly bound, but during
// deferred pass ("camera color" + "camera depth"), the implicit depth surface of "camera color" is used instead of "camera depth",
// because BuiltinRenderTextureType.CameraTarget for depth means there is no explicit depth attachment...
bool createDepthTexture = cameraData.requiresDepthTexture && !requiresDepthPrepass;
createDepthTexture |= (cameraData.renderType == CameraRenderType.Base && !cameraData.resolveFinalTarget);
// Deferred renderer always need to access depth buffer.
createDepthTexture |= this.actualRenderingMode == RenderingMode.Deferred;
#if ENABLE_VR && ENABLE_XR_MODULE
if (cameraData.xr.enabled)
{
// URP can't handle msaa/size mismatch between depth RT and color RT(for now we create intermediate textures to ensure they match)
createDepthTexture |= createColorTexture;
createColorTexture = createDepthTexture;
}
#endif
#if UNITY_ANDROID || UNITY_WEBGL
if (SystemInfo.graphicsDeviceType != GraphicsDeviceType.Vulkan)
{
// GLES can not use render texture's depth buffer with the color buffer of the backbuffer
// in such case we create a color texture for it too.
createColorTexture |= createDepthTexture;
}
#endif
// Configure all settings require to start a new camera stack (base camera only)
if (cameraData.renderType == CameraRenderType.Base)
{
RenderTargetHandle cameraTargetHandle = RenderTargetHandle.GetCameraTarget(cameraData.xr);
m_ActiveCameraColorAttachment = (createColorTexture) ? m_CameraColorAttachment : cameraTargetHandle;
m_ActiveCameraDepthAttachment = (createDepthTexture) ? m_CameraDepthAttachment : cameraTargetHandle;
bool intermediateRenderTexture = createColorTexture || createDepthTexture;
// Doesn't create texture for Overlay cameras as they are already overlaying on top of created textures.
if (intermediateRenderTexture)
CreateCameraRenderTarget(context, ref cameraTargetDescriptor, createColorTexture, createDepthTexture);
}
else
{
m_ActiveCameraColorAttachment = m_CameraColorAttachment;
m_ActiveCameraDepthAttachment = m_CameraDepthAttachment;
}
// Assign camera targets (color and depth)
{
var activeColorRenderTargetId = m_ActiveCameraColorAttachment.Identifier();
var activeDepthRenderTargetId = m_ActiveCameraDepthAttachment.Identifier();
#if ENABLE_VR && ENABLE_XR_MODULE
if (cameraData.xr.enabled)
{
activeColorRenderTargetId = new RenderTargetIdentifier(activeColorRenderTargetId, 0, CubemapFace.Unknown, -1);
activeDepthRenderTargetId = new RenderTargetIdentifier(activeDepthRenderTargetId, 0, CubemapFace.Unknown, -1);
}
#endif
ConfigureCameraTarget(activeColorRenderTargetId, activeDepthRenderTargetId);
}
bool hasPassesAfterPostProcessing = activeRenderPassQueue.Find(x => x.renderPassEvent == RenderPassEvent.AfterRendering) != null;
if (mainLightShadows)
EnqueuePass(m_MainLightShadowCasterPass);
if (additionalLightShadows)
EnqueuePass(m_AdditionalLightsShadowCasterPass);
if (requiresDepthPrepass)
{
if (renderPassInputs.requiresNormalsTexture)
{
m_DepthNormalPrepass.Setup(cameraTargetDescriptor, m_DepthTexture, m_NormalsTexture);
EnqueuePass(m_DepthNormalPrepass);
}
else
{
m_DepthPrepass.Setup(cameraTargetDescriptor, m_DepthTexture);
EnqueuePass(m_DepthPrepass);
}
}
if (generateColorGradingLUT)
{
colorGradingLutPass.Setup(colorGradingLut);
EnqueuePass(colorGradingLutPass);
}
#if ENABLE_VR && ENABLE_XR_MODULE
if (cameraData.xr.hasValidOcclusionMesh)
EnqueuePass(m_XROcclusionMeshPass);
#endif
if (this.actualRenderingMode == RenderingMode.Deferred)
EnqueueDeferred(ref renderingData, requiresDepthPrepass, mainLightShadows, additionalLightShadows);
else
EnqueuePass(m_RenderOpaqueForwardPass);
Skybox cameraSkybox;
cameraData.camera.TryGetComponent<Skybox>(out cameraSkybox);
bool isOverlayCamera = cameraData.renderType == CameraRenderType.Overlay;
if (camera.clearFlags == CameraClearFlags.Skybox && (RenderSettings.skybox != null || cameraSkybox?.material != null) && !isOverlayCamera)
EnqueuePass(m_DrawSkyboxPass);
// If a depth texture was created we necessarily need to copy it, otherwise we could have render it to a renderbuffer.
// If deferred rendering path was selected, it has already made a copy.
bool requiresDepthCopyPass = !requiresDepthPrepass
&& renderingData.cameraData.requiresDepthTexture
&& createDepthTexture
&& this.actualRenderingMode != RenderingMode.Deferred;
if (requiresDepthCopyPass)
{
m_CopyDepthPass.Setup(m_ActiveCameraDepthAttachment, m_DepthTexture);
EnqueuePass(m_CopyDepthPass);
}
// For Base Cameras: Set the depth texture to the far Z if we do not have a depth prepass or copy depth
if (cameraData.renderType == CameraRenderType.Base && !requiresDepthPrepass && !requiresDepthCopyPass)
{
Shader.SetGlobalTexture(m_DepthTexture.id, SystemInfo.usesReversedZBuffer ? Texture2D.blackTexture : Texture2D.whiteTexture);
}
if (renderingData.cameraData.requiresOpaqueTexture || renderPassInputs.requiresColorTexture)
{
// TODO: Downsampling method should be store in the renderer instead of in the asset.
// We need to migrate this data to renderer. For now, we query the method in the active asset.
Downsampling downsamplingMethod = UniversalRenderPipeline.asset.opaqueDownsampling;
m_CopyColorPass.Setup(m_ActiveCameraColorAttachment.Identifier(), m_OpaqueColor, downsamplingMethod);
EnqueuePass(m_CopyColorPass);
}
#if ADAPTIVE_PERFORMANCE_2_1_0_OR_NEWER
if (needTransparencyPass)
#endif
{
if (transparentsNeedSettingsPass)
{
EnqueuePass(m_TransparentSettingsPass);
}
EnqueuePass(m_RenderTransparentForwardPass);
}
EnqueuePass(m_OnRenderObjectCallbackPass);
bool lastCameraInTheStack = cameraData.resolveFinalTarget;
bool hasCaptureActions = renderingData.cameraData.captureActions != null && lastCameraInTheStack;
bool applyFinalPostProcessing = anyPostProcessing && lastCameraInTheStack &&
renderingData.cameraData.antialiasing == AntialiasingMode.FastApproximateAntialiasing;
// When post-processing is enabled we can use the stack to resolve rendering to camera target (screen or RT).
// However when there are render passes executing after post we avoid resolving to screen so rendering continues (before sRGBConvertion etc)
bool resolvePostProcessingToCameraTarget = !hasCaptureActions && !hasPassesAfterPostProcessing && !applyFinalPostProcessing;
if (lastCameraInTheStack)
{
// Post-processing will resolve to final target. No need for final blit pass.
if (applyPostProcessing)
{
var destination = resolvePostProcessingToCameraTarget ? RenderTargetHandle.CameraTarget : afterPostProcessColor;
// if resolving to screen we need to be able to perform sRGBConvertion in post-processing if necessary
bool doSRGBConvertion = resolvePostProcessingToCameraTarget;
postProcessPass.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment, destination, m_ActiveCameraDepthAttachment, colorGradingLut, applyFinalPostProcessing, doSRGBConvertion);
EnqueuePass(postProcessPass);
}
// if we applied post-processing for this camera it means current active texture is m_AfterPostProcessColor
var sourceForFinalPass = (applyPostProcessing) ? afterPostProcessColor : m_ActiveCameraColorAttachment;
// Do FXAA or any other final post-processing effect that might need to run after AA.
if (applyFinalPostProcessing)
{
finalPostProcessPass.SetupFinalPass(sourceForFinalPass);
EnqueuePass(finalPostProcessPass);
}
if (renderingData.cameraData.captureActions != null)
{
m_CapturePass.Setup(sourceForFinalPass);
EnqueuePass(m_CapturePass);
}
// if post-processing then we already resolved to camera target while doing post.
// Also only do final blit if camera is not rendering to RT.
bool cameraTargetResolved =
// final PP always blit to camera target
applyFinalPostProcessing ||
// no final PP but we have PP stack. In that case it blit unless there are render pass after PP
(applyPostProcessing && !hasPassesAfterPostProcessing) ||
// offscreen camera rendering to a texture, we don't need a blit pass to resolve to screen
m_ActiveCameraColorAttachment == RenderTargetHandle.GetCameraTarget(cameraData.xr);
// We need final blit to resolve to screen
if (!cameraTargetResolved)
{
m_FinalBlitPass.Setup(cameraTargetDescriptor, sourceForFinalPass);
EnqueuePass(m_FinalBlitPass);
}
#if ENABLE_VR && ENABLE_XR_MODULE
bool depthTargetResolved =
// active depth is depth target, we don't need a blit pass to resolve
m_ActiveCameraDepthAttachment == RenderTargetHandle.GetCameraTarget(cameraData.xr);
if (!depthTargetResolved && cameraData.xr.copyDepth)
{
m_XRCopyDepthPass.Setup(m_ActiveCameraDepthAttachment, RenderTargetHandle.GetCameraTarget(cameraData.xr));
EnqueuePass(m_XRCopyDepthPass);
}
#endif
}
// stay in RT so we resume rendering on stack after post-processing
else if (applyPostProcessing)
{
postProcessPass.Setup(cameraTargetDescriptor, m_ActiveCameraColorAttachment, afterPostProcessColor, m_ActiveCameraDepthAttachment, colorGradingLut, false, false);
EnqueuePass(postProcessPass);
}
#if UNITY_EDITOR
if (isSceneViewCamera)
{
// Scene view camera should always resolve target (not stacked)
Assertions.Assert.IsTrue(lastCameraInTheStack, "Editor camera must resolve target upon finish rendering.");
m_SceneViewDepthCopyPass.Setup(m_DepthTexture);
EnqueuePass(m_SceneViewDepthCopyPass);
}
#endif
}
/// <inheritdoc />
public override void SetupLights(ScriptableRenderContext context, ref RenderingData renderingData)
{
m_ForwardLights.Setup(context, ref renderingData);
// Perform per-tile light culling on CPU
if (this.actualRenderingMode == RenderingMode.Deferred)
m_DeferredLights.SetupLights(context, ref renderingData);
}
/// <inheritdoc />
public override void SetupCullingParameters(ref ScriptableCullingParameters cullingParameters,
ref CameraData cameraData)
{
// TODO: PerObjectCulling also affect reflection probes. Enabling it for now.
// if (asset.additionalLightsRenderingMode == LightRenderingMode.Disabled ||
// asset.maxAdditionalLightsCount == 0)
// {
// cullingParameters.cullingOptions |= CullingOptions.DisablePerObjectCulling;
// }
// We disable shadow casters if both shadow casting modes are turned off
// or the shadow distance has been turned down to zero
bool isShadowCastingDisabled = !UniversalRenderPipeline.asset.supportsMainLightShadows && !UniversalRenderPipeline.asset.supportsAdditionalLightShadows;
bool isShadowDistanceZero = Mathf.Approximately(cameraData.maxShadowDistance, 0.0f);
if (isShadowCastingDisabled || isShadowDistanceZero)
{
cullingParameters.cullingOptions &= ~CullingOptions.ShadowCasters;
}
if (this.actualRenderingMode == RenderingMode.Deferred)
cullingParameters.maximumVisibleLights = 0xFFFF;
else
{
// We set the number of maximum visible lights allowed and we add one for the mainlight...
//
// Note: However ScriptableRenderContext.Cull() does not differentiate between light types.
// If there is no active main light in the scene, ScriptableRenderContext.Cull() might return ( cullingParameters.maximumVisibleLights ) visible additional lights.
// i.e ScriptableRenderContext.Cull() might return ( UniversalRenderPipeline.maxVisibleAdditionalLights + 1 ) visible additional lights !
cullingParameters.maximumVisibleLights = UniversalRenderPipeline.maxVisibleAdditionalLights + 1;
}
cullingParameters.shadowDistance = cameraData.maxShadowDistance;
}
/// <inheritdoc />
public override void FinishRendering(CommandBuffer cmd)
{
    // Release any intermediate color/depth targets that were allocated this frame
    // and reset the handles back to the camera target for the next frame.
    void ReleaseIfAllocated(ref RenderTargetHandle attachment)
    {
        if (attachment != RenderTargetHandle.CameraTarget)
        {
            cmd.ReleaseTemporaryRT(attachment.id);
            attachment = RenderTargetHandle.CameraTarget;
        }
    }

    ReleaseIfAllocated(ref m_ActiveCameraColorAttachment);
    ReleaseIfAllocated(ref m_ActiveCameraDepthAttachment);
}
// Configures the deferred lighting system and enqueues the passes required for
// deferred shading: gbuffer, forward-only opaques, an optional depth copy,
// per-tile depth ranges, and finally the deferred lighting pass.
// NOTE(review): applyMainShadow is currently unused in this method.
void EnqueueDeferred(ref RenderingData renderingData, bool hasDepthPrepass, bool applyMainShadow, bool applyAdditionalShadow)
{
    // the last slice is the lighting buffer created in DeferredRenderer.cs
    m_GBufferHandles[(int)DeferredLights.GBufferHandles.Lighting] = m_ActiveCameraColorAttachment;
    m_DeferredLights.Setup(
        ref renderingData,
        applyAdditionalShadow ? m_AdditionalLightsShadowCasterPass : null,
        hasDepthPrepass,
        renderingData.cameraData.renderType == CameraRenderType.Overlay,
        m_DepthTexture,
        m_DepthInfoTexture,
        m_TileDepthInfoTexture,
        m_ActiveCameraDepthAttachment, m_GBufferHandles
    );
    EnqueuePass(m_GBufferPass);
    // Opaques that use forward-only shading paths still render in deferred mode.
    EnqueuePass(m_RenderOpaqueForwardOnlyPass);
    // Must copy depth for deferred shading: TODO wait for API fix to bind depth texture as read-only resource.
    if (!hasDepthPrepass)
    {
        m_GBufferCopyDepthPass.Setup(m_CameraDepthAttachment, m_DepthTexture);
        EnqueuePass(m_GBufferCopyDepthPass);
    }
    // Note: DeferredRender.Setup is called by UniversalRenderPipeline.RenderSingleCamera (overrides ScriptableRenderer.Setup).
    // At this point, we do not know if m_DeferredLights.m_Tilers[x].m_Tiles actually contain any indices of lights intersecting tiles (If there are no lights intersecting tiles, we could skip several following passes) : this information is computed in DeferredRender.SetupLights, which is called later by UniversalRenderPipeline.RenderSingleCamera (via ScriptableRenderer.Execute).
    // However HasTileLights uses m_HasTileVisLights which is calculated by CheckHasTileLights from all visibleLights. visibleLights is the list of lights that have passed camera culling, so we know they are in front of the camera. So we can assume m_DeferredLights.m_Tilers[x].m_Tiles will not be empty in that case.
    // m_DeferredLights.m_Tilers[x].m_Tiles could be empty if we implemented an algorithm accessing scene depth information on the CPU side, but this (access depth from CPU) will probably not happen.
    if (m_DeferredLights.HasTileLights())
    {
        // Compute for each tile a 32bits bitmask in which a raised bit means "this 1/32th depth slice contains geometry that could intersect with lights".
        // Per-tile bitmasks are obtained by merging together the per-pixel bitmasks computed for each individual pixel of the tile.
        EnqueuePass(m_TileDepthRangePass);
        // On some platforms, splitting the bitmask computation into two passes:
        // 1/ Compute bitmasks for individual or small blocks of pixels
        // 2/ merge those individual bitmasks into per-tile bitmasks
        // provides better performance than doing it in the single pass above.
        if (m_DeferredLights.HasTileDepthRangeExtraPass())
            EnqueuePass(m_TileDepthRangeExtraPass);
    }
    EnqueuePass(m_DeferredPass);
}
// Aggregated resource requirements collected from the active render pass queue
// (see GetRenderPassInputs).
private struct RenderPassInputSummary
{
    internal bool requiresDepthTexture;   // at least one pass declared a Depth input
    internal bool requiresDepthPrepass;   // depth (and/or normals) must come from a prepass
    internal bool requiresNormalsTexture; // at least one pass declared a Normal input
    internal bool requiresColorTexture;   // at least one pass declared a Color input
}
// Walks the enqueued passes and merges their declared ScriptableRenderPassInput
// requirements into a single summary for this frame.
private RenderPassInputSummary GetRenderPassInputs(ref RenderingData renderingData)
{
    var summary = new RenderPassInputSummary();
    foreach (ScriptableRenderPass pass in activeRenderPassQueue)
    {
        bool wantsDepth = (pass.input & ScriptableRenderPassInput.Depth) != ScriptableRenderPassInput.None;
        bool wantsNormals = (pass.input & ScriptableRenderPassInput.Normal) != ScriptableRenderPassInput.None;
        bool wantsColor = (pass.input & ScriptableRenderPassInput.Color) != ScriptableRenderPassInput.None;
        bool runsBeforeOpaques = pass.renderPassEvent <= RenderPassEvent.BeforeRenderingOpaques;

        summary.requiresDepthTexture |= wantsDepth;
        // A prepass is required when normals are requested, or when depth is needed
        // before the opaque pass has produced it.
        summary.requiresDepthPrepass |= wantsNormals || (wantsDepth && runsBeforeOpaques);
        summary.requiresNormalsTexture |= wantsNormals;
        summary.requiresColorTexture |= wantsColor;
    }
    return summary;
}
// Allocates the intermediate camera color and/or depth render targets described
// by 'descriptor' via a pooled command buffer.
void CreateCameraRenderTarget(ScriptableRenderContext context, ref RenderTextureDescriptor descriptor, bool createColor, bool createDepth)
{
    CommandBuffer cmd = CommandBufferPool.Get();
    using (new ProfilingScope(cmd, Profiling.createCameraRenderTarget))
    {
        if (createColor)
        {
            // When no separate depth attachment exists, the color target carries
            // its own depth buffer.
            bool needsDepthBuffer = m_ActiveCameraDepthAttachment == RenderTargetHandle.CameraTarget;
            RenderTextureDescriptor colorDesc = descriptor;
            colorDesc.useMipMap = false;
            colorDesc.autoGenerateMips = false;
            colorDesc.depthBufferBits = needsDepthBuffer ? k_DepthStencilBufferBits : 0;
            cmd.GetTemporaryRT(m_ActiveCameraColorAttachment.id, colorDesc, FilterMode.Bilinear);
        }

        if (createDepth)
        {
            RenderTextureDescriptor depthDesc = descriptor;
            depthDesc.useMipMap = false;
            depthDesc.autoGenerateMips = false;
#if ENABLE_VR && ENABLE_XR_MODULE
            // XRTODO: Enabled this line for non-XR pass? URP copy depth pass is already capable of handling MSAA.
            depthDesc.bindMS = depthDesc.msaaSamples > 1 && !SystemInfo.supportsMultisampleAutoResolve && (SystemInfo.supportsMultisampledTextures != 0);
#endif
            depthDesc.colorFormat = RenderTextureFormat.Depth;
            depthDesc.depthBufferBits = k_DepthStencilBufferBits;
            cmd.GetTemporaryRT(m_ActiveCameraDepthAttachment.id, depthDesc, FilterMode.Point);
        }
    }
    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
}
// Returns true when MSAA targets must be resolved with an explicit pass rather
// than relying on an implicit/automatic resolve.
bool PlatformRequiresExplicitMsaaResolve()
{
#if UNITY_EDITOR
    // In the editor play-mode we use a Game View Render Texture, with
    // samples count forced to 1 so we always need to do an explicit MSAA resolve.
    return true;
#else
    // On Metal/iOS the MSAA resolve is done implicitly as part of the renderpass;
    // the same holds when the device supports auto-resolve. In either of those
    // cases no extra intermediate pass is needed.
    bool resolveIsImplicit = SystemInfo.supportsMultisampleAutoResolve
        || SystemInfo.graphicsDeviceType == GraphicsDeviceType.Metal;
    return !resolveIsImplicit;
#endif
}
/// <summary>
/// Checks if the pipeline needs to create an intermediate render texture.
/// </summary>
/// <param name="cameraData">CameraData contains all relevant render target information for the camera.</param>
/// <seealso cref="CameraData"/>
/// <returns>Return true if pipeline needs to render to an intermediate render texture.</returns>
bool RequiresIntermediateColorTexture(ref CameraData cameraData)
{
    // When rendering a camera stack we always create an intermediate render texture
    // to composite camera results. We create it upon rendering the Base camera.
    bool isStackedBaseCamera = cameraData.renderType == CameraRenderType.Base && !cameraData.resolveFinalTarget;
    if (isStackedBaseCamera)
        return true;

    // Always force rendering into intermediate color texture if deferred rendering mode is selected.
    // Reason: without intermediate color texture, the target camera texture is y-flipped.
    // However, the target camera texture is bound during gbuffer pass and deferred pass.
    // Gbuffer pass will not be y-flipped because it is MRT (see ScriptableRenderContext implementation),
    // while deferred pass will be y-flipped, which breaks rendering.
    // This incurs an extra blit at the end of rendering.
    if (this.actualRenderingMode == RenderingMode.Deferred)
        return true;

    bool isSceneViewCamera = cameraData.isSceneViewCamera;
    var targetDescriptor = cameraData.cameraTargetDescriptor;

    bool needsExplicitMsaaResolve = targetDescriptor.msaaSamples > 1 && PlatformRequiresExplicitMsaaResolve();
    bool renderScaleInUse = !Mathf.Approximately(cameraData.renderScale, 1.0f);
    bool backbufferDimensionCompatible = targetDescriptor.dimension == TextureDimension.Tex2D;
#if ENABLE_VR && ENABLE_XR_MODULE
    if (cameraData.xr.enabled)
        backbufferDimensionCompatible = cameraData.xr.renderTargetDesc.dimension == targetDescriptor.dimension;
#endif

    bool requiresBlitForOffscreenCamera = cameraData.postProcessEnabled || cameraData.requiresOpaqueTexture || needsExplicitMsaaResolve || !cameraData.isDefaultViewport;

    // Off-screen cameras (rendering into a target texture, excluding the scene view)
    // only need an intermediate texture for the blit-triggering features above.
    bool isOffscreenRender = cameraData.targetTexture != null && !isSceneViewCamera;
    if (isOffscreenRender)
        return requiresBlitForOffscreenCamera;

    bool isCapturing = cameraData.captureActions != null;
    return requiresBlitForOffscreenCamera || isSceneViewCamera || renderScaleInUse || cameraData.isHdrEnabled ||
        !backbufferDimensionCompatible || isCapturing || cameraData.requireSrgbConversion;
}
// Returns true when the current camera/device configuration allows copying depth
// into a texture (via a depth render target or CopyTexture support).
bool CanCopyDepth(ref CameraData cameraData)
{
    // TODO: We don't have support to highp Texture2DMS currently and this breaks depth precision.
    // currently disabling it until shader changes kick in.
    //bool msaaDepthResolve = msaaEnabledForCamera && SystemInfo.supportsMultisampledTextures != 0;
    bool msaaDepthResolve = false;

    bool cameraUsesMsaa = cameraData.cameraTargetDescriptor.msaaSamples > 1;
    bool canCopyTexture = SystemInfo.copyTextureSupport != CopyTextureSupport.None;
    bool canUseDepthTarget = RenderingUtils.SupportsRenderTextureFormat(RenderTextureFormat.Depth);

    // Non-MSAA cameras can copy depth whenever either mechanism is available.
    bool supportsDepthCopy = !cameraUsesMsaa && (canUseDepthTarget || canCopyTexture);
    return supportsDepthCopy || msaaDepthResolve;
}
}
}

View File

@@ -0,0 +1,199 @@
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.ProjectWindowCallback;
#endif
using System;
using UnityEngine.Scripting.APIUpdating;
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// Serialized asset describing the configuration and shader resources used to
/// construct a <c>ForwardRenderer</c> instance.
/// </summary>
[Serializable, ReloadGroup, ExcludeFromPreset]
[MovedFrom("UnityEngine.Rendering.LWRP")]
public class ForwardRendererData : ScriptableRendererData
{
#if UNITY_EDITOR
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1812")]
    internal class CreateForwardRendererAsset : EndNameEditAction
    {
        // Invoked by the editor once the user confirms the new asset's name.
        public override void Action(int instanceId, string pathName, string resourceFile)
        {
            var instance = CreateInstance<ForwardRendererData>();
            instance.postProcessData = PostProcessData.GetDefaultPostProcessData();
            AssetDatabase.CreateAsset(instance, pathName);
            // Populate unassigned resource references from the package defaults.
            ResourceReloader.ReloadAllNullIn(instance, UniversalRenderPipelineAsset.packagePath);
            Selection.activeObject = instance;
        }
    }

    // Project-window menu entry that starts the asset-creation/naming flow.
    [MenuItem("Assets/Create/Rendering/Universal Render Pipeline/Forward Renderer", priority = CoreUtils.assetCreateMenuPriority2)]
    static void CreateForwardRendererData()
    {
        ProjectWindowUtil.StartNameEditingIfProjectWindowExists(0, CreateInstance<CreateForwardRendererAsset>(), "CustomForwardRendererData.asset", null, null);
    }
#endif

    /// <summary>
    /// Utility shaders used by the forward renderer; null entries are reloaded
    /// from the package paths given in the <c>Reload</c> attributes.
    /// </summary>
    [Serializable, ReloadGroup]
    public sealed class ShaderResources
    {
        [Reload("Shaders/Utils/Blit.shader")]
        public Shader blitPS;

        [Reload("Shaders/Utils/CopyDepth.shader")]
        public Shader copyDepthPS;

        [Obsolete("Obsolete, this feature will be supported by new 'ScreenSpaceShadows' renderer feature")]
        public Shader screenSpaceShadowPS;

        [Reload("Shaders/Utils/Sampling.shader")]
        public Shader samplingPS;

        [Reload("Shaders/Utils/StencilDeferred.shader")]
        public Shader stencilDeferredPS;

        [Reload("Shaders/Utils/FallbackError.shader")]
        public Shader fallbackErrorPS;

        [Reload("Shaders/Utils/MaterialError.shader")]
        public Shader materialErrorPS;
    }

    // Post-processing resources used by this renderer.
    public PostProcessData postProcessData = null;

#if ENABLE_VR && ENABLE_XR_MODULE
    // XR-specific resources; reloaded from the package when null.
    [Reload("Runtime/Data/XRSystemData.asset")]
    public XRSystemData xrSystemData = null;
#endif

    // Shader resource container; see OnEnable for the reload behavior when null.
    public ShaderResources shaders = null;

    [SerializeField] LayerMask m_OpaqueLayerMask = -1;
    [SerializeField] LayerMask m_TransparentLayerMask = -1;
    [SerializeField] StencilStateData m_DefaultStencilState = new StencilStateData() { passOperation = StencilOp.Replace }; // This default state is compatible with deferred renderer.
    [SerializeField] bool m_ShadowTransparentReceive = true;
    [SerializeField] RenderingMode m_RenderingMode = RenderingMode.Forward;
    [SerializeField] bool m_AccurateGbufferNormals = false;
    //[SerializeField] bool m_TiledDeferredShading = false;

    /// <summary>
    /// Creates the runtime renderer instance from this data asset.
    /// </summary>
    protected override ScriptableRenderer Create()
    {
        if (!Application.isPlaying)
        {
            ReloadAllNullProperties();
        }
        return new ForwardRenderer(this);
    }

    /// <summary>
    /// Use this to configure how to filter opaque objects.
    /// </summary>
    public LayerMask opaqueLayerMask
    {
        get => m_OpaqueLayerMask;
        set
        {
            SetDirty();
            m_OpaqueLayerMask = value;
        }
    }

    /// <summary>
    /// Use this to configure how to filter transparent objects.
    /// </summary>
    public LayerMask transparentLayerMask
    {
        get => m_TransparentLayerMask;
        set
        {
            SetDirty();
            m_TransparentLayerMask = value;
        }
    }

    /// <summary>
    /// Default stencil state applied to rendered objects.
    /// </summary>
    public StencilStateData defaultStencilState
    {
        get => m_DefaultStencilState;
        set
        {
            SetDirty();
            m_DefaultStencilState = value;
        }
    }

    /// <summary>
    /// True if transparent objects receive shadows.
    /// </summary>
    public bool shadowTransparentReceive
    {
        get => m_ShadowTransparentReceive;
        set
        {
            SetDirty();
            m_ShadowTransparentReceive = value;
        }
    }

    /// <summary>
    /// Rendering mode.
    /// </summary>
    public RenderingMode renderingMode
    {
        get => m_RenderingMode;
        set
        {
            SetDirty();
            m_RenderingMode = value;
        }
    }

    /// <summary>
    /// Use octahedron normal vector encoding for gbuffer normals.
    /// The overhead is negligible from desktop GPUs, while it should be avoided for mobile GPUs.
    /// </summary>
    public bool accurateGbufferNormals
    {
        get => m_AccurateGbufferNormals;
        set
        {
            SetDirty();
            m_AccurateGbufferNormals = value;
        }
    }

    /*
    public bool tiledDeferredShading
    {
        get => m_TiledDeferredShading;
        set
        {
            SetDirty();
            m_TiledDeferredShading = value;
        }
    }
    */

    protected override void OnEnable()
    {
        base.OnEnable();

        // Upon asset creation, OnEnable is called and `shaders` reference is not yet initialized
        // We need to call the OnEnable for data migration when updating from old versions of UniversalRP that
        // serialized resources in a different format. Early returning here when OnEnable is called
        // upon asset creation is fine because we guarantee new assets get created with all resources initialized.
        if (shaders == null)
            return;
        ReloadAllNullProperties();
    }

    // Reloads any null resource references from the package (editor only).
    private void ReloadAllNullProperties()
    {
#if UNITY_EDITOR
        ResourceReloader.TryReloadAllNullIn(this, UniversalRenderPipelineAsset.packagePath);
#if ENABLE_VR && ENABLE_XR_MODULE
        ResourceReloader.TryReloadAllNullIn(xrSystemData, UniversalRenderPipelineAsset.packagePath);
#endif
#endif
    }
}
}

View File

@@ -0,0 +1,130 @@
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!114 &-8081582795363580827
MonoBehaviour:
m_ObjectHideFlags: 11
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: d0353a89b1f911e48b9e16bdc9f2e058, type: 3}
m_Name:
m_EditorClassIdentifier:
version: 4
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Name: Lit
m_Shader: {fileID: 4800000, guid: 933532a4fcc9baf4fa0491de14d08ed7, type: 3}
m_ShaderKeywords:
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: 2000
stringTagMap:
RenderType: Opaque
disabledShaderPasses: []
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _BaseMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _BumpMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ClearCoatMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _Cube:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailAlbedoMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailMask:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailNormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _EmissionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MainTex:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MetallicGlossMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MetallicSpecGlossMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _OcclusionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ParallaxMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _SpecGlossMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Ints: []
m_Floats:
- _AlphaClip: 0
- _Blend: 0
- _BumpScale: 1
- _ClearCoat: 0
- _ClearCoatMask: 0
- _Cull: 2
- _Cutoff: 0.5
- _DetailNormalMapScale: 1
- _DstBlend: 0
- _EnvironmentReflections: 1
- _GlossMapScale: 1
- _Glossiness: 0.5
- _GlossinessSource: 0
- _GlossyReflections: 1
- _Metallic: 0
- _Mode: 0
- _OcclusionStrength: 1
- _Parallax: 0.02
- _QueueOffset: 0
- _ReceiveShadows: 1
- _ReflectionSource: 0
- _Shininess: 1
- _Smoothness: 0.5
- _SmoothnessTextureChannel: 0
- _SpecSource: 0
- _SpecularHighlights: 1
- _SrcBlend: 1
- _Surface: 0
- _UVSec: 0
- _WorkflowMode: 1
- _ZWrite: 1
m_Colors:
- _BaseColor: {r: 0.5, g: 0.5, b: 0.5, a: 1}
- _Color: {r: 0.5, g: 0.5, b: 0.5, a: 1}
- _EmissionColor: {r: 0, g: 0, b: 0, a: 1}
- _SpecColor: {r: 1, g: 1, b: 1, a: 1}
m_BuildTextureStacks: []

View File

@@ -0,0 +1,127 @@
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!114 &-4934386220503690563
MonoBehaviour:
m_ObjectHideFlags: 11
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: d0353a89b1f911e48b9e16bdc9f2e058, type: 3}
m_Name:
m_EditorClassIdentifier:
version: 4
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Name: ParticlesUnlit
m_Shader: {fileID: 4800000, guid: 0406db5a14f94604a8c57ccfbc9f3b46, type: 3}
m_ShaderKeywords: _RECEIVE_SHADOWS_OFF
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: 3050
stringTagMap:
RenderType: Transparent
disabledShaderPasses:
- ALWAYS
- SHADOWCASTER
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _BaseMap:
m_Texture: {fileID: 10300, guid: 0000000000000000f000000000000000, type: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _BumpMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailAlbedoMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailMask:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailNormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _EmissionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MainTex:
m_Texture: {fileID: 10300, guid: 0000000000000000f000000000000000, type: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MetallicGlossMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _OcclusionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ParallaxMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Ints: []
m_Floats:
- _AlphaClip: 0
- _BaseColorMode: 0
- _Blend: 0
- _BlendOp: 0
- _BumpScale: 1
- _CameraFadingEnabled: 0
- _CameraFarFadeDistance: 2
- _CameraNearFadeDistance: 1
- _ColorMode: 0
- _Cull: 2
- _Cutoff: 0.5
- _DetailNormalMapScale: 1
- _DistortionBlend: 0.5
- _DistortionEnabled: 0
- _DistortionStrength: 1
- _DistortionStrengthScaled: 0
- _DstBlend: 10
- _EmissionEnabled: 0
- _EnvironmentReflections: 1
- _FlipbookBlending: 0
- _FlipbookMode: 0
- _GlossMapScale: 1
- _Glossiness: 0.5
- _InvFade: 1
- _LightingEnabled: 0
- _Metallic: 0
- _Mode: 2
- _OcclusionStrength: 1
- _Parallax: 0.02
- _QueueOffset: 0
- _SmoothnessTextureChannel: 0
- _SoftParticlesEnabled: 0
- _SoftParticlesFarFadeDistance: 1
- _SoftParticlesNearFadeDistance: 0
- _SpecularHighlights: 1
- _SrcBlend: 5
- _Surface: 1
- _UVSec: 0
- _ZWrite: 0
m_Colors:
- _BaseColor: {r: 1, g: 1, b: 1, a: 1}
- _BaseColorAddSubDiff: {r: 0, g: 0, b: 0, a: 0}
- _CameraFadeParams: {r: 0, g: Infinity, b: 0, a: 0}
- _Color: {r: 1, g: 1, b: 1, a: 1}
- _ColorAddSubDiff: {r: 0, g: 0, b: 0, a: 0}
- _EmissionColor: {r: 0, g: 0, b: 0, a: 1}
- _SoftParticleFadeParams: {r: 0, g: 0, b: 0, a: 0}
m_BuildTextureStacks: []

View File

@@ -0,0 +1,109 @@
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Name: SimpleLit
m_Shader: {fileID: 4800000, guid: 8d2bb70cbf9db8d4da26e15b26e74248, type: 3}
m_ShaderKeywords: _GLOSSINESS_FROM_BASE_ALPHA _SPECULAR_COLOR
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: 2000
stringTagMap:
RenderType: Opaque
disabledShaderPasses: []
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _BaseMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _BumpMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _Cube:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailAlbedoMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailMask:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailNormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _EmissionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MainTex:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ParallaxMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _SpecGlossMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Ints: []
m_Floats:
- _AlphaClip: 0
- _Blend: 0
- _BumpScale: 1
- _Cull: 2
- _Cutoff: 0.5
- _DetailNormalMapScale: 1
- _DstBlend: 0
- _EnvironmentReflections: 1
- _GlossMapScale: 1
- _Glossiness: 0.5
- _GlossinessSource: 0
- _Mode: 0
- _Parallax: 0.02
- _QueueOffset: 0
- _ReceiveShadows: 1
- _ReflectionSource: 0
- _Shininess: 0.5
- _Smoothness: 0.5
- _SmoothnessSource: 1
- _SmoothnessTextureChannel: 0
- _SpecSource: 0
- _SpecularHighlights: 0
- _SrcBlend: 1
- _Surface: 0
- _UVSec: 0
- _ZWrite: 1
m_Colors:
- _BaseColor: {r: 0.5, g: 0.5, b: 0.5, a: 0.5}
- _Color: {r: 0.5, g: 0.5, b: 0.5, a: 1}
- _EmissionColor: {r: 0, g: 0, b: 0, a: 1}
- _SpecColor: {r: 0.5, g: 0.5, b: 0.5, a: 0.5}
m_BuildTextureStacks: []
--- !u!114 &2591765247069500558
MonoBehaviour:
m_ObjectHideFlags: 11
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: d0353a89b1f911e48b9e16bdc9f2e058, type: 3}
m_Name:
m_EditorClassIdentifier:
version: 4

View File

@@ -0,0 +1,45 @@
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Name: Sprite-Lit-Default
m_Shader: {fileID: 4800000, guid: e260cfa7296ee7642b167f1eb5be5023, type: 3}
m_ShaderKeywords: ETC1_EXTERNAL_ALPHA
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: -1
stringTagMap: {}
disabledShaderPasses: []
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _AlphaTex:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MainTex:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MaskTex:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _NormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Ints: []
m_Floats:
- _EnableExternalAlpha: 0
m_Colors:
- _Color: {r: 1, g: 1, b: 1, a: 1}
- _Flip: {r: 1, g: 1, b: 1, a: 1}
- _RendererColor: {r: 1, g: 1, b: 1, a: 1}
m_BuildTextureStacks: []

View File

@@ -0,0 +1,44 @@
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Name: Sprite-Unlit-Default
m_Shader: {fileID: 10753, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords: ETC1_EXTERNAL_ALPHA
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: -1
stringTagMap: {}
disabledShaderPasses: []
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _AlphaTex:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MainTex:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MaskTex:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _NormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Floats:
- PixelSnap: 0
- _EnableExternalAlpha: 0
m_Colors:
- _Color: {r: 1, g: 1, b: 1, a: 1}
- _Flip: {r: 1, g: 1, b: 1, a: 1}
- _RendererColor: {r: 1, g: 1, b: 1, a: 1}

View File

@@ -0,0 +1,168 @@
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!114 &-7681603175492883006
MonoBehaviour:
m_ObjectHideFlags: 11
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: d0353a89b1f911e48b9e16bdc9f2e058, type: 3}
m_Name:
m_EditorClassIdentifier:
version: 4
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Name: TerrainLit
m_Shader: {fileID: 4800000, guid: 69c1f799e772cb6438f56c23efccb782, type: 3}
m_ShaderKeywords: _TERRAIN_INSTANCED_PERPIXEL_NORMAL
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: -1
stringTagMap: {}
disabledShaderPasses: []
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _BumpMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _Control:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailAlbedoMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailMask:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailNormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _EmissionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MainTex:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _Mask0:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _Mask1:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _Mask2:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _Mask3:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MetallicGlossMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _Normal0:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _Normal1:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _Normal2:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _Normal3:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _OcclusionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ParallaxMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _SpecGlossMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _Splat0:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _Splat1:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _Splat2:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _Splat3:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _TerrainHolesTexture:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Ints: []
m_Floats:
- _BumpScale: 1
- _Cutoff: 0.5
- _DetailNormalMapScale: 1
- _DstBlend: 0
- _EnableHeightBlend: 0
- _EnableInstancedPerPixelNormal: 1
- _EnvironmentReflections: 1
- _GlossMapScale: 1
- _Glossiness: 0.5
- _HeightTransition: 0
- _Metallic: 0
- _Metallic0: 0
- _Metallic1: 0
- _Metallic2: 0
- _Metallic3: 0
- _Mode: 0
- _NumLayersCount: 1
- _OcclusionStrength: 1
- _Parallax: 0.02
- _Smoothness0: 1
- _Smoothness1: 1
- _Smoothness2: 1
- _Smoothness3: 1
- _SmoothnessTextureChannel: 0
- _SpecularHighlights: 1
- _SrcBlend: 1
- _TERRAIN_INSTANCED_PERPIXEL_NORMAL: 0
- _UVSec: 0
- _WorkflowMode: 1
- _ZWrite: 1
m_Colors:
- _BaseColor: {r: 1, g: 1, b: 1, a: 1}
- _Color: {r: 1, g: 1, b: 1, a: 1}
- _EmissionColor: {r: 0, g: 0, b: 0, a: 1}
- _SpecColor: {r: 0.19999996, g: 0.19999996, b: 0.19999996, a: 1}
m_BuildTextureStacks: []

View File

@@ -0,0 +1,39 @@
using System;
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// Volume component holding the user-facing settings of the Bloom post-processing effect.
/// </summary>
[Serializable, VolumeComponentMenu("Post-processing/Bloom")]
public sealed class Bloom : VolumeComponent, IPostProcessComponent
{
    [Tooltip("Filters out pixels under this level of brightness. Value is in gamma-space.")]
    public MinFloatParameter threshold = new MinFloatParameter(0.9f, 0f);

    [Tooltip("Strength of the bloom filter.")]
    public MinFloatParameter intensity = new MinFloatParameter(0f, 0f);

    [Tooltip("Changes the extent of veiling effects.")]
    public ClampedFloatParameter scatter = new ClampedFloatParameter(0.7f, 0f, 1f);

    [Tooltip("Clamps pixels to control the bloom amount.")]
    public MinFloatParameter clamp = new MinFloatParameter(65472f, 0f);

    [Tooltip("Global tint of the bloom filter.")]
    public ColorParameter tint = new ColorParameter(Color.white, false, false, true);

    [Tooltip("Use bicubic sampling instead of bilinear sampling for the upsampling passes. This is slightly more expensive but helps getting smoother visuals.")]
    public BoolParameter highQualityFiltering = new BoolParameter(false);

    [Tooltip("The number of final iterations to skip in the effect processing sequence.")]
    public ClampedIntParameter skipIterations = new ClampedIntParameter(1, 0, 16);

    [Tooltip("Dirtiness texture to add smudges or dust to the bloom effect.")]
    public TextureParameter dirtTexture = new TextureParameter(null);

    [Tooltip("Amount of dirtiness.")]
    public MinFloatParameter dirtIntensity = new MinFloatParameter(0f, 0f);

    // The effect contributes only when intensity has been raised above zero.
    public bool IsActive() => intensity.value > 0f;

    // Declared not tile-compatible by this component.
    public bool IsTileCompatible() => false;
}
}

View File

@@ -0,0 +1,50 @@
using System;
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// Volume component holding the per-channel weights of the Channel Mixer
/// color-grading effect. Each parameter is the contribution (in percent,
/// clamped to [-200, 200]) of one input channel to one output channel.
/// </summary>
[Serializable, VolumeComponentMenu("Post-processing/Channel Mixer")]
public sealed class ChannelMixer : VolumeComponent, IPostProcessComponent
{
    [Tooltip("Modify influence of the red channel in the overall mix.")]
    public ClampedFloatParameter redOutRedIn = new ClampedFloatParameter(100f, -200f, 200f);

    [Tooltip("Modify influence of the green channel in the overall mix.")]
    public ClampedFloatParameter redOutGreenIn = new ClampedFloatParameter(0f, -200f, 200f);

    [Tooltip("Modify influence of the blue channel in the overall mix.")]
    public ClampedFloatParameter redOutBlueIn = new ClampedFloatParameter(0f, -200f, 200f);

    [Tooltip("Modify influence of the red channel in the overall mix.")]
    public ClampedFloatParameter greenOutRedIn = new ClampedFloatParameter(0f, -200f, 200f);

    [Tooltip("Modify influence of the green channel in the overall mix.")]
    public ClampedFloatParameter greenOutGreenIn = new ClampedFloatParameter(100f, -200f, 200f);

    [Tooltip("Modify influence of the blue channel in the overall mix.")]
    public ClampedFloatParameter greenOutBlueIn = new ClampedFloatParameter(0f, -200f, 200f);

    [Tooltip("Modify influence of the red channel in the overall mix.")]
    public ClampedFloatParameter blueOutRedIn = new ClampedFloatParameter(0f, -200f, 200f);

    [Tooltip("Modify influence of the green channel in the overall mix.")]
    public ClampedFloatParameter blueOutGreenIn = new ClampedFloatParameter(0f, -200f, 200f);

    [Tooltip("Modify influence of the blue channel in the overall mix.")]
    public ClampedFloatParameter blueOutBlueIn = new ClampedFloatParameter(100f, -200f, 200f);

    // Active when any weight deviates from the identity mix
    // (100% on the diagonal, 0% everywhere else).
    public bool IsActive()
    {
        return redOutRedIn.value != 100f
            || redOutGreenIn.value != 0f
            || redOutBlueIn.value != 0f
            || greenOutRedIn.value != 0f
            || greenOutGreenIn.value != 100f
            || greenOutBlueIn.value != 0f
            || blueOutRedIn.value != 0f
            || blueOutGreenIn.value != 0f
            || blueOutBlueIn.value != 100f;
    }

    // Declared tile-compatible by this component.
    public bool IsTileCompatible() => true;
}
}

View File

@@ -0,0 +1,15 @@
using System;
namespace UnityEngine.Rendering.Universal
{
[Serializable, VolumeComponentMenu("Post-processing/Chromatic Aberration")]
/// <summary>
/// Volume component controlling the chromatic aberration post-process.
/// </summary>
public sealed class ChromaticAberration : VolumeComponent, IPostProcessComponent
{
    [Tooltip("Amount of tangential distortion.")]
    public ClampedFloatParameter intensity = new ClampedFloatParameter(0f, 0f, 1f);

    /// <summary>The effect only contributes once some intensity has been dialed in.</summary>
    public bool IsActive()
    {
        return intensity.value > 0f;
    }

    /// <summary>This effect is flagged as not tile-compatible.</summary>
    public bool IsTileCompatible()
    {
        return false;
    }
}
}

View File

@@ -0,0 +1,34 @@
using System;
namespace UnityEngine.Rendering.Universal
{
[Serializable, VolumeComponentMenu("Post-processing/Color Adjustments")]
/// <summary>
/// Volume component grouping the basic color grading controls:
/// exposure, contrast, color filter, hue shift and saturation.
/// </summary>
public sealed class ColorAdjustments : VolumeComponent, IPostProcessComponent
{
    [Tooltip("Adjusts the overall exposure of the scene in EV100. This is applied after HDR effect and right before tonemapping so it won't affect previous effects in the chain.")]
    public FloatParameter postExposure = new FloatParameter(0f);

    [Tooltip("Expands or shrinks the overall range of tonal values.")]
    public ClampedFloatParameter contrast = new ClampedFloatParameter(0f, -100f, 100f);

    [Tooltip("Tint the render by multiplying a color.")]
    public ColorParameter colorFilter = new ColorParameter(Color.white, true, false, true);

    [Tooltip("Shift the hue of all colors.")]
    public ClampedFloatParameter hueShift = new ClampedFloatParameter(0f, -180f, 180f);

    [Tooltip("Pushes the intensity of all colors.")]
    public ClampedFloatParameter saturation = new ClampedFloatParameter(0f, -100f, 100f);

    /// <summary>Active whenever any control has been moved off its neutral default.</summary>
    public bool IsActive()
    {
        bool isNeutral = postExposure.value == 0f
            && contrast.value == 0f
            && colorFilter == Color.white
            && hueShift == 0f
            && saturation == 0f;
        return !isNeutral;
    }

    /// <summary>Pure per-pixel grading, so tile-compatible.</summary>
    public bool IsTileCompatible() => true;
}
}

View File

@@ -0,0 +1,22 @@
using System;
namespace UnityEngine.Rendering.Universal
{
[Serializable, VolumeComponentMenu("Post-processing/Color Curves")]
/// <summary>
/// Volume component exposing the grading curves (master/RGB remaps plus the
/// hue/saturation/luminance "versus" curves).
/// </summary>
public sealed class ColorCurves : VolumeComponent, IPostProcessComponent
{
    // Identity remap used by master/red/green/blue: straight line from (0,0) to (1,1).
    static TextureCurve IdentityCurve() => new TextureCurve(new[] { new Keyframe(0f, 0f, 1f, 1f), new Keyframe(1f, 1f, 1f, 1f) }, 0f, false, new Vector2(0f, 1f));

    // Empty curve defaulting to 0.5; 'loop' wraps the domain for hue-driven curves.
    static TextureCurve FlatCurve(bool loop) => new TextureCurve(new Keyframe[] {}, 0.5f, loop, new Vector2(0f, 1f));

    /// <summary>Master luminance remap curve.</summary>
    public TextureCurveParameter master = new TextureCurveParameter(IdentityCurve());
    /// <summary>Red channel remap curve.</summary>
    public TextureCurveParameter red = new TextureCurveParameter(IdentityCurve());
    /// <summary>Green channel remap curve.</summary>
    public TextureCurveParameter green = new TextureCurveParameter(IdentityCurve());
    /// <summary>Blue channel remap curve.</summary>
    public TextureCurveParameter blue = new TextureCurveParameter(IdentityCurve());
    /// <summary>Hue shift as a function of input hue (looping domain).</summary>
    public TextureCurveParameter hueVsHue = new TextureCurveParameter(FlatCurve(true));
    /// <summary>Saturation as a function of input hue (looping domain).</summary>
    public TextureCurveParameter hueVsSat = new TextureCurveParameter(FlatCurve(true));
    /// <summary>Saturation as a function of input saturation.</summary>
    public TextureCurveParameter satVsSat = new TextureCurveParameter(FlatCurve(false));
    /// <summary>Saturation as a function of input luminance.</summary>
    public TextureCurveParameter lumVsSat = new TextureCurveParameter(FlatCurve(false));

    /// <summary>Curves are always treated as active.</summary>
    public bool IsActive() => true;

    /// <summary>Curve lookups are per-pixel, so tile-compatible.</summary>
    public bool IsTileCompatible() => true;
}
}

View File

@@ -0,0 +1,47 @@
using System;
using UnityEngine.Experimental.Rendering;
namespace UnityEngine.Rendering.Universal
{
[Serializable, VolumeComponentMenu("Post-processing/Color Lookup")]
/// <summary>
/// Volume component applying a user-supplied 2D strip-layout LUT to the final image.
/// </summary>
public sealed class ColorLookup : VolumeComponent, IPostProcessComponent
{
    [Tooltip("A custom 2D texture lookup table to apply.")]
    public TextureParameter texture = new TextureParameter(null);

    [Tooltip("How much of the lookup texture will contribute to the color grading effect.")]
    public ClampedFloatParameter contribution = new ClampedFloatParameter(1f, 0f, 1f);

    /// <summary>Active when the LUT contributes and passes validation.</summary>
    public bool IsActive() => contribution.value > 0f && ValidateLUT();

    /// <summary>LUT application is per-pixel, so tile-compatible.</summary>
    public bool IsTileCompatible() => true;

    /// <summary>
    /// Validates the assigned LUT against the pipeline's color grading LUT size:
    /// height == size, width == size * size, and a non-sRGB format.
    /// </summary>
    public bool ValidateLUT()
    {
        var pipelineAsset = UniversalRenderPipeline.asset;
        if (pipelineAsset == null || texture.value == null)
            return false;

        int lutSize = pipelineAsset.colorGradingLutSize;
        if (texture.value.height != lutSize)
            return false;

        // Only strip-layout Texture2D or 2D RenderTexture LUTs are accepted.
        if (texture.value is Texture2D tex2D)
            return tex2D.width == lutSize * lutSize
                && !GraphicsFormatUtility.IsSRGBFormat(tex2D.graphicsFormat);

        if (texture.value is RenderTexture renderTex)
            return renderTex.dimension == TextureDimension.Tex2D
                && renderTex.width == lutSize * lutSize
                && !renderTex.sRGB;

        // Any other texture type is rejected.
        return false;
    }
}
}

View File

@@ -0,0 +1,61 @@
using System;
namespace UnityEngine.Rendering.Universal
{
/// <summary>Depth-of-field rendering technique selector.</summary>
public enum DepthOfFieldMode
{
/// <summary>Depth of field disabled.</summary>
Off,
Gaussian, // Non physical, fast, small radius, far blur only
/// <summary>Physically-based bokeh blur (slower).</summary>
Bokeh
}
[Serializable, VolumeComponentMenu("Post-processing/Depth Of Field")]
/// <summary>
/// Volume component driving depth of field. Gaussian settings apply to the fast mode,
/// the camera-lens settings (focus distance, aperture, blades...) to the Bokeh mode.
/// </summary>
public sealed class DepthOfField : VolumeComponent, IPostProcessComponent
{
    [Tooltip("Use \"Gaussian\" for a faster but non physical depth of field; \"Bokeh\" for a more realistic but slower depth of field.")]
    public DepthOfFieldModeParameter mode = new DepthOfFieldModeParameter(DepthOfFieldMode.Off);

    // --- Gaussian mode settings ---
    [Tooltip("The distance at which the blurring will start.")]
    public MinFloatParameter gaussianStart = new MinFloatParameter(10f, 0f);
    [Tooltip("The distance at which the blurring will reach its maximum radius.")]
    public MinFloatParameter gaussianEnd = new MinFloatParameter(30f, 0f);
    [Tooltip("The maximum radius of the gaussian blur. Values above 1 may show under-sampling artifacts.")]
    public ClampedFloatParameter gaussianMaxRadius = new ClampedFloatParameter(1f, 0.5f, 1.5f);
    [Tooltip("Use higher quality sampling to reduce flickering and improve the overall blur smoothness.")]
    public BoolParameter highQualitySampling = new BoolParameter(false);

    // --- Bokeh mode settings ---
    [Tooltip("The distance to the point of focus.")]
    public MinFloatParameter focusDistance = new MinFloatParameter(10f, 0.1f);
    [Tooltip("The ratio of aperture (known as f-stop or f-number). The smaller the value is, the shallower the depth of field is.")]
    public ClampedFloatParameter aperture = new ClampedFloatParameter(5.6f, 1f, 32f);
    [Tooltip("The distance between the lens and the film. The larger the value is, the shallower the depth of field is.")]
    public ClampedFloatParameter focalLength = new ClampedFloatParameter(50f, 1f, 300f);
    [Tooltip("The number of aperture blades.")]
    public ClampedIntParameter bladeCount = new ClampedIntParameter(5, 3, 9);
    [Tooltip("The curvature of aperture blades. The smaller the value is, the more visible aperture blades are. A value of 1 will make the bokeh perfectly circular.")]
    public ClampedFloatParameter bladeCurvature = new ClampedFloatParameter(1f, 0f, 1f);
    [Tooltip("The rotation of aperture blades in degrees.")]
    public ClampedFloatParameter bladeRotation = new ClampedFloatParameter(0f, -180f, 180f);

    /// <summary>
    /// Active when a mode is selected, the device supports shader level 35+,
    /// and (for Gaussian) more than one render target is available.
    /// </summary>
    public bool IsActive()
    {
        if (mode.value == DepthOfFieldMode.Off)
            return false;
        if (SystemInfo.graphicsShaderLevel < 35)
            return false;
        if (mode.value == DepthOfFieldMode.Gaussian)
            return SystemInfo.supportedRenderTargetCount > 1;
        return true;
    }

    /// <summary>Depth of field gathers neighboring pixels, so it is not tile-compatible.</summary>
    public bool IsTileCompatible() => false;
}
/// <summary>A <see cref="VolumeParameter"/> holding a <see cref="DepthOfFieldMode"/> value.</summary>
[Serializable]
public sealed class DepthOfFieldModeParameter : VolumeParameter<DepthOfFieldMode>
{
    /// <summary>Creates the parameter with an initial value and override flag.</summary>
    public DepthOfFieldModeParameter(DepthOfFieldMode value, bool overrideState = false)
        : base(value, overrideState) {}
}
}

View File

@@ -0,0 +1,42 @@
using System;
namespace UnityEngine.Rendering.Universal
{
/// <summary>Built-in film grain texture presets; Custom uses a user-supplied texture.</summary>
public enum FilmGrainLookup
{
Thin1,
Thin2,
Medium1,
Medium2,
Medium3,
Medium4,
Medium5,
Medium6,
Large01,
Large02,
/// <summary>Use the texture assigned to <c>FilmGrain.texture</c>.</summary>
Custom
}
[Serializable, VolumeComponentMenu("Post-processing/FilmGrain")]
/// <summary>
/// Volume component applying an animated film grain texture over the final image.
/// </summary>
public sealed class FilmGrain : VolumeComponent, IPostProcessComponent
{
    [Tooltip("The type of grain to use. You can select a preset or provide your own texture by selecting Custom.")]
    public FilmGrainLookupParameter type = new FilmGrainLookupParameter(FilmGrainLookup.Thin1);
    // Fix: the tooltip was copy-pasted from the Vignette component and wrongly said "vignetting".
    [Tooltip("Amount of film grain applied on screen.")]
    public ClampedFloatParameter intensity = new ClampedFloatParameter(0f, 0f, 1f);
    [Tooltip("Controls the noisiness response curve based on scene luminance. Higher values mean less noise in light areas.")]
    public ClampedFloatParameter response = new ClampedFloatParameter(0.8f, 0f, 1f);
    [Tooltip("A tileable texture to use for the grain. The neutral value is 0.5 where no grain is applied.")]
    public NoInterpTextureParameter texture = new NoInterpTextureParameter(null);

    /// <summary>Active when intensity is positive and, for the Custom type, a grain texture is assigned.</summary>
    public bool IsActive() => intensity.value > 0f && (type.value != FilmGrainLookup.Custom || texture.value != null);

    /// <summary>Grain is a per-pixel overlay, so tile-compatible.</summary>
    public bool IsTileCompatible() => true;
}
/// <summary>A <see cref="VolumeParameter"/> holding a <see cref="FilmGrainLookup"/> value.</summary>
[Serializable]
public sealed class FilmGrainLookupParameter : VolumeParameter<FilmGrainLookup>
{
    /// <summary>Creates the parameter with an initial value and override flag.</summary>
    public FilmGrainLookupParameter(FilmGrainLookup value, bool overrideState = false)
        : base(value, overrideState) {}
}
}

View File

@@ -0,0 +1,31 @@
using System;
namespace UnityEngine.Rendering.Universal
{
[Serializable, VolumeComponentMenu("Post-processing/Lens Distortion")]
/// <summary>
/// Volume component controlling barrel/pincushion lens distortion.
/// </summary>
public sealed class LensDistortion : VolumeComponent, IPostProcessComponent
{
    [Tooltip("Total distortion amount.")]
    public ClampedFloatParameter intensity = new ClampedFloatParameter(0f, -1f, 1f);

    [Tooltip("Intensity multiplier on X axis. Set it to 0 to disable distortion on this axis.")]
    public ClampedFloatParameter xMultiplier = new ClampedFloatParameter(1f, 0f, 1f);

    [Tooltip("Intensity multiplier on Y axis. Set it to 0 to disable distortion on this axis.")]
    public ClampedFloatParameter yMultiplier = new ClampedFloatParameter(1f, 0f, 1f);

    [Tooltip("Distortion center point.")]
    public Vector2Parameter center = new Vector2Parameter(new Vector2(0.5f, 0.5f));

    [Tooltip("Global screen scaling.")]
    public ClampedFloatParameter scale = new ClampedFloatParameter(1f, 0.01f, 5f);

    /// <summary>Active when a non-zero intensity is set and at least one axis multiplier is enabled.</summary>
    public bool IsActive()
    {
        bool hasIntensity = Mathf.Abs(intensity.value) > 0;
        bool hasEnabledAxis = xMultiplier.value > 0f || yMultiplier.value > 0f;
        return hasIntensity && hasEnabledAxis;
    }

    /// <summary>Distortion resamples other screen positions, so it is not tile-compatible.</summary>
    public bool IsTileCompatible() => false;
}
}

View File

@@ -0,0 +1,27 @@
using System;
namespace UnityEngine.Rendering.Universal
{
[Serializable, VolumeComponentMenu("Post-processing/Lift, Gamma, Gain")]
/// <summary>
/// Volume component for the classic lift/gamma/gain trackball grading controls.
/// Each Vector4 stores an RGB trackball color in xyz and an offset in w.
/// </summary>
public sealed class LiftGammaGain : VolumeComponent, IPostProcessComponent
{
    [Tooltip("Controls the darkest portions of the render.")]
    public Vector4Parameter lift = new Vector4Parameter(new Vector4(1f, 1f, 1f, 0f));

    [Tooltip("Power function that controls mid-range tones.")]
    public Vector4Parameter gamma = new Vector4Parameter(new Vector4(1f, 1f, 1f, 0f));

    [Tooltip("Controls the lightest portions of the render.")]
    public Vector4Parameter gain = new Vector4Parameter(new Vector4(1f, 1f, 1f, 0f));

    /// <summary>Active whenever any trackball has been moved off its neutral state.</summary>
    public bool IsActive()
    {
        var neutral = new Vector4(1f, 1f, 1f, 0f);
        bool allNeutral = lift == neutral && gamma == neutral && gain == neutral;
        return !allNeutral;
    }

    /// <summary>Pure per-pixel grading, so tile-compatible.</summary>
    public bool IsTileCompatible() => true;
}
}

View File

@@ -0,0 +1,43 @@
using System;
namespace UnityEngine.Rendering.Universal
{
/// <summary>Selects which motions contribute to the blur.</summary>
public enum MotionBlurMode
{
/// <summary>Only camera motion is blurred.</summary>
CameraOnly,
/// <summary>Camera and per-object motion are blurred.</summary>
CameraAndObjects
}
/// <summary>Quality presets for the motion blur filter (trade performance for quality).</summary>
public enum MotionBlurQuality
{
Low,
Medium,
High
}
[Serializable, VolumeComponentMenu("Post-processing/Motion Blur")]
/// <summary>
/// Volume component controlling the motion blur post-process.
/// </summary>
public sealed class MotionBlur : VolumeComponent, IPostProcessComponent
{
    [Tooltip("The motion blur technique to use. If you don't need object motion blur, CameraOnly will result in better performance.")]
    public MotionBlurModeParameter mode = new MotionBlurModeParameter(MotionBlurMode.CameraOnly);

    [Tooltip("The quality of the effect. Lower presets will result in better performance at the expense of visual quality.")]
    public MotionBlurQualityParameter quality = new MotionBlurQualityParameter(MotionBlurQuality.Low);

    [Tooltip("The strength of the motion blur filter. Acts as a multiplier for velocities.")]
    public ClampedFloatParameter intensity = new ClampedFloatParameter(0f, 0f, 1f);

    [Tooltip("Sets the maximum length, as a fraction of the screen's full resolution, that the velocity resulting from Camera rotation can have. Lower values will improve performance.")]
    public ClampedFloatParameter clamp = new ClampedFloatParameter(0.05f, 0f, 0.2f);

    /// <summary>Active only for a positive intensity in CameraOnly mode.</summary>
    public bool IsActive()
    {
        return intensity.value > 0f && mode == MotionBlurMode.CameraOnly;
    }

    /// <summary>Motion blur gathers along velocity vectors, so it is not tile-compatible.</summary>
    public bool IsTileCompatible() => false;
}
/// <summary>A <see cref="VolumeParameter"/> holding a <see cref="MotionBlurMode"/> value.</summary>
[Serializable]
public sealed class MotionBlurModeParameter : VolumeParameter<MotionBlurMode>
{
    /// <summary>Creates the parameter with an initial value and override flag.</summary>
    public MotionBlurModeParameter(MotionBlurMode value, bool overrideState = false)
        : base(value, overrideState) {}
}
/// <summary>A <see cref="VolumeParameter"/> holding a <see cref="MotionBlurQuality"/> value.</summary>
[Serializable]
public sealed class MotionBlurQualityParameter : VolumeParameter<MotionBlurQuality>
{
    /// <summary>Creates the parameter with an initial value and override flag.</summary>
    public MotionBlurQualityParameter(MotionBlurQuality value, bool overrideState = false)
        : base(value, overrideState) {}
}
}

View File

@@ -0,0 +1,18 @@
using System;
namespace UnityEngine.Rendering.Universal
{
[Serializable, VolumeComponentMenu("Post-processing/Panini Projection")]
/// <summary>
/// Volume component controlling the Panini projection correction for wide-FOV renders.
/// </summary>
public sealed class PaniniProjection : VolumeComponent, IPostProcessComponent
{
    [Tooltip("Panini projection distance.")]
    public ClampedFloatParameter distance = new ClampedFloatParameter(0f, 0f, 1f);

    [Tooltip("Panini projection crop to fit.")]
    public ClampedFloatParameter cropToFit = new ClampedFloatParameter(1f, 0f, 1f);

    /// <summary>Active once the projection distance is raised above zero.</summary>
    public bool IsActive()
    {
        return distance.value > 0f;
    }

    /// <summary>Remaps screen positions, so it is not tile-compatible.</summary>
    public bool IsTileCompatible()
    {
        return false;
    }
}
}

View File

@@ -0,0 +1,39 @@
using System;
namespace UnityEngine.Rendering.Universal
{
[Serializable, VolumeComponentMenu("Post-processing/Shadows, Midtones, Highlights")]
/// <summary>
/// Volume component for tonal-range grading. Each Vector4 stores an RGB trackball
/// color in xyz and an offset in w; the Start/End values shape the range transitions.
/// </summary>
public sealed class ShadowsMidtonesHighlights : VolumeComponent, IPostProcessComponent
{
    [Tooltip("Controls the darkest portions of the render.")]
    public Vector4Parameter shadows = new Vector4Parameter(new Vector4(1f, 1f, 1f, 0f));

    [Tooltip("Power function that controls mid-range tones.")]
    public Vector4Parameter midtones = new Vector4Parameter(new Vector4(1f, 1f, 1f, 0f));

    [Tooltip("Controls the lightest portions of the render.")]
    public Vector4Parameter highlights = new Vector4Parameter(new Vector4(1f, 1f, 1f, 0f));

    [Tooltip("Start point of the transition between shadows and midtones.")]
    public MinFloatParameter shadowsStart = new MinFloatParameter(0f, 0f);

    [Tooltip("End point of the transition between shadows and midtones.")]
    public MinFloatParameter shadowsEnd = new MinFloatParameter(0.3f, 0f);

    [Tooltip("Start point of the transition between midtones and highlights.")]
    public MinFloatParameter highlightsStart = new MinFloatParameter(0.55f, 0f);

    [Tooltip("End point of the transition between midtones and highlights.")]
    public MinFloatParameter highlightsEnd = new MinFloatParameter(1f, 0f);

    /// <summary>Active whenever any trackball has been moved off its neutral state.</summary>
    public bool IsActive()
    {
        var neutral = new Vector4(1f, 1f, 1f, 0f);
        bool allNeutral = shadows == neutral && midtones == neutral && highlights == neutral;
        return !allNeutral;
    }

    /// <summary>Pure per-pixel grading, so tile-compatible.</summary>
    public bool IsTileCompatible() => true;
}
}

View File

@@ -0,0 +1,21 @@
using System;
namespace UnityEngine.Rendering.Universal
{
[Serializable, VolumeComponentMenu("Post-processing/Split Toning")]
/// <summary>
/// Volume component that tints shadows and highlights with separate colors.
/// </summary>
public sealed class SplitToning : VolumeComponent, IPostProcessComponent
{
    [Tooltip("The color to use for shadows.")]
    public ColorParameter shadows = new ColorParameter(Color.grey, false, false, true);

    [Tooltip("The color to use for highlights.")]
    public ColorParameter highlights = new ColorParameter(Color.grey, false, false, true);

    [Tooltip("Balance between the colors in the highlights and shadows.")]
    public ClampedFloatParameter balance = new ClampedFloatParameter(0f, -100f, 100f);

    /// <summary>Active when either tone has been moved off the neutral grey.</summary>
    public bool IsActive()
    {
        return shadows != Color.grey || highlights != Color.grey;
    }

    /// <summary>Pure per-pixel tinting, so tile-compatible.</summary>
    public bool IsTileCompatible() => true;
}
}

View File

@@ -0,0 +1,25 @@
using System;
namespace UnityEngine.Rendering.Universal
{
/// <summary>Tonemapping algorithm used at the end of the color grading chain.</summary>
public enum TonemappingMode
{
/// <summary>No tonemapping.</summary>
None,
Neutral, // Neutral tonemapper
ACES, // ACES Filmic reference tonemapper (custom approximation)
}
[Serializable, VolumeComponentMenu("Post-processing/Tonemapping")]
/// <summary>
/// Volume component selecting the tonemapping algorithm.
/// </summary>
public sealed class Tonemapping : VolumeComponent, IPostProcessComponent
{
    [Tooltip("Select a tonemapping algorithm to use for the color grading process.")]
    public TonemappingModeParameter mode = new TonemappingModeParameter(TonemappingMode.None);

    /// <summary>Active whenever a mode other than None is selected.</summary>
    public bool IsActive()
    {
        return mode.value != TonemappingMode.None;
    }

    /// <summary>Tonemapping is per-pixel, so tile-compatible.</summary>
    public bool IsTileCompatible() => true;
}
/// <summary>A <see cref="VolumeParameter"/> holding a <see cref="TonemappingMode"/> value.</summary>
[Serializable]
public sealed class TonemappingModeParameter : VolumeParameter<TonemappingMode>
{
    /// <summary>Creates the parameter with an initial value and override flag.</summary>
    public TonemappingModeParameter(TonemappingMode value, bool overrideState = false)
        : base(value, overrideState) {}
}
}

View File

@@ -0,0 +1,27 @@
using System;
namespace UnityEngine.Rendering.Universal
{
[Serializable, VolumeComponentMenu("Post-processing/Vignette")]
/// <summary>
/// Volume component darkening the screen edges with a configurable vignette.
/// </summary>
public sealed class Vignette : VolumeComponent, IPostProcessComponent
{
    [Tooltip("Vignette color.")]
    public ColorParameter color = new ColorParameter(Color.black, false, false, true);

    [Tooltip("Sets the vignette center point (screen center is [0.5,0.5]).")]
    public Vector2Parameter center = new Vector2Parameter(new Vector2(0.5f, 0.5f));

    [Tooltip("Amount of vignetting on screen.")]
    public ClampedFloatParameter intensity = new ClampedFloatParameter(0f, 0f, 1f);

    [Tooltip("Smoothness of the vignette borders.")]
    public ClampedFloatParameter smoothness = new ClampedFloatParameter(0.2f, 0.01f, 1f);

    [Tooltip("Should the vignette be perfectly round or be dependent on the current aspect ratio?")]
    public BoolParameter rounded = new BoolParameter(false);

    /// <summary>Active once the intensity is raised above zero.</summary>
    public bool IsActive()
    {
        return intensity.value > 0f;
    }

    /// <summary>Vignetting is per-pixel, so tile-compatible.</summary>
    public bool IsTileCompatible() => true;
}
}

View File

@@ -0,0 +1,18 @@
using System;
namespace UnityEngine.Rendering.Universal
{
[Serializable, VolumeComponentMenu("Post-processing/White Balance")]
/// <summary>
/// Volume component adjusting the white balance via color temperature and green/magenta tint.
/// </summary>
public sealed class WhiteBalance : VolumeComponent, IPostProcessComponent
{
    // Consistency fix: ranges previously mixed int (-100) and float (100f) literals;
    // all sibling components spell both bounds as floats.
    [Tooltip("Sets the white balance to a custom color temperature.")]
    public ClampedFloatParameter temperature = new ClampedFloatParameter(0f, -100f, 100f);

    [Tooltip("Sets the white balance to compensate for a green or magenta tint.")]
    public ClampedFloatParameter tint = new ClampedFloatParameter(0f, -100f, 100f);

    /// <summary>Active whenever either control has been moved off zero.</summary>
    public bool IsActive() => temperature.value != 0f || tint.value != 0f;

    /// <summary>Pure per-pixel grading, so tile-compatible.</summary>
    public bool IsTileCompatible() => true;
}
}

View File

@@ -0,0 +1,954 @@
using System;
using System.Collections.Generic;
using Unity.Collections;
namespace UnityEngine.Rendering.Universal.Internal
{
/// <summary>
/// Renders a shadow map atlas for additional shadow-casting Lights.
/// </summary>
public partial class AdditionalLightsShadowCasterPass : ScriptableRenderPass
{
// Caches Shader.PropertyToID handles for the additional-shadows uniform data.
// The IDs are assigned once in the pass constructor.
private static class AdditionalShadowsConstantBuffer
{
// Per-slice world-to-shadow matrices.
public static int _AdditionalLightsWorldToShadow;
// Per-light shadow parameters (strength, light type, etc. — see usage below).
public static int _AdditionalShadowParams;
// Sample offsets used by soft-shadow filtering.
public static int _AdditionalShadowOffset0;
public static int _AdditionalShadowOffset1;
public static int _AdditionalShadowOffset2;
public static int _AdditionalShadowOffset3;
// Atlas texel size information for the shadowmap.
public static int _AdditionalShadowmapSize;
}
// Describes one shadow-slice request: which light and slice it belongs to, the
// resolution it asked for, and (filled later) the square area allocated in the atlas.
internal struct ShadowResolutionRequest
{
public int visibleLightIndex;
public int perLightShadowSliceIndex;
public int requestedResolution;
public bool softShadow; // otherwise it's hard-shadow (no filtering)
public bool pointLightShadow; // otherwise it's spot light shadow (1 shadow slice instead of 6)
public int offsetX; // x coordinate of the square area allocated in the atlas for this shadow map
public int offsetY; // y coordinate of the square area allocated in the atlas for this shadow map
public int allocatedResolution; // width of the square area allocated in the atlas for this shadow map
// Atlas placement fields (offsetX/offsetY/allocatedResolution) start at zero and
// are assigned once the atlas layout has been computed.
public ShadowResolutionRequest(int _visibleLightIndex, int _perLightShadowSliceIndex, int _requestedResolution, bool _softShadow , bool _pointLightShadow)
{
visibleLightIndex = _visibleLightIndex;
perLightShadowSliceIndex = _perLightShadowSliceIndex;
requestedResolution = _requestedResolution;
softShadow = _softShadow;
pointLightShadow = _pointLightShadow;
offsetX = 0;
offsetY = 0;
allocatedResolution = 0;
}
}
// Shader property IDs for the structured-buffer (SSBO) data path.
static int m_AdditionalLightsWorldToShadow_SSBO;
static int m_AdditionalShadowParams_SSBO;
// True when the platform path uses structured buffers instead of uniform arrays.
bool m_UseStructuredBuffer;
// Bit depth of the shadowmap render target.
const int k_ShadowmapBufferBits = 16;
// Handle and texture for the additional-lights shadow atlas.
private RenderTargetHandle m_AdditionalLightsShadowmap;
RenderTexture m_AdditionalLightsShadowmapTexture;
// Atlas dimensions in texels.
int m_ShadowmapWidth;
int m_ShadowmapHeight;
// Per-slice render data (view/proj matrices, viewport in the atlas).
ShadowSliceData[] m_AdditionalLightsShadowSlices = null;
int[] m_VisibleLightIndexToAdditionalLightIndex = null; // maps a "global" visible light index (index to renderingData.lightData.visibleLights) to an "additional light index" (index to arrays _AdditionalLightsPosition, _AdditionalShadowParams, ...), or -1 if it is not an additional light (i.e if it is the main light)
int[] m_AdditionalLightIndexToVisibleLightIndex = null; // maps additional light index (index to arrays _AdditionalLightsPosition, _AdditionalShadowParams, ...) to its "global" visible light index (index to renderingData.lightData.visibleLights)
List<int> m_ShadowSliceToAdditionalLightIndex = new List<int>(); // For each shadow slice, store the "additional light indices" of the punctual light that casts it
List<int> m_GlobalShadowSliceIndexToPerLightShadowSliceIndex = new List<int>(); // For each shadow slice, store its "per-light shadow slice index" in the punctual light that casts it (can be up to 5 for point lights)
Vector4[] m_AdditionalLightIndexToShadowParams = null; // per-additional-light shadow info passed to the lighting shader
Matrix4x4[] m_AdditionalLightShadowSliceIndexTo_WorldShadowMatrix = null; // per-shadow-slice info passed to the lighting shader
List<ShadowResolutionRequest> m_ShadowResolutionRequests = new List<ShadowResolutionRequest>(); // intermediate array used to compute the final resolution of each shadow slice rendered in the frame
float[] m_VisibleLightIndexToCameraSquareDistance = null; // stores for each shadowed additional light its (squared) distance to camera ; used to sub-sort shadow requests according to how close their casting light is
ShadowResolutionRequest[] m_SortedShadowResolutionRequests = null;
int[] m_VisibleLightIndexToSortedShadowResolutionRequestsFirstSliceIndex = null; // for each visible light, store the index of its first shadow slice in m_SortedShadowResolutionRequests (for quicker access)
List<RectInt> m_UnusedAtlasSquareAreas = new List<RectInt>(); // this list tracks space available in the atlas
// True on platforms (Mobile, Switch) whose soft shadows use a box filter.
bool m_SupportsBoxFilterForShadows;
ProfilingSampler m_ProfilingSetupSampler = new ProfilingSampler("Setup Additional Shadows");
// Maximum number of punctual-light shadow slices the uniform-buffer path supports.
// Keep in sync with MAX_PUNCTUAL_LIGHT_SHADOW_SLICES_IN_UBO in Shadows.hlsl.
int MAX_PUNCTUAL_LIGHT_SHADOW_SLICES_IN_UBO
{
    get
    {
        // When the pipeline caps the additional light count (Mobile/GL), mirror that cap
        // here to reduce uniform block size and avoid shader performance or compilation issues.
        bool usesReducedLightCount =
            UniversalRenderPipeline.maxVisibleAdditionalLights != UniversalRenderPipeline.k_MaxVisibleAdditionalLightsNonMobile;
        return usesReducedLightCount ? UniversalRenderPipeline.maxVisibleAdditionalLights : 545;
    }
}
// Creates the pass, resolves all shader property IDs once, and preallocates the
// per-light bookkeeping arrays sized for the forward renderer's visible light limit.
public AdditionalLightsShadowCasterPass(RenderPassEvent evt)
{
base.profilingSampler = new ProfilingSampler(nameof(AdditionalLightsShadowCasterPass));
renderPassEvent = evt;
// Resolve shader property IDs for the uniform-buffer path.
AdditionalShadowsConstantBuffer._AdditionalLightsWorldToShadow = Shader.PropertyToID("_AdditionalLightsWorldToShadow");
AdditionalShadowsConstantBuffer._AdditionalShadowParams = Shader.PropertyToID("_AdditionalShadowParams");
AdditionalShadowsConstantBuffer._AdditionalShadowOffset0 = Shader.PropertyToID("_AdditionalShadowOffset0");
AdditionalShadowsConstantBuffer._AdditionalShadowOffset1 = Shader.PropertyToID("_AdditionalShadowOffset1");
AdditionalShadowsConstantBuffer._AdditionalShadowOffset2 = Shader.PropertyToID("_AdditionalShadowOffset2");
AdditionalShadowsConstantBuffer._AdditionalShadowOffset3 = Shader.PropertyToID("_AdditionalShadowOffset3");
AdditionalShadowsConstantBuffer._AdditionalShadowmapSize = Shader.PropertyToID("_AdditionalShadowmapSize");
m_AdditionalLightsShadowmap.Init("_AdditionalLightsShadowmapTexture");
// Property IDs for the structured-buffer path.
m_AdditionalLightsWorldToShadow_SSBO = Shader.PropertyToID("_AdditionalLightsWorldToShadow_SSBO");
m_AdditionalShadowParams_SSBO = Shader.PropertyToID("_AdditionalShadowParams_SSBO");
m_UseStructuredBuffer = RenderingUtils.useStructuredBuffer;
m_SupportsBoxFilterForShadows = Application.isMobilePlatform || SystemInfo.graphicsDeviceType == GraphicsDeviceType.Switch;
// Preallocated a fixed size. CommandBuffer.SetGlobal* does allow this data to grow.
int maxVisibleAdditionalLights = UniversalRenderPipeline.maxVisibleAdditionalLights;
const int maxMainLights = 1;
int maxVisibleLights = UniversalRenderPipeline.maxVisibleAdditionalLights + maxMainLights;
// These array sizes should be as big as ScriptableCullingParameters.maximumVisibleLights (that is defined during ScriptableRenderer.SetupCullingParameters).
// We initialize these array sizes with the number of visible lights allowed by the ForwardRenderer.
// The number of visible lights can become much higher when using the Deferred rendering path, we resize the arrays during Setup() if required.
m_AdditionalLightIndexToVisibleLightIndex = new int[maxVisibleLights];
m_VisibleLightIndexToAdditionalLightIndex = new int[maxVisibleLights];
m_VisibleLightIndexToSortedShadowResolutionRequestsFirstSliceIndex = new int[maxVisibleLights];
m_AdditionalLightIndexToShadowParams = new Vector4[maxVisibleLights];
m_VisibleLightIndexToCameraSquareDistance = new float[maxVisibleLights];
if (!m_UseStructuredBuffer)
{
// Uniform buffers are faster on some platforms, but they have stricter size limitations
m_AdditionalLightShadowSliceIndexTo_WorldShadowMatrix = new Matrix4x4[MAX_PUNCTUAL_LIGHT_SHADOW_SLICES_IN_UBO];
m_UnusedAtlasSquareAreas.Capacity = MAX_PUNCTUAL_LIGHT_SHADOW_SLICES_IN_UBO;
m_ShadowResolutionRequests.Capacity = MAX_PUNCTUAL_LIGHT_SHADOW_SLICES_IN_UBO;
}
}
// Number of shadow map slices a punctual light needs:
// 1 for a spot light, 6 for a point light (one per cube face), 0 otherwise.
private int GetPunctualLightShadowSlicesCount(in LightType lightType)
{
    if (lightType == LightType.Spot)
        return 1;
    if (lightType == LightType.Point)
        return 6;
    // Other light types are not handled by this pass.
    return 0;
}
// Magic numbers used to identify light type when rendering shadow receiver.
// Keep in sync with AdditionalLightRealtimeShadow code in com.unity.render-pipelines.universal/ShaderLibrary/Shadows.hlsl
private const float LightTypeIdentifierInShadowParams_Spot = 0;
private const float LightTypeIdentifierInShadowParams_Point = 1;
// Returns the guard angle that must be added to a frustum angle covering a projection map of resolution sliceResolutionInTexels,
// in order to also cover a guard band of size guardBandSizeInTexels around the projection map.
// Formula illustrated in https://i.ibb.co/wpW5Mnf/Calc-Guard-Angle.png
// Returns the guard angle (in degrees) that must be added to a frustum of angle
// frustumAngleInDegrees covering a projection map of sliceResolutionInTexels texels,
// so that it also covers a guard band of guardBandSizeInTexels texels around the map.
// Formula illustrated in https://i.ibb.co/wpW5Mnf/Calc-Guard-Angle.png
internal static float CalcGuardAngle(float frustumAngleInDegrees, float guardBandSizeInTexels, float sliceResolutionInTexels)
{
    float halfFrustum = (frustumAngleInDegrees * Mathf.Deg2Rad) / 2;
    // Widening the half-slice by half the guard band scales the half-angle tangent by this factor.
    float tangentScale = 1 + (guardBandSizeInTexels / 2) / (sliceResolutionInTexels / 2);
    float halfCovered = Mathf.Atan(Mathf.Tan(halfFrustum) * tangentScale);
    // The guard angle is twice the difference between the widened and original half-angles.
    return (halfCovered - halfFrustum) * 2 * Mathf.Rad2Deg;
}
// Smallest atlas allocations considered usable for punctual light shadows.
private const int kMinimumPunctualLightHardShadowResolution = 8;
private const int kMinimumPunctualLightSoftShadowResolution = 16;
// Minimal shadow map resolution required to have meaningful shadows visible during lighting
int MinimalPunctualLightShadowResolution(bool softShadow)
{
return softShadow ? kMinimumPunctualLightSoftShadowResolution : kMinimumPunctualLightHardShadowResolution;
}
// One-shot flags so the resolution warnings below are logged only once per configuration.
static bool m_IssuedMessageAboutPointLightHardShadowResolutionTooSmall = false;
static bool m_IssuedMessageAboutPointLightSoftShadowResolutionTooSmall = false;
// Returns the guard angle that must be added to a point light shadow face frustum angle
// in order to avoid shadows missing at the boundaries between cube faces.
// Returns the FOV bias (guard angle in degrees) added to each point-light cube-face
// frustum so adjacent faces overlap and no shadow gaps appear at face boundaries.
// Values are empirical, picked per resolution bucket; soft shadows need extra bias.
internal static float GetPointLightShadowFrustumFovBiasInDegrees(int shadowSliceResolution, bool shadowFiltering)
{
// Commented-out code below uses the theoretical formula to compute the required guard angle based on the number of additional
// texels that the projection should cover. It is close to HDRP's HDShadowUtils.CalcGuardAnglePerspective method.
// However, due to precision issues or other filterings performed at lighting for example, this formula also still requires a fudge factor.
// Since we only handle a fixed number of resolutions, we use empirical values instead.
#if false
float fudgeFactor = 1.5f;
return fudgeFactor * CalcGuardAngle(90, shadowFiltering ? 5 : 1, shadowSliceResolution);
#endif
float fovBias = 4.00f;
// Empirical value found to remove gaps between point light shadow faces in test scenes.
// We can see that the guard angle is roughly proportional to the inverse of resolution https://docs.google.com/spreadsheets/d/1QrIZJn18LxVKq2-K1XS4EFRZcZdZOJTTKKhDN8Z1b_s
if (shadowSliceResolution <= kMinimumPunctualLightHardShadowResolution)
{
// Slice too small for meaningful shadows: warn once and keep the default bias.
if (!m_IssuedMessageAboutPointLightHardShadowResolutionTooSmall)
{
Debug.LogWarning("Too many additional punctual lights shadows, increase shadow atlas size or remove some shadowed lights");
m_IssuedMessageAboutPointLightHardShadowResolutionTooSmall = true; // Only output this once per shadow requests configuration
}
}
else if (shadowSliceResolution <= 16)
fovBias = 43.0f;
else if (shadowSliceResolution <= 32)
fovBias = 18.55f;
else if (shadowSliceResolution <= 64)
fovBias = 8.63f;
else if (shadowSliceResolution <= 128)
fovBias = 4.13f;
else if (shadowSliceResolution <= 256)
fovBias = 2.03f;
else if (shadowSliceResolution <= 512)
fovBias = 1.00f;
else if (shadowSliceResolution <= 1024)
fovBias = 0.50f;
if (shadowFiltering)
{
if (shadowSliceResolution <= kMinimumPunctualLightSoftShadowResolution)
{
if (!m_IssuedMessageAboutPointLightSoftShadowResolutionTooSmall)
{
Debug.LogWarning("Too many additional punctual lights shadows to use Soft Shadows. Increase shadow atlas size, remove some shadowed lights or use Hard Shadows.");
// With such small resolutions no fovBias can give good visual results
m_IssuedMessageAboutPointLightSoftShadowResolutionTooSmall = true; // Only output this once per shadow requests configuration
}
}
else if (shadowSliceResolution <= 32)
fovBias += 9.35f;
else if (shadowSliceResolution <= 64)
fovBias += 4.07f;
else if (shadowSliceResolution <= 128)
fovBias += 1.77f;
else if (shadowSliceResolution <= 256)
fovBias += 0.85f;
else if (shadowSliceResolution <= 512)
fovBias += 0.39f;
else if (shadowSliceResolution <= 1024)
fovBias += 0.17f;
// These values were verified to work on platforms for which m_SupportsBoxFilterForShadows is true (Mobile, Switch).
// TODO: Investigate finer-tuned values for those platforms. Soft shadows are implemented differently for them.
}
return fovBias;
}
// One-shot flag so the "too many shadow slices" warning is logged only once.
bool m_IssuedMessageAboutShadowSlicesTooMany = false;
Vector4 m_MainLightShadowParams; // Shadow Fade parameters _MainLightShadowParams.zw are actually also used by AdditionalLights
// Adapted from InsertionSort() in com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDDynamicShadowAtlas.cs
// Orders shadow requests so that the ones we would drop first (when the atlas is full) end up last:
// decreasing requestedResolution, then hard shadows before soft, spot before point,
// then closest-to-camera first, then increasing visibleIndex, grouping each light's slices in slice-index order.
internal void InsertionSort(ShadowResolutionRequest[] array, int startIndex, int lastIndex)
{
    // Returns true when request 'a' must be placed before request 'b' in the sorted array.
    bool TakesPriorityOver(ShadowResolutionRequest a, ShadowResolutionRequest b)
    {
        if (a.requestedResolution != b.requestedResolution)
            return a.requestedResolution > b.requestedResolution;           // bigger shadow maps first
        if (a.softShadow != b.softShadow)
            return !a.softShadow;                                           // hard shadows before soft
        if (a.pointLightShadow != b.pointLightShadow)
            return !a.pointLightShadow;                                     // spot before point
        float aSquareDistance = m_VisibleLightIndexToCameraSquareDistance[a.visibleLightIndex];
        float bSquareDistance = m_VisibleLightIndexToCameraSquareDistance[b.visibleLightIndex];
        if (aSquareDistance != bSquareDistance)
            return aSquareDistance < bSquareDistance;                       // lights closer to the camera first
        if (a.visibleLightIndex != b.visibleLightIndex)
            return a.visibleLightIndex < b.visibleLightIndex;               // group slices of the same light together
        return a.perLightShadowSliceIndex < b.perLightShadowSliceIndex;     // slice #0 first within a light
    }

    for (int unsortedIndex = startIndex + 1; unsortedIndex < lastIndex; ++unsortedIndex)
    {
        var pending = array[unsortedIndex];
        int insertionPoint = unsortedIndex - 1;
        // Shift lower-priority entries right until the insertion point for 'pending' is found.
        while (insertionPoint >= 0 && TakesPriorityOver(pending, array[insertionPoint]))
        {
            array[insertionPoint + 1] = array[insertionPoint];
            --insertionPoint;
        }
        array[insertionPoint + 1] = pending;
    }
}
// Computes the smallest power-of-two factor by which all requested shadow resolutions must be divided
// so that the first endIndex requests could fit (by texel count) into an atlasWidth x atlasWidth atlas.
// This is only an area-based estimate; AtlasLayout() may still have to scale down further because of
// packing constraints.
int EstimateScaleFactorNeededToFitAllShadowsInAtlas(in ShadowResolutionRequest[] shadowResolutionRequests, int endIndex, int atlasWidth)
{
    // Widen to 64-bit before multiplying so neither the atlas texel count nor an individual
    // request's texel count can overflow Int32 for very large resolutions.
    long totalTexelsInShadowAtlas = (long)atlasWidth * atlasWidth;

    long totalTexelsInShadowRequests = 0;
    for (int shadowRequestIndex = 0; shadowRequestIndex < endIndex; ++shadowRequestIndex)
        totalTexelsInShadowRequests += (long)shadowResolutionRequests[shadowRequestIndex].requestedResolution * shadowResolutionRequests[shadowRequestIndex].requestedResolution;

    // Dividing each request's resolution by estimatedScaleFactor divides its area by estimatedScaleFactor^2,
    // hence the squared factor on the atlas side of the comparison.
    int estimatedScaleFactor = 1;
    while (totalTexelsInShadowRequests > totalTexelsInShadowAtlas * estimatedScaleFactor * estimatedScaleFactor)
        estimatedScaleFactor *= 2;

    return estimatedScaleFactor;
}
// Assigns to each of the first totalShadowSlicesCount items in m_SortedShadowResolutionRequests a location in the shadow atlas based on requested resolutions.
// If necessary, scales down shadow maps active in the frame, to make all of them fit in the atlas.
void AtlasLayout(int atlasSize, int totalShadowSlicesCount, int estimatedScaleFactor)
{
    bool allShadowSlicesFitInAtlas = false;
    bool tooManyShadows = false;
    int shadowSlicesScaleFactor = estimatedScaleFactor;

    // Keep halving slice resolutions until either every slice finds a free square in the atlas,
    // or a slice would drop below the minimal usable resolution (tooManyShadows).
    while (!allShadowSlicesFitInAtlas && !tooManyShadows)
    {
        m_UnusedAtlasSquareAreas.Clear();
        // Start each attempt with the whole atlas as a single free square.
        m_UnusedAtlasSquareAreas.Add(new RectInt(0, 0, atlasSize, atlasSize));

        allShadowSlicesFitInAtlas = true;

        // Requests are pre-sorted by decreasing resolution, so larger slices are placed first.
        for (int shadowRequestIndex = 0; shadowRequestIndex < totalShadowSlicesCount; ++shadowRequestIndex)
        {
            var resolution = m_SortedShadowResolutionRequests[shadowRequestIndex].requestedResolution / shadowSlicesScaleFactor;

            if (resolution < MinimalPunctualLightShadowResolution(m_SortedShadowResolutionRequests[shadowRequestIndex].softShadow))
            {
                tooManyShadows = true;
                break;
            }

            bool foundSpaceInAtlas = false;

            // Try to find free space in the atlas
            for (int unusedAtlasSquareAreaIndex = 0; unusedAtlasSquareAreaIndex < m_UnusedAtlasSquareAreas.Count; ++unusedAtlasSquareAreaIndex)
            {
                var atlasArea = m_UnusedAtlasSquareAreas[unusedAtlasSquareAreaIndex];
                var atlasAreaWidth = atlasArea.width;
                var atlasAreaHeight = atlasArea.height;
                var atlasAreaX = atlasArea.x;
                var atlasAreaY = atlasArea.y;
                // Free areas are squares, so checking the width is sufficient.
                if (atlasAreaWidth >= resolution)
                {
                    // we can use this atlas area for the shadow request
                    m_SortedShadowResolutionRequests[shadowRequestIndex].offsetX = atlasAreaX;
                    m_SortedShadowResolutionRequests[shadowRequestIndex].offsetY = atlasAreaY;
                    m_SortedShadowResolutionRequests[shadowRequestIndex].allocatedResolution = resolution;

                    // this atlas space is not available anymore, so remove it from the list
                    m_UnusedAtlasSquareAreas.RemoveAt(unusedAtlasSquareAreaIndex);

                    // make sure to split space so that the rest of this square area can be used
                    int remainingShadowRequestsCount = totalShadowSlicesCount - shadowRequestIndex - 1; // (no need to add more than that)
                    int newSquareAreasCount = 0;
                    int newSquareAreaWidth = resolution; // we split the area in squares of same size
                    int newSquareAreaHeight = resolution;
                    var newSquareAreaX = atlasAreaX;
                    var newSquareAreaY = atlasAreaY;
                    // Walk the parent square row by row, left to right, carving resolution-sized sub-squares.
                    while (newSquareAreasCount < remainingShadowRequestsCount)
                    {
                        newSquareAreaX += newSquareAreaWidth;
                        if (newSquareAreaX + newSquareAreaWidth > (atlasAreaX + atlasAreaWidth))
                        {
                            // End of the row: wrap to the start of the next row of sub-squares.
                            newSquareAreaX = atlasAreaX;
                            newSquareAreaY += newSquareAreaHeight;
                            if (newSquareAreaY + newSquareAreaHeight > (atlasAreaY + atlasAreaHeight))
                                break;
                        }

                        // replace the space we removed previously by new smaller squares (inserting them in this order ensures shadow maps will be packed at the side of the atlas, without gaps)
                        m_UnusedAtlasSquareAreas.Insert(unusedAtlasSquareAreaIndex + newSquareAreasCount, new RectInt(newSquareAreaX, newSquareAreaY, newSquareAreaWidth, newSquareAreaHeight));
                        ++newSquareAreasCount;
                    }

                    foundSpaceInAtlas = true;
                    break;
                }
            }

            if (!foundSpaceInAtlas)
            {
                allShadowSlicesFitInAtlas = false;
                break;
            }
        }

        if (!allShadowSlicesFitInAtlas && !tooManyShadows)
            shadowSlicesScaleFactor *= 2; // halve every slice's resolution and retry the layout
    }

    if (!m_IssuedMessageAboutShadowMapsTooBig && tooManyShadows)
    {
        Debug.LogWarning($"Too many additional punctual lights shadows. URP tried reducing shadow resolutions by {shadowSlicesScaleFactor} but it was still too much. Increase shadow atlas size, decrease big shadow resolutions, or reduce the number of shadow maps active in the same frame (currently was {totalShadowSlicesCount}).");
        m_IssuedMessageAboutShadowMapsTooBig = true; // Only output this once per shadow requests configuration
    }

    if (!m_IssuedMessageAboutShadowMapsRescale && shadowSlicesScaleFactor > 1)
    {
        Debug.Log($"Reduced additional punctual light shadows resolution by {shadowSlicesScaleFactor} to make {totalShadowSlicesCount} shadow maps fit in the {atlasSize}x{atlasSize} shadow atlas. To avoid this, increase shadow atlas size, decrease big shadow resolutions, or reduce the number of shadow maps active in the same frame");
        m_IssuedMessageAboutShadowMapsRescale = true; // Only output this once per shadow requests configuration
    }
}
// One-shot warning flags; they are reset when the per-camera shadow request hash changes so the
// corresponding messages can be issued again for a new configuration.
bool m_IssuedMessageAboutShadowMapsRescale = false;
bool m_IssuedMessageAboutShadowMapsTooBig = false;
bool m_IssuedMessageAboutRemovedShadowSlices = false;

Dictionary<int, ulong> m_ShadowRequestsHashes = new Dictionary<int, ulong>(); // used to keep track of changes in the shadow requests and shadow atlas configuration (per camera)
// Maps a shadowmap atlas resolution to its base-2 logarithm, for packing into the request hash.
// Resolutions below 512 (and any unexpected value) all map to 8.
ulong ResolutionLog2ForHash(int resolution)
{
    return resolution switch
    {
        4096 => 12,
        2048 => 11,
        1024 => 10,
        512 => 9,
        _ => 8,
    };
}
// Builds a 64-bit fingerprint of the current shadow request configuration: atlas size,
// number of shadowed point lights, number of soft-shadowed lights, and the number of
// requests at each supported resolution. Used per camera to detect configuration changes.
ulong ComputeShadowRequestHash(ref RenderingData renderingData)
{
    ulong shadowedPointLightCount = 0;
    ulong softShadowedLightCount = 0;
    ulong resolution0128Count = 0;
    ulong resolution0256Count = 0;
    ulong resolution0512Count = 0;
    ulong resolution1024Count = 0;
    ulong resolution2048Count = 0;
    ulong resolution4096Count = 0;

    var visibleLights = renderingData.lightData.visibleLights;
    for (int visibleLightIndex = 0; visibleLightIndex < visibleLights.Length; ++visibleLightIndex)
    {
        // Only shadow-casting additional lights contribute to the hash.
        if (!IsValidShadowCastingLight(ref renderingData.lightData, visibleLightIndex))
            continue;

        if (visibleLights[visibleLightIndex].lightType == LightType.Point)
            ++shadowedPointLightCount;
        if (visibleLights[visibleLightIndex].light.shadows == LightShadows.Soft)
            ++softShadowedLightCount;

        switch (renderingData.shadowData.resolution[visibleLightIndex])
        {
            case 128: ++resolution0128Count; break;
            case 256: ++resolution0256Count; break;
            case 512: ++resolution0512Count; break;
            case 1024: ++resolution1024Count; break;
            case 2048: ++resolution2048Count; break;
            case 4096: ++resolution4096Count; break;
        }
    }

    // Pack every counter into one ulong. A counter exceeding its allotted bit range would bleed
    // into the neighboring field, which is acceptable for a change-detection hash.
    ulong shadowRequestsHash = ResolutionLog2ForHash(renderingData.shadowData.additionalLightsShadowmapWidth) - 8; // bits [00~02]
    shadowRequestsHash |= shadowedPointLightCount << 3;  // bits [03~10]
    shadowRequestsHash |= softShadowedLightCount << 11;  // bits [11~18]
    shadowRequestsHash |= resolution0128Count << 19;     // bits [19~26]
    shadowRequestsHash |= resolution0256Count << 27;     // bits [27~34]
    shadowRequestsHash |= resolution0512Count << 35;     // bits [35~42]
    shadowRequestsHash |= resolution1024Count << 43;     // bits [43~49]
    shadowRequestsHash |= resolution2048Count << 50;     // bits [50~56]
    shadowRequestsHash |= resolution4096Count << 57;     // bits [57~63]
    return shadowRequestsHash;
}
/// <summary>
/// Gathers shadow requests for all shadowed additional punctual lights, sorts and packs them into
/// the shadow atlas, and fills the per-light shadow matrices and shadow parameters used later by
/// Configure()/Execute().
/// </summary>
/// <param name="renderingData">Per-frame rendering state (lights, shadow settings, culling results).</param>
/// <returns>True when at least one additional light will render shadows this frame.</returns>
public bool Setup(ref RenderingData renderingData)
{
    using var profScope = new ProfilingScope(null, m_ProfilingSetupSampler);

    Clear();

    m_ShadowmapWidth = renderingData.shadowData.additionalLightsShadowmapWidth;
    m_ShadowmapHeight = renderingData.shadowData.additionalLightsShadowmapHeight;

    // In order to apply shadow fade to AdditionalLights, we need to set constants _MainLightShadowParams.zw used by function GetShadowFade in Shadows.hlsl.
    // However, we also have to make sure not to override _MainLightShadowParams.xy constants, that are used by MainLight only. Therefore we need to store these values in m_MainLightShadowParams and set them again during SetupAdditionalLightsShadowReceiverConstants.
    m_MainLightShadowParams = ShadowUtils.GetMainLightShadowParams(ref renderingData);

    var visibleLights = renderingData.lightData.visibleLights;
    int additionalLightsCount = renderingData.lightData.additionalLightsCount;
    int atlasWidth = renderingData.shadowData.additionalLightsShadowmapWidth;

    int totalShadowResolutionRequestsCount = 0; // Number of shadow slices that we would need for all shadowed additional (punctual) lights in the scene. We might have to ignore some of those requests if they do not fit in the shadow atlas.

    m_ShadowResolutionRequests.Clear();

    // Check changes in the shadow requests and shadow atlas configuration - compute shadow request/configuration hash
    if (!renderingData.cameraData.isPreviewCamera)
    {
        ulong newShadowRequestHash = ComputeShadowRequestHash(ref renderingData);
        ulong oldShadowRequestHash = 0;
        m_ShadowRequestsHashes.TryGetValue(renderingData.cameraData.camera.GetHashCode(), out oldShadowRequestHash);
        if (oldShadowRequestHash != newShadowRequestHash)
        {
            m_ShadowRequestsHashes[renderingData.cameraData.camera.GetHashCode()] = newShadowRequestHash;

            // config changed; reset error message flags as we might need to issue those messages again
            m_IssuedMessageAboutPointLightHardShadowResolutionTooSmall = false;
            m_IssuedMessageAboutPointLightSoftShadowResolutionTooSmall = false;
            m_IssuedMessageAboutShadowMapsRescale = false;
            m_IssuedMessageAboutShadowMapsTooBig = false;
            m_IssuedMessageAboutShadowSlicesTooMany = false;
            m_IssuedMessageAboutRemovedShadowSlices = false;
        }
    }

    // (Re)size the per-light bookkeeping arrays when more lights are visible than before.
    if (m_AdditionalLightIndexToVisibleLightIndex.Length < visibleLights.Length)
    {
        // Array "visibleLights" is returned by ScriptableRenderContext.Cull()
        // The maximum number of "visibleLights" that ScriptableRenderContext.Cull() should return, is defined by parameter ScriptableCullingParameters.maximumVisibleLights
        // Universal RP sets this "ScriptableCullingParameters.maximumVisibleLights" value during ScriptableRenderer.SetupCullingParameters.
        // When using Deferred rendering, it is possible to specify a very high number of visible lights.
        m_AdditionalLightIndexToVisibleLightIndex = new int[visibleLights.Length];
        m_VisibleLightIndexToAdditionalLightIndex = new int[visibleLights.Length];
        m_AdditionalLightIndexToShadowParams = new Vector4[visibleLights.Length];
        m_VisibleLightIndexToCameraSquareDistance = new float[visibleLights.Length];
        m_VisibleLightIndexToSortedShadowResolutionRequestsFirstSliceIndex = new int[visibleLights.Length];
    }

    // reset m_VisibleLightIndexClosenessToCamera
    for (int visibleLightIndex = 0; visibleLightIndex < m_VisibleLightIndexToCameraSquareDistance.Length; ++visibleLightIndex)
        m_VisibleLightIndexToCameraSquareDistance[visibleLightIndex] = float.MaxValue;

    // Collect one shadow resolution request per shadow slice of every valid shadow-casting light.
    for (int visibleLightIndex = 0; visibleLightIndex < visibleLights.Length; ++visibleLightIndex)
    {
        if (visibleLightIndex == renderingData.lightData.mainLightIndex)
            // Skip main directional light as it is not packed into the shadow atlas
            continue;

        if (IsValidShadowCastingLight(ref renderingData.lightData, visibleLightIndex))
        {
            // Point lights need 6 slices (one per cubemap face), spot lights need 1.
            int shadowSlicesCountForThisLight = GetPunctualLightShadowSlicesCount(visibleLights[visibleLightIndex].lightType);
            totalShadowResolutionRequestsCount += shadowSlicesCountForThisLight;

            for (int perLightShadowSliceIndex = 0; perLightShadowSliceIndex < shadowSlicesCountForThisLight; ++perLightShadowSliceIndex)
            {
                m_ShadowResolutionRequests.Add(new ShadowResolutionRequest(visibleLightIndex, perLightShadowSliceIndex, renderingData.shadowData.resolution[visibleLightIndex],
                    (visibleLights[visibleLightIndex].light.shadows == LightShadows.Soft), (visibleLights[visibleLightIndex].lightType == LightType.Point)));
            }

            // mark this light as casting shadows
            m_VisibleLightIndexToCameraSquareDistance[visibleLightIndex] = (renderingData.cameraData.camera.transform.position - visibleLights[visibleLightIndex].light.transform.position).sqrMagnitude;
        }
    }

    if (m_SortedShadowResolutionRequests == null || m_SortedShadowResolutionRequests.Length < totalShadowResolutionRequestsCount)
        m_SortedShadowResolutionRequests = new ShadowResolutionRequest[totalShadowResolutionRequestsCount];

    for (int shadowRequestIndex = 0; shadowRequestIndex < m_ShadowResolutionRequests.Count; ++shadowRequestIndex)
        m_SortedShadowResolutionRequests[shadowRequestIndex] = m_ShadowResolutionRequests[shadowRequestIndex];
    for (int sortedArrayIndex = totalShadowResolutionRequestsCount; sortedArrayIndex < m_SortedShadowResolutionRequests.Length; ++sortedArrayIndex)
        m_SortedShadowResolutionRequests[sortedArrayIndex].requestedResolution = 0; // reset unused entries

    // Order requests so that the ones we would drop first (when the atlas is full) end up last.
    InsertionSort(m_SortedShadowResolutionRequests, 0, totalShadowResolutionRequestsCount);

    // To avoid visual artifacts when there is not enough place in the atlas, we remove shadow slices that would be allocated a too small resolution.
    // When not using structured buffers, m_AdditionalLightShadowSliceIndexTo_WorldShadowMatrix.Length maps to _AdditionalLightsWorldToShadow in Shadows.hlsl
    // In that case we have to limit its size because uniform buffers cannot be higher than 64kb for some platforms.
    int totalShadowSlicesCount = m_UseStructuredBuffer ? totalShadowResolutionRequestsCount : Math.Min(totalShadowResolutionRequestsCount, MAX_PUNCTUAL_LIGHT_SHADOW_SLICES_IN_UBO);  // Number of shadow slices that we will actually be able to fit in the shadow atlas without causing visual artifacts.

    // Find biggest end index in m_SortedShadowResolutionRequests array, under which all shadow requests can be allocated a big enough shadow atlas slot, to not cause rendering artifacts
    bool allShadowsAfterStartIndexHaveEnoughResolution = false;
    int estimatedScaleFactor = 1;
    while (!allShadowsAfterStartIndexHaveEnoughResolution && totalShadowSlicesCount > 0)
    {
        estimatedScaleFactor = EstimateScaleFactorNeededToFitAllShadowsInAtlas(m_SortedShadowResolutionRequests, totalShadowSlicesCount, atlasWidth);

        // check if resolution of the least priority shadow slice request would be acceptable
        if (m_SortedShadowResolutionRequests[totalShadowSlicesCount - 1].requestedResolution >= estimatedScaleFactor * MinimalPunctualLightShadowResolution(m_SortedShadowResolutionRequests[totalShadowSlicesCount - 1].softShadow))
            allShadowsAfterStartIndexHaveEnoughResolution = true;
        else // Skip shadow requests for this light ; their resolution is too small to look any good
            totalShadowSlicesCount -= GetPunctualLightShadowSlicesCount(m_SortedShadowResolutionRequests[totalShadowSlicesCount - 1].pointLightShadow ? LightType.Point : LightType.Spot);
    }

    if (totalShadowSlicesCount < totalShadowResolutionRequestsCount)
    {
        if (!m_IssuedMessageAboutRemovedShadowSlices)
        {
            Debug.LogWarning($"Too many additional punctual lights shadows to look good, URP removed {totalShadowResolutionRequestsCount - totalShadowSlicesCount } shadow maps to make the others fit in the shadow atlas. To avoid this, increase shadow atlas size, remove some shadowed lights, replace soft shadows by hard shadows ; or replace point lights by spot lights");
            m_IssuedMessageAboutRemovedShadowSlices = true;  // Only output this once per shadow requests configuration
        }
    }
    for (int sortedArrayIndex = totalShadowSlicesCount; sortedArrayIndex < m_SortedShadowResolutionRequests.Length; ++sortedArrayIndex)
        m_SortedShadowResolutionRequests[sortedArrayIndex].requestedResolution = 0; // Reset entries that we cannot fit in the atlas

    // Reset the reverse lookup array
    for (int visibleLightIndex = 0; visibleLightIndex < m_VisibleLightIndexToSortedShadowResolutionRequestsFirstSliceIndex.Length; ++visibleLightIndex)
        m_VisibleLightIndexToSortedShadowResolutionRequestsFirstSliceIndex[visibleLightIndex] = -1;
    // Update the reverse lookup array (starting from the end of the array, in order to use index of slice#0 in case a same visibleLight has several shadowSlices)
    for (int sortedArrayIndex = totalShadowSlicesCount - 1; sortedArrayIndex >= 0; --sortedArrayIndex)
        m_VisibleLightIndexToSortedShadowResolutionRequestsFirstSliceIndex[m_SortedShadowResolutionRequests[sortedArrayIndex].visibleLightIndex] = sortedArrayIndex;

    // Pack the surviving slices into the atlas (possibly rescaling them further).
    AtlasLayout(atlasWidth, totalShadowSlicesCount, estimatedScaleFactor);

    if (m_AdditionalLightsShadowSlices == null || m_AdditionalLightsShadowSlices.Length < totalShadowSlicesCount)
        m_AdditionalLightsShadowSlices = new ShadowSliceData[totalShadowSlicesCount];

    if (m_AdditionalLightShadowSliceIndexTo_WorldShadowMatrix == null ||
        (m_UseStructuredBuffer && (m_AdditionalLightShadowSliceIndexTo_WorldShadowMatrix.Length < totalShadowSlicesCount)))   // m_AdditionalLightShadowSliceIndexTo_WorldShadowMatrix can be resized when using SSBO to pass shadow data (no size limitation)
        m_AdditionalLightShadowSliceIndexTo_WorldShadowMatrix = new Matrix4x4[totalShadowSlicesCount];

    // initialize _AdditionalShadowParams
    Vector4 defaultShadowParams = new Vector4(0 /*shadowStrength*/, 0, 0, -1 /*perLightFirstShadowSliceIndex*/);
    // shadowParams.x is used in RenderAdditionalShadowMapAtlas to skip shadow map rendering for non-shadow-casting lights
    // shadowParams.w is used in Lighting shader to find if Additional light casts shadows
    for (int i = 0; i < visibleLights.Length; ++i)
        m_AdditionalLightIndexToShadowParams[i] = defaultShadowParams;

    int validShadowCastingLightsCount = 0;
    bool supportsSoftShadows = renderingData.shadowData.supportsSoftShadows;
    int additionalLightIndex = -1;
    // Extract per-slice view/projection matrices and shadow parameters for every light that got atlas space.
    for (int visibleLightIndex = 0; visibleLightIndex < visibleLights.Length && m_ShadowSliceToAdditionalLightIndex.Count < totalShadowSlicesCount; ++visibleLightIndex)
    {
        VisibleLight shadowLight = visibleLights[visibleLightIndex];

        // Skip main directional light as it is not packed into the shadow atlas
        if (visibleLightIndex == renderingData.lightData.mainLightIndex)
        {
            m_VisibleLightIndexToAdditionalLightIndex[visibleLightIndex] = -1;
            continue;
        }

        ++additionalLightIndex; // ForwardLights.SetupAdditionalLightConstants skips main Light and thus uses a different index for additional lights
        m_AdditionalLightIndexToVisibleLightIndex[additionalLightIndex] = visibleLightIndex;
        m_VisibleLightIndexToAdditionalLightIndex[visibleLightIndex] = additionalLightIndex;

        LightType lightType = shadowLight.lightType;
        int perLightShadowSlicesCount = GetPunctualLightShadowSlicesCount(lightType);

        if ((m_ShadowSliceToAdditionalLightIndex.Count + perLightShadowSlicesCount) > totalShadowSlicesCount && IsValidShadowCastingLight(ref renderingData.lightData, visibleLightIndex))
        {
            if (!m_IssuedMessageAboutShadowSlicesTooMany)
            {
                // This case can especially happen in Deferred, where there can be a high number of visibleLights
                Debug.Log($"There are too many shadowed additional punctual lights active at the same time, URP will not render all the shadows. To ensure all shadows are rendered, reduce the number of shadowed additional lights in the scene ; make sure they are not active at the same time ; or replace point lights by spot lights (spot lights use less shadow maps than point lights).");
                m_IssuedMessageAboutShadowSlicesTooMany = true; // Only output this once
            }
            break;
        }

        int perLightFirstShadowSliceIndex = m_ShadowSliceToAdditionalLightIndex.Count; // shadowSliceIndex within the global array of all additional light shadow slices

        bool isValidShadowCastingLight = false;
        for (int perLightShadowSlice = 0; perLightShadowSlice < perLightShadowSlicesCount; ++perLightShadowSlice)
        {
            int globalShadowSliceIndex = m_ShadowSliceToAdditionalLightIndex.Count; // shadowSliceIndex within the global array of all additional light shadow slices

            bool lightRangeContainsShadowCasters = renderingData.cullResults.GetShadowCasterBounds(visibleLightIndex, out var shadowCastersBounds);
            if (lightRangeContainsShadowCasters)
            {
                // We need to iterate the lights even though additional lights are disabled because
                // cullResults.GetShadowCasterBounds() does the fence sync for the shadow culling jobs.
                if (!renderingData.shadowData.supportsAdditionalLightShadows)
                    continue;

                if (IsValidShadowCastingLight(ref renderingData.lightData, visibleLightIndex))
                {
                    if (m_VisibleLightIndexToSortedShadowResolutionRequestsFirstSliceIndex[visibleLightIndex] == -1)
                    {
                        // We could not find place in the shadow atlas for shadow maps of this light.
                        // Skip it.
                    }
                    else if (lightType == LightType.Spot)
                    {
                        bool success = ShadowUtils.ExtractSpotLightMatrix(ref renderingData.cullResults,
                            ref renderingData.shadowData,
                            visibleLightIndex,
                            out var shadowTransform,
                            out m_AdditionalLightsShadowSlices[globalShadowSliceIndex].viewMatrix,
                            out m_AdditionalLightsShadowSlices[globalShadowSliceIndex].projectionMatrix,
                            out m_AdditionalLightsShadowSlices[globalShadowSliceIndex].splitData);

                        if (success)
                        {
                            m_ShadowSliceToAdditionalLightIndex.Add(additionalLightIndex);
                            m_GlobalShadowSliceIndexToPerLightShadowSliceIndex.Add(perLightShadowSlice);
                            var light = shadowLight.light;
                            float shadowStrength = light.shadowStrength;
                            float softShadows = (supportsSoftShadows && light.shadows == LightShadows.Soft) ? 1.0f : 0.0f;
                            Vector4 shadowParams = new Vector4(shadowStrength, softShadows, LightTypeIdentifierInShadowParams_Spot, perLightFirstShadowSliceIndex);
                            m_AdditionalLightShadowSliceIndexTo_WorldShadowMatrix[globalShadowSliceIndex] = shadowTransform;
                            m_AdditionalLightIndexToShadowParams[additionalLightIndex] = shadowParams;
                            isValidShadowCastingLight = true;
                        }
                    }
                    else if (lightType == LightType.Point)
                    {
                        var sliceResolution = m_SortedShadowResolutionRequests[m_VisibleLightIndexToSortedShadowResolutionRequestsFirstSliceIndex[visibleLightIndex]].allocatedResolution;
                        float fovBias = GetPointLightShadowFrustumFovBiasInDegrees(sliceResolution, (shadowLight.light.shadows == LightShadows.Soft));
                        // Note: the same fovBias will also be used to compute ShadowUtils.GetShadowBias

                        bool success = ShadowUtils.ExtractPointLightMatrix(ref renderingData.cullResults,
                            ref renderingData.shadowData,
                            visibleLightIndex,
                            (CubemapFace)perLightShadowSlice,
                            fovBias,
                            out var shadowTransform,
                            out m_AdditionalLightsShadowSlices[globalShadowSliceIndex].viewMatrix,
                            out m_AdditionalLightsShadowSlices[globalShadowSliceIndex].projectionMatrix,
                            out m_AdditionalLightsShadowSlices[globalShadowSliceIndex].splitData);

                        if (success)
                        {
                            m_ShadowSliceToAdditionalLightIndex.Add(additionalLightIndex);
                            m_GlobalShadowSliceIndexToPerLightShadowSliceIndex.Add(perLightShadowSlice);
                            var light = shadowLight.light;
                            float shadowStrength = light.shadowStrength;
                            float softShadows = (supportsSoftShadows && light.shadows == LightShadows.Soft) ? 1.0f : 0.0f;
                            Vector4 shadowParams = new Vector4(shadowStrength, softShadows, LightTypeIdentifierInShadowParams_Point, perLightFirstShadowSliceIndex);
                            m_AdditionalLightShadowSliceIndexTo_WorldShadowMatrix[globalShadowSliceIndex] = shadowTransform;
                            m_AdditionalLightIndexToShadowParams[additionalLightIndex] = shadowParams;
                            isValidShadowCastingLight = true;
                        }
                    }
                }
            }
        }

        if (isValidShadowCastingLight)
            validShadowCastingLightsCount++;
    }

    // Lights that need to be rendered in the shadow map atlas
    if (validShadowCastingLightsCount == 0)
        return false;

    int shadowCastingLightsBufferCount = m_ShadowSliceToAdditionalLightIndex.Count;

    // Trim shadow atlas dimensions if possible (to avoid allocating texture space that will not be used)
    int atlasMaxX = 0;
    int atlasMaxY = 0;
    for (int sortedShadowResolutionRequestIndex = 0; sortedShadowResolutionRequestIndex < totalShadowSlicesCount; ++sortedShadowResolutionRequestIndex)
    {
        var shadowResolutionRequest = m_SortedShadowResolutionRequests[sortedShadowResolutionRequestIndex];
        atlasMaxX = Mathf.Max(atlasMaxX, shadowResolutionRequest.offsetX + shadowResolutionRequest.allocatedResolution);
        atlasMaxY = Mathf.Max(atlasMaxY, shadowResolutionRequest.offsetY + shadowResolutionRequest.allocatedResolution);
    }
    // ...but make sure we still use power-of-two dimensions (might perform better on some hardware)

    m_ShadowmapWidth = Mathf.NextPowerOfTwo(atlasMaxX);
    m_ShadowmapHeight = Mathf.NextPowerOfTwo(atlasMaxY);

    float oneOverAtlasWidth = 1.0f / m_ShadowmapWidth;
    float oneOverAtlasHeight = 1.0f / m_ShadowmapHeight;

    // Bake each slice's atlas scale/offset into its world-to-shadow matrix.
    Matrix4x4 sliceTransform;
    for (int globalShadowSliceIndex = 0; globalShadowSliceIndex < shadowCastingLightsBufferCount; ++globalShadowSliceIndex)
    {
        additionalLightIndex = m_ShadowSliceToAdditionalLightIndex[globalShadowSliceIndex];

        // We can skip the slice if strength is zero.
        if (Mathf.Approximately(m_AdditionalLightIndexToShadowParams[additionalLightIndex].x, 0.0f) || Mathf.Approximately(m_AdditionalLightIndexToShadowParams[additionalLightIndex].w, -1.0f))
            continue;

        int visibleLightIndex = m_AdditionalLightIndexToVisibleLightIndex[additionalLightIndex];
        int sortedShadowResolutionRequestFirstSliceIndex = m_VisibleLightIndexToSortedShadowResolutionRequestsFirstSliceIndex[visibleLightIndex];
        int perLightSliceIndex = m_GlobalShadowSliceIndexToPerLightShadowSliceIndex[globalShadowSliceIndex];
        var shadowResolutionRequest = m_SortedShadowResolutionRequests[sortedShadowResolutionRequestFirstSliceIndex + perLightSliceIndex];
        int sliceResolution = shadowResolutionRequest.allocatedResolution;

        sliceTransform = Matrix4x4.identity;
        sliceTransform.m00 = sliceResolution * oneOverAtlasWidth;
        sliceTransform.m11 = sliceResolution * oneOverAtlasHeight;

        m_AdditionalLightsShadowSlices[globalShadowSliceIndex].offsetX = shadowResolutionRequest.offsetX;
        m_AdditionalLightsShadowSlices[globalShadowSliceIndex].offsetY = shadowResolutionRequest.offsetY;
        m_AdditionalLightsShadowSlices[globalShadowSliceIndex].resolution = sliceResolution;

        sliceTransform.m03 = m_AdditionalLightsShadowSlices[globalShadowSliceIndex].offsetX * oneOverAtlasWidth;
        sliceTransform.m13 = m_AdditionalLightsShadowSlices[globalShadowSliceIndex].offsetY * oneOverAtlasHeight;

        // We bake scale and bias to each shadow map in the atlas in the matrix.
        // saves some instructions in shader.
        m_AdditionalLightShadowSliceIndexTo_WorldShadowMatrix[globalShadowSliceIndex] = sliceTransform * m_AdditionalLightShadowSliceIndexTo_WorldShadowMatrix[globalShadowSliceIndex];
    }

    return true;
}
/// <inheritdoc/>
public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor)
{
    // Allocate the atlas at the (possibly trimmed) dimensions computed in Setup(), then bind it
    // as the render target and clear it before shadow casters are rendered into it.
    m_AdditionalLightsShadowmapTexture = ShadowUtils.GetTemporaryShadowTexture(m_ShadowmapWidth, m_ShadowmapHeight, k_ShadowmapBufferBits);
    var atlasTarget = new RenderTargetIdentifier(m_AdditionalLightsShadowmapTexture);
    ConfigureTarget(atlasTarget);
    ConfigureClear(ClearFlag.All, Color.black);
}
/// <inheritdoc/>
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
    // Skip the whole pass when additional light shadows are disabled in the pipeline settings.
    if (renderingData.shadowData.supportsAdditionalLightShadows)
        RenderAdditionalShadowmapAtlas(ref context, ref renderingData.cullResults, ref renderingData.lightData, ref renderingData.shadowData);
}
/// <summary>
/// Releases the temporary shadowmap atlas allocated in Configure() once the camera finished rendering.
/// </summary>
/// <param name="cmd">Command buffer provided by the renderer; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="cmd"/> is null.</exception>
public override void OnCameraCleanup(CommandBuffer cmd)
{
    if (cmd == null)
        throw new ArgumentNullException(nameof(cmd)); // nameof keeps the message correct if the parameter is renamed

    if (m_AdditionalLightsShadowmapTexture)
    {
        RenderTexture.ReleaseTemporary(m_AdditionalLightsShadowmapTexture);
        m_AdditionalLightsShadowmapTexture = null;
    }
}
// Get the "additional light index" (used to index arrays _AdditionalLightsPosition, _AdditionalShadowParams, ...) from the "global" visible light index
// Function called by Deferred Renderer; returns -1 for indices outside the visible light range.
public int GetShadowLightIndexFromLightIndex(int visibleLightIndex)
{
    // The unsigned comparison rejects negative indices and indices past the end in a single test.
    bool isOutOfRange = (uint)visibleLightIndex >= (uint)m_VisibleLightIndexToAdditionalLightIndex.Length;
    return isOutOfRange ? -1 : m_VisibleLightIndexToAdditionalLightIndex[visibleLightIndex];
}
// Resets per-frame slice bookkeeping before Setup() rebuilds it.
void Clear()
{
    m_ShadowSliceToAdditionalLightIndex.Clear();
    m_GlobalShadowSliceIndexToPerLightShadowSliceIndex.Clear();
    // Only drop the reference here; the texture itself is released in OnCameraCleanup().
    m_AdditionalLightsShadowmapTexture = null;
}
// Renders every allocated shadow slice into the additional-lights shadow atlas and sets the
// shader keywords/constants the lighting pass needs to sample it.
void RenderAdditionalShadowmapAtlas(ref ScriptableRenderContext context, ref CullingResults cullResults, ref LightData lightData, ref ShadowData shadowData)
{
    NativeArray<VisibleLight> visibleLights = lightData.visibleLights;

    bool additionalLightHasSoftShadows = false;

    // NOTE: Do NOT mix ProfilingScope with named CommandBuffers i.e. CommandBufferPool.Get("name").
    // Currently there's an issue which results in mismatched markers.
    CommandBuffer cmd = CommandBufferPool.Get();
    using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.AdditionalLightsShadow)))
    {
        bool anyShadowSliceRenderer = false;
        int shadowSlicesCount = m_ShadowSliceToAdditionalLightIndex.Count;
        for (int globalShadowSliceIndex = 0; globalShadowSliceIndex < shadowSlicesCount; ++globalShadowSliceIndex)
        {
            int additionalLightIndex = m_ShadowSliceToAdditionalLightIndex[globalShadowSliceIndex];

            // we do the shadow strength check here again here because we might have zero strength for non-shadow-casting lights.
            // In that case we need the shadow data buffer but we can skip rendering them to shadowmap.
            if (Mathf.Approximately(m_AdditionalLightIndexToShadowParams[additionalLightIndex].x, 0.0f) || Mathf.Approximately(m_AdditionalLightIndexToShadowParams[additionalLightIndex].w, -1.0f))
                continue;

            int visibleLightIndex = m_AdditionalLightIndexToVisibleLightIndex[additionalLightIndex];
            VisibleLight shadowLight = visibleLights[visibleLightIndex];

            ShadowSliceData shadowSliceData = m_AdditionalLightsShadowSlices[globalShadowSliceIndex];

            var settings = new ShadowDrawingSettings(cullResults, visibleLightIndex);
            settings.splitData = shadowSliceData.splitData;
            Vector4 shadowBias = ShadowUtils.GetShadowBias(ref shadowLight, visibleLightIndex,
                ref shadowData, shadowSliceData.projectionMatrix, shadowSliceData.resolution);
            ShadowUtils.SetupShadowCasterConstantBuffer(cmd, ref shadowLight, shadowBias);
            CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.CastingPunctualLightShadow, true);
            ShadowUtils.RenderShadowSlice(cmd, ref context, ref shadowSliceData, ref settings);
            additionalLightHasSoftShadows |= shadowLight.light.shadows == LightShadows.Soft;
            anyShadowSliceRenderer = true;
        }

        // We share soft shadow settings for main light and additional lights to save keywords.
        // So we check here if pipeline supports soft shadows and either main light or any additional light has soft shadows
        // to enable the keyword.
        // TODO: In PC and Consoles we can upload shadow data per light and branch on shader. That will be more likely way faster.
        bool mainLightHasSoftShadows = shadowData.supportsMainLightShadows &&
            lightData.mainLightIndex != -1 &&
            visibleLights[lightData.mainLightIndex].light.shadows ==
            LightShadows.Soft;

        bool softShadows = shadowData.supportsSoftShadows &&
            (mainLightHasSoftShadows || additionalLightHasSoftShadows);

        CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.AdditionalLightShadows, anyShadowSliceRenderer);
        CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.SoftShadows, softShadows);

        if (anyShadowSliceRenderer)
            SetupAdditionalLightsShadowReceiverConstants(cmd, ref shadowData, softShadows);
    }
    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
}
// Set constant buffer data that will be used during the lighting/shadowing pass
void SetupAdditionalLightsShadowReceiverConstants(CommandBuffer cmd, ref ShadowData shadowData, bool softShadows)
{
    // Reciprocal atlas dimensions, reused by the soft-shadow filter offsets below.
    float oneOverAtlasWidth = 1.0f / shadowData.additionalLightsShadowmapWidth;
    float oneOverAtlasHeight = 1.0f / shadowData.additionalLightsShadowmapHeight;
    float halfTexelX = 0.5f * oneOverAtlasWidth;
    float halfTexelY = 0.5f * oneOverAtlasHeight;

    // Bind the additional-lights shadow atlas so receivers can sample it.
    cmd.SetGlobalTexture(m_AdditionalLightsShadowmap.id, m_AdditionalLightsShadowmapTexture);

    // set shadow fade (shadow distance) parameters
    ShadowUtils.SetupShadowReceiverConstantBuffer(cmd, m_MainLightShadowParams);

    if (m_UseStructuredBuffer)
    {
        // Structured-buffer path: upload per-light params and per-slice matrices as SSBOs.
        var lightParamsBuffer = ShaderData.instance.GetAdditionalLightShadowParamsStructuredBuffer(m_AdditionalLightIndexToShadowParams.Length);
        lightParamsBuffer.SetData(m_AdditionalLightIndexToShadowParams);
        cmd.SetGlobalBuffer(m_AdditionalShadowParams_SSBO, lightParamsBuffer);

        var sliceMatricesBuffer = ShaderData.instance.GetAdditionalLightShadowSliceMatricesStructuredBuffer(m_AdditionalLightShadowSliceIndexTo_WorldShadowMatrix.Length);
        sliceMatricesBuffer.SetData(m_AdditionalLightShadowSliceIndexTo_WorldShadowMatrix);
        cmd.SetGlobalBuffer(m_AdditionalLightsWorldToShadow_SSBO, sliceMatricesBuffer);
    }
    else
    {
        // Uniform path: the same data goes through global arrays instead.
        cmd.SetGlobalVectorArray(AdditionalShadowsConstantBuffer._AdditionalShadowParams, m_AdditionalLightIndexToShadowParams); // per-light data
        cmd.SetGlobalMatrixArray(AdditionalShadowsConstantBuffer._AdditionalLightsWorldToShadow, m_AdditionalLightShadowSliceIndexTo_WorldShadowMatrix); // per-shadow-slice data
    }

    if (softShadows)
    {
        if (m_SupportsBoxFilterForShadows)
        {
            // Four half-texel tap offsets for the box filter, one per diagonal direction.
            cmd.SetGlobalVector(AdditionalShadowsConstantBuffer._AdditionalShadowOffset0,
                new Vector4(-halfTexelX, -halfTexelY, 0.0f, 0.0f));
            cmd.SetGlobalVector(AdditionalShadowsConstantBuffer._AdditionalShadowOffset1,
                new Vector4(halfTexelX, -halfTexelY, 0.0f, 0.0f));
            cmd.SetGlobalVector(AdditionalShadowsConstantBuffer._AdditionalShadowOffset2,
                new Vector4(-halfTexelX, halfTexelY, 0.0f, 0.0f));
            cmd.SetGlobalVector(AdditionalShadowsConstantBuffer._AdditionalShadowOffset3,
                new Vector4(halfTexelX, halfTexelY, 0.0f, 0.0f));
        }

        // Currently only used when !SHADER_API_MOBILE but risky to not set them as it's generic
        // enough so custom shaders might use it.
        cmd.SetGlobalVector(AdditionalShadowsConstantBuffer._AdditionalShadowmapSize, new Vector4(oneOverAtlasWidth, oneOverAtlasHeight,
            shadowData.additionalLightsShadowmapWidth, shadowData.additionalLightsShadowmapHeight));
    }
}
// Returns true when the visible light at index i should render into the additional-lights shadow atlas.
bool IsValidShadowCastingLight(ref LightData lightData, int i)
{
    // The main light renders to its own dedicated shadowmap, never the atlas.
    if (i == lightData.mainLightIndex)
        return false;

    VisibleLight visibleLight = lightData.visibleLights[i];

    // Directional light shadows are not supported in the shadow map atlas.
    if (visibleLight.lightType == LightType.Directional)
        return false;

    Light light = visibleLight.light;
    if (light == null || light.shadows == LightShadows.None)
        return false;

    // A zero-strength shadow would be invisible anyway; skip it.
    return !Mathf.Approximately(light.shadowStrength, 0.0f);
}
}
}

View File

@@ -0,0 +1,45 @@
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// Let customizable actions inject commands to capture the camera output.
///
/// You can use this pass to inject capture commands into a command buffer
/// with the goal of having camera capture happening in external code.
/// </summary>
internal class CapturePass : ScriptableRenderPass
{
    // Color target the registered capture actions read from.
    RenderTargetHandle m_CameraColorHandle;
    const string m_ProfilerTag = "Capture Pass";
    private static readonly ProfilingSampler m_ProfilingSampler = new ProfilingSampler(m_ProfilerTag);

    public CapturePass(RenderPassEvent evt)
    {
        base.profilingSampler = new ProfilingSampler(nameof(CapturePass));
        renderPassEvent = evt;
    }

    /// <summary>
    /// Configure the pass with the camera color target to capture from.
    /// </summary>
    /// <param name="colorHandle">Camera color target the capture actions will read.</param>
    public void Setup(RenderTargetHandle colorHandle)
    {
        m_CameraColorHandle = colorHandle;
    }

    /// <inheritdoc/>
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        CommandBuffer cmdBuf = CommandBufferPool.Get();
        using (new ProfilingScope(cmdBuf, m_ProfilingSampler))
        {
            var colorAttachmentIdentifier = m_CameraColorHandle.Identifier();
            var captureActions = renderingData.cameraData.captureActions;
            // Let every externally registered capture action record its commands
            // against the camera color target.
            for (captureActions.Reset(); captureActions.MoveNext();)
                captureActions.Current(colorAttachmentIdentifier, cmdBuf);
        }
        context.ExecuteCommandBuffer(cmdBuf);
        CommandBufferPool.Release(cmdBuf);
    }
}
}

View File

@@ -0,0 +1,220 @@
using UnityEngine.Experimental.Rendering;
namespace UnityEngine.Rendering.Universal.Internal
{
// Note: this pass can't be done at the same time as post-processing as it needs to be done in
// advance in case we're doing on-tile color grading.
/// <summary>
/// Renders a color grading LUT texture.
/// </summary>
public class ColorGradingLutPass : ScriptableRenderPass
{
    readonly Material m_LutBuilderLdr;
    readonly Material m_LutBuilderHdr;
    readonly GraphicsFormat m_HdrLutFormat;  // best render-capable format available for HDR grading
    readonly GraphicsFormat m_LdrLutFormat;  // always 8-bit UNorm for LDR grading
    RenderTargetHandle m_InternalLut;

    /// <summary>
    /// Creates the pass, loads the LUT builder materials and selects the best supported LUT formats.
    /// </summary>
    /// <param name="evt">Event at which this pass executes.</param>
    /// <param name="data">Post-process resources holding the LUT builder shaders.</param>
    public ColorGradingLutPass(RenderPassEvent evt, PostProcessData data)
    {
        base.profilingSampler = new ProfilingSampler(nameof(ColorGradingLutPass));
        renderPassEvent = evt;
        overrideCameraTarget = true;

        Material Load(Shader shader)
        {
            if (shader == null)
            {
                // Fix: this type is not nested, so GetType().DeclaringType is null and the previous
                // $"{GetType().DeclaringType.Name}" threw NullReferenceException on this error path.
                Debug.LogError($"Missing shader. {GetType().Name} render pass will not execute. Check for missing reference in the renderer resources.");
                return null;
            }
            return CoreUtils.CreateEngineMaterial(shader);
        }

        m_LutBuilderLdr = Load(data.shaders.lutBuilderLdrPS);
        m_LutBuilderHdr = Load(data.shaders.lutBuilderHdrPS);

        // Warm up lut format as IsFormatSupported adds GC pressure...
        const FormatUsage kFlags = FormatUsage.Linear | FormatUsage.Render;
        if (SystemInfo.IsFormatSupported(GraphicsFormat.R16G16B16A16_SFloat, kFlags))
            m_HdrLutFormat = GraphicsFormat.R16G16B16A16_SFloat;
        else if (SystemInfo.IsFormatSupported(GraphicsFormat.B10G11R11_UFloatPack32, kFlags))
            m_HdrLutFormat = GraphicsFormat.B10G11R11_UFloatPack32;
        else
            // Obviously using this for log lut encoding is a very bad idea for precision but we
            // need it for compatibility reasons and avoid black screens on platforms that don't
            // support floating point formats. Expect banding and posterization artifact if this
            // ends up being used.
            m_HdrLutFormat = GraphicsFormat.R8G8B8A8_UNorm;

        m_LdrLutFormat = GraphicsFormat.R8G8B8A8_UNorm;
    }

    /// <summary>
    /// Sets the handle the internal LUT texture is rendered into.
    /// </summary>
    /// <param name="internalLut">Render target handle for the LUT texture.</param>
    public void Setup(in RenderTargetHandle internalLut)
    {
        m_InternalLut = internalLut;
    }

    /// <inheritdoc/>
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        var cmd = CommandBufferPool.Get();
        using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.ColorGradingLUT)))
        {
            // Fetch all color grading settings
            var stack = VolumeManager.instance.stack;
            var channelMixer = stack.GetComponent<ChannelMixer>();
            var colorAdjustments = stack.GetComponent<ColorAdjustments>();
            var curves = stack.GetComponent<ColorCurves>();
            var liftGammaGain = stack.GetComponent<LiftGammaGain>();
            var shadowsMidtonesHighlights = stack.GetComponent<ShadowsMidtonesHighlights>();
            var splitToning = stack.GetComponent<SplitToning>();
            var tonemapping = stack.GetComponent<Tonemapping>();
            var whiteBalance = stack.GetComponent<WhiteBalance>();

            ref var postProcessingData = ref renderingData.postProcessingData;
            bool hdr = postProcessingData.gradingMode == ColorGradingMode.HighDynamicRange;

            // Prepare texture & material. The LUT is laid out as lutSize slices side by side,
            // hence width = height * height.
            int lutHeight = postProcessingData.lutSize;
            int lutWidth = lutHeight * lutHeight;
            var format = hdr ? m_HdrLutFormat : m_LdrLutFormat;
            var material = hdr ? m_LutBuilderHdr : m_LutBuilderLdr;
            var desc = new RenderTextureDescriptor(lutWidth, lutHeight, format, 0);
            desc.vrUsage = VRTextureUsage.None; // We only need one for both eyes in VR
            cmd.GetTemporaryRT(m_InternalLut.id, desc, FilterMode.Bilinear);

            // Prepare data
            var lmsColorBalance = ColorUtils.ColorBalanceToLMSCoeffs(whiteBalance.temperature.value, whiteBalance.tint.value);
            var hueSatCon = new Vector4(colorAdjustments.hueShift.value / 360f, colorAdjustments.saturation.value / 100f + 1f, colorAdjustments.contrast.value / 100f + 1f, 0f);
            var channelMixerR = new Vector4(channelMixer.redOutRedIn.value / 100f, channelMixer.redOutGreenIn.value / 100f, channelMixer.redOutBlueIn.value / 100f, 0f);
            var channelMixerG = new Vector4(channelMixer.greenOutRedIn.value / 100f, channelMixer.greenOutGreenIn.value / 100f, channelMixer.greenOutBlueIn.value / 100f, 0f);
            var channelMixerB = new Vector4(channelMixer.blueOutRedIn.value / 100f, channelMixer.blueOutGreenIn.value / 100f, channelMixer.blueOutBlueIn.value / 100f, 0f);

            var shadowsHighlightsLimits = new Vector4(
                shadowsMidtonesHighlights.shadowsStart.value,
                shadowsMidtonesHighlights.shadowsEnd.value,
                shadowsMidtonesHighlights.highlightsStart.value,
                shadowsMidtonesHighlights.highlightsEnd.value
            );

            var (shadows, midtones, highlights) = ColorUtils.PrepareShadowsMidtonesHighlights(
                shadowsMidtonesHighlights.shadows.value,
                shadowsMidtonesHighlights.midtones.value,
                shadowsMidtonesHighlights.highlights.value
            );

            var (lift, gamma, gain) = ColorUtils.PrepareLiftGammaGain(
                liftGammaGain.lift.value,
                liftGammaGain.gamma.value,
                liftGammaGain.gain.value
            );

            var (splitShadows, splitHighlights) = ColorUtils.PrepareSplitToning(
                splitToning.shadows.value,
                splitToning.highlights.value,
                splitToning.balance.value
            );

            var lutParameters = new Vector4(lutHeight, 0.5f / lutWidth, 0.5f / lutHeight,
                lutHeight / (lutHeight - 1f));

            // Fill in constants
            material.SetVector(ShaderConstants._Lut_Params, lutParameters);
            material.SetVector(ShaderConstants._ColorBalance, lmsColorBalance);
            material.SetVector(ShaderConstants._ColorFilter, colorAdjustments.colorFilter.value.linear);
            material.SetVector(ShaderConstants._ChannelMixerRed, channelMixerR);
            material.SetVector(ShaderConstants._ChannelMixerGreen, channelMixerG);
            material.SetVector(ShaderConstants._ChannelMixerBlue, channelMixerB);
            material.SetVector(ShaderConstants._HueSatCon, hueSatCon);
            material.SetVector(ShaderConstants._Lift, lift);
            material.SetVector(ShaderConstants._Gamma, gamma);
            material.SetVector(ShaderConstants._Gain, gain);
            material.SetVector(ShaderConstants._Shadows, shadows);
            material.SetVector(ShaderConstants._Midtones, midtones);
            material.SetVector(ShaderConstants._Highlights, highlights);
            material.SetVector(ShaderConstants._ShaHiLimits, shadowsHighlightsLimits);
            material.SetVector(ShaderConstants._SplitShadows, splitShadows);
            material.SetVector(ShaderConstants._SplitHighlights, splitHighlights);

            // YRGB curves
            material.SetTexture(ShaderConstants._CurveMaster, curves.master.value.GetTexture());
            material.SetTexture(ShaderConstants._CurveRed, curves.red.value.GetTexture());
            material.SetTexture(ShaderConstants._CurveGreen, curves.green.value.GetTexture());
            material.SetTexture(ShaderConstants._CurveBlue, curves.blue.value.GetTexture());

            // Secondary curves
            material.SetTexture(ShaderConstants._CurveHueVsHue, curves.hueVsHue.value.GetTexture());
            material.SetTexture(ShaderConstants._CurveHueVsSat, curves.hueVsSat.value.GetTexture());
            material.SetTexture(ShaderConstants._CurveLumVsSat, curves.lumVsSat.value.GetTexture());
            material.SetTexture(ShaderConstants._CurveSatVsSat, curves.satVsSat.value.GetTexture());

            // Tonemapping (baked into the lut for HDR)
            if (hdr)
            {
                material.shaderKeywords = null;

                switch (tonemapping.mode.value)
                {
                    case TonemappingMode.Neutral: material.EnableKeyword(ShaderKeywordStrings.TonemapNeutral); break;
                    case TonemappingMode.ACES: material.EnableKeyword(ShaderKeywordStrings.TonemapACES); break;
                    default: break; // None
                }
            }

            renderingData.cameraData.xr.StopSinglePass(cmd);

            // Render the lut
            cmd.Blit(null, m_InternalLut.id, material);

            renderingData.cameraData.xr.StartSinglePass(cmd);
        }

        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);
    }

    /// <inheritdoc/>
    public override void OnFinishCameraStackRendering(CommandBuffer cmd)
    {
        cmd.ReleaseTemporaryRT(m_InternalLut.id);
    }

    /// <summary>
    /// Destroys the materials created by this pass.
    /// </summary>
    public void Cleanup()
    {
        CoreUtils.Destroy(m_LutBuilderLdr);
        CoreUtils.Destroy(m_LutBuilderHdr);
    }

    // Precomputed shader ids to save some CPU cycles (mostly affects mobile)
    static class ShaderConstants
    {
        public static readonly int _Lut_Params = Shader.PropertyToID("_Lut_Params");
        public static readonly int _ColorBalance = Shader.PropertyToID("_ColorBalance");
        public static readonly int _ColorFilter = Shader.PropertyToID("_ColorFilter");
        public static readonly int _ChannelMixerRed = Shader.PropertyToID("_ChannelMixerRed");
        public static readonly int _ChannelMixerGreen = Shader.PropertyToID("_ChannelMixerGreen");
        public static readonly int _ChannelMixerBlue = Shader.PropertyToID("_ChannelMixerBlue");
        public static readonly int _HueSatCon = Shader.PropertyToID("_HueSatCon");
        public static readonly int _Lift = Shader.PropertyToID("_Lift");
        public static readonly int _Gamma = Shader.PropertyToID("_Gamma");
        public static readonly int _Gain = Shader.PropertyToID("_Gain");
        public static readonly int _Shadows = Shader.PropertyToID("_Shadows");
        public static readonly int _Midtones = Shader.PropertyToID("_Midtones");
        public static readonly int _Highlights = Shader.PropertyToID("_Highlights");
        public static readonly int _ShaHiLimits = Shader.PropertyToID("_ShaHiLimits");
        public static readonly int _SplitShadows = Shader.PropertyToID("_SplitShadows");
        public static readonly int _SplitHighlights = Shader.PropertyToID("_SplitHighlights");
        public static readonly int _CurveMaster = Shader.PropertyToID("_CurveMaster");
        public static readonly int _CurveRed = Shader.PropertyToID("_CurveRed");
        public static readonly int _CurveGreen = Shader.PropertyToID("_CurveGreen");
        public static readonly int _CurveBlue = Shader.PropertyToID("_CurveBlue");
        public static readonly int _CurveHueVsHue = Shader.PropertyToID("_CurveHueVsHue");
        public static readonly int _CurveHueVsSat = Shader.PropertyToID("_CurveHueVsSat");
        public static readonly int _CurveLumVsSat = Shader.PropertyToID("_CurveLumVsSat");
        public static readonly int _CurveSatVsSat = Shader.PropertyToID("_CurveSatVsSat");
    }
}
}

View File

@@ -0,0 +1,120 @@
using System;
namespace UnityEngine.Rendering.Universal.Internal
{
/// <summary>
/// Copy the given color buffer to the given destination color buffer.
///
/// You can use this pass to copy a color buffer to the destination,
/// so you can use it later in rendering. For example, you can copy
/// the opaque texture to use it for distortion effects.
/// </summary>
public class CopyColorPass : ScriptableRenderPass
{
    int m_SampleOffsetShaderHandle;
    Material m_SamplingMaterial;
    Downsampling m_DownsamplingMethod;
    Material m_CopyColorMaterial;

    private RenderTargetIdentifier source { get; set; }
    private RenderTargetHandle destination { get; set; }

    /// <summary>
    /// Create the CopyColorPass
    /// </summary>
    /// <param name="evt">Event at which this pass executes.</param>
    /// <param name="samplingMaterial">Material used for the 4x box downsampling path.</param>
    /// <param name="copyColorMaterial">Material used for plain copies (may be null).</param>
    public CopyColorPass(RenderPassEvent evt, Material samplingMaterial, Material copyColorMaterial = null)
    {
        base.profilingSampler = new ProfilingSampler(nameof(CopyColorPass));

        m_SamplingMaterial = samplingMaterial;
        m_CopyColorMaterial = copyColorMaterial;
        m_SampleOffsetShaderHandle = Shader.PropertyToID("_SampleOffset");
        renderPassEvent = evt;
        m_DownsamplingMethod = Downsampling.None;
    }

    /// <summary>
    /// Configure the pass with the source and destination to execute on.
    /// </summary>
    /// <param name="source">Source Render Target</param>
    /// <param name="destination">Destination Render Target</param>
    /// <param name="downsampling">Downsampling method applied while copying.</param>
    public void Setup(RenderTargetIdentifier source, RenderTargetHandle destination, Downsampling downsampling)
    {
        this.source = source;
        this.destination = destination;
        m_DownsamplingMethod = downsampling;
    }

    public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
    {
        RenderTextureDescriptor descriptor = renderingData.cameraData.cameraTargetDescriptor;
        descriptor.msaaSamples = 1;
        descriptor.depthBufferBits = 0;

        // Shrink the destination according to the requested downsampling factor;
        // the actual filtering happens through the RT filter mode or the sampling material.
        switch (m_DownsamplingMethod)
        {
            case Downsampling._2xBilinear:
                descriptor.width /= 2;
                descriptor.height /= 2;
                break;
            case Downsampling._4xBox:
            case Downsampling._4xBilinear:
                descriptor.width /= 4;
                descriptor.height /= 4;
                break;
        }

        var filterMode = m_DownsamplingMethod == Downsampling.None ? FilterMode.Point : FilterMode.Bilinear;
        cmd.GetTemporaryRT(destination.id, descriptor, filterMode);
    }

    /// <inheritdoc/>
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        if (m_SamplingMaterial == null)
        {
            Debug.LogErrorFormat("Missing {0}. {1} render pass will not execute. Check for missing reference in the renderer resources.", m_SamplingMaterial, GetType().Name);
            return;
        }

        CommandBuffer cmd = CommandBufferPool.Get();
        using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.CopyColor)))
        {
            RenderTargetIdentifier opaqueColorRT = destination.Identifier();

            ScriptableRenderer.SetRenderTarget(cmd, opaqueColorRT, BuiltinRenderTextureType.CameraTarget, clearFlag,
                clearColor);

            // XR renders with procedural draws instead of fullscreen blits.
            bool useDrawProceduralBlit = renderingData.cameraData.xr.enabled;

            switch (m_DownsamplingMethod)
            {
                // None / _2xBilinear / _4xBilinear all use a plain copy; the scaling comes
                // from the destination RT size chosen in OnCameraSetup.
                case Downsampling.None:
                case Downsampling._2xBilinear:
                case Downsampling._4xBilinear:
                    RenderingUtils.Blit(cmd, source, opaqueColorRT, m_CopyColorMaterial, 0, useDrawProceduralBlit);
                    break;
                case Downsampling._4xBox:
                    // The box filter needs its sample offset configured before blitting.
                    m_SamplingMaterial.SetFloat(m_SampleOffsetShaderHandle, 2);
                    RenderingUtils.Blit(cmd, source, opaqueColorRT, m_SamplingMaterial, 0, useDrawProceduralBlit);
                    break;
            }
        }

        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);
    }

    /// <inheritdoc/>
    public override void OnCameraCleanup(CommandBuffer cmd)
    {
        if (cmd == null)
            throw new ArgumentNullException("cmd");

        if (destination != RenderTargetHandle.CameraTarget)
        {
            cmd.ReleaseTemporaryRT(destination.id);
            destination = RenderTargetHandle.CameraTarget;
        }
    }
}
}

View File

@@ -0,0 +1,157 @@
using System;
namespace UnityEngine.Rendering.Universal.Internal
{
/// <summary>
/// Copy the given depth buffer into the given destination depth buffer.
///
/// You can use this pass to copy a depth buffer to a destination,
/// so you can use it later in rendering. If the source texture has MSAA
/// enabled, the pass uses a custom MSAA resolve. If the source texture
/// does not have MSAA enabled, the pass uses a Blit or a Copy Texture
/// operation, depending on what the current platform supports.
/// </summary>
public class CopyDepthPass : ScriptableRenderPass
{
    // Source depth target to copy from.
    private RenderTargetHandle source { get; set; }
    // Destination depth target to copy into.
    private RenderTargetHandle destination { get; set; }
    // When true, this pass allocates (and later releases) the destination render texture itself.
    internal bool AllocateRT { get; set; }
    Material m_CopyDepthMaterial;

    public CopyDepthPass(RenderPassEvent evt, Material copyDepthMaterial)
    {
        base.profilingSampler = new ProfilingSampler(nameof(CopyDepthPass));
        AllocateRT = true;
        m_CopyDepthMaterial = copyDepthMaterial;
        renderPassEvent = evt;
    }

    /// <summary>
    /// Configure the pass with the source and destination to execute on.
    /// </summary>
    /// <param name="source">Source Render Target</param>
    /// <param name="destination">Destination Render Target</param>
    public void Setup(RenderTargetHandle source, RenderTargetHandle destination)
    {
        this.source = source;
        this.destination = destination;
        // Skip allocation when the destination wraps an externally managed render target id.
        this.AllocateRT = AllocateRT && !destination.HasInternalRenderTargetId();
    }

    public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
    {
        var descriptor = renderingData.cameraData.cameraTargetDescriptor;
        descriptor.colorFormat = RenderTextureFormat.Depth;
        descriptor.depthBufferBits = 32; //TODO: do we really need this. double check;
        descriptor.msaaSamples = 1;
        if (this.AllocateRT)
            cmd.GetTemporaryRT(destination.id, descriptor, FilterMode.Point);

        // On Metal iOS, prevent camera attachments to be bound and cleared during this pass.
        ConfigureTarget(new RenderTargetIdentifier(destination.Identifier(), 0, CubemapFace.Unknown, -1));
        ConfigureClear(ClearFlag.None, Color.black);
    }

    /// <inheritdoc/>
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        if (m_CopyDepthMaterial == null)
        {
            Debug.LogErrorFormat("Missing {0}. {1} render pass will not execute. Check for missing reference in the renderer resources.", m_CopyDepthMaterial, GetType().Name);
            return;
        }
        CommandBuffer cmd = CommandBufferPool.Get();
        using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.CopyDepth)))
        {
            RenderTextureDescriptor descriptor = renderingData.cameraData.cameraTargetDescriptor;
            int cameraSamples = descriptor.msaaSamples;

            CameraData cameraData = renderingData.cameraData;

            // Enable the keyword matching the camera MSAA sample count (the shader picks the
            // corresponding custom resolve); exactly one keyword, or none, ends up enabled.
            switch (cameraSamples)
            {
                case 8:
                    cmd.DisableShaderKeyword(ShaderKeywordStrings.DepthMsaa2);
                    cmd.DisableShaderKeyword(ShaderKeywordStrings.DepthMsaa4);
                    cmd.EnableShaderKeyword(ShaderKeywordStrings.DepthMsaa8);
                    break;

                case 4:
                    cmd.DisableShaderKeyword(ShaderKeywordStrings.DepthMsaa2);
                    cmd.EnableShaderKeyword(ShaderKeywordStrings.DepthMsaa4);
                    cmd.DisableShaderKeyword(ShaderKeywordStrings.DepthMsaa8);
                    break;

                case 2:
                    cmd.EnableShaderKeyword(ShaderKeywordStrings.DepthMsaa2);
                    cmd.DisableShaderKeyword(ShaderKeywordStrings.DepthMsaa4);
                    cmd.DisableShaderKeyword(ShaderKeywordStrings.DepthMsaa8);
                    break;

                // MSAA disabled
                default:
                    cmd.DisableShaderKeyword(ShaderKeywordStrings.DepthMsaa2);
                    cmd.DisableShaderKeyword(ShaderKeywordStrings.DepthMsaa4);
                    cmd.DisableShaderKeyword(ShaderKeywordStrings.DepthMsaa8);
                    break;
            }

            cmd.SetGlobalTexture("_CameraDepthAttachment", source.Identifier());

#if ENABLE_VR && ENABLE_XR_MODULE
            // XR uses procedural draw instead of cmd.blit or cmd.DrawFullScreenMesh
            if (renderingData.cameraData.xr.enabled)
            {
                // XR flip logic is not the same as non-XR case because XR uses draw procedure
                // and draw procedure does not need to take projection matrix yflip into account
                // We y-flip if
                // 1) we are bliting from render texture to back buffer and
                // 2) renderTexture starts UV at top
                // XRTODO: handle scalebias and scalebiasRt for src and dst separately
                bool isRenderToBackBufferTarget = destination.Identifier() == cameraData.xr.renderTarget && !cameraData.xr.renderTargetIsRenderTexture;
                bool yflip = isRenderToBackBufferTarget && SystemInfo.graphicsUVStartsAtTop;
                float flipSign = (yflip) ? -1.0f : 1.0f;
                Vector4 scaleBiasRt = (flipSign < 0.0f)
                    ? new Vector4(flipSign, 1.0f, -1.0f, 1.0f)
                    : new Vector4(flipSign, 0.0f, 1.0f, 1.0f);
                cmd.SetGlobalVector(ShaderPropertyId.scaleBiasRt, scaleBiasRt);

                cmd.DrawProcedural(Matrix4x4.identity, m_CopyDepthMaterial, 0, MeshTopology.Quads, 4);
            }
            else
#endif
            {
                // Blit has logic to flip projection matrix when rendering to render texture.
                // Currently the y-flip is handled in CopyDepthPass.hlsl by checking _ProjectionParams.x
                // If you replace this Blit with a Draw* that sets projection matrix double check
                // to also update shader.
                // scaleBias.x = flipSign
                // scaleBias.y = scale
                // scaleBias.z = bias
                // scaleBias.w = unused
                float flipSign = (cameraData.IsCameraProjectionMatrixFlipped()) ? -1.0f : 1.0f;
                Vector4 scaleBiasRt = (flipSign < 0.0f)
                    ? new Vector4(flipSign, 1.0f, -1.0f, 1.0f)
                    : new Vector4(flipSign, 0.0f, 1.0f, 1.0f);
                cmd.SetGlobalVector(ShaderPropertyId.scaleBiasRt, scaleBiasRt);

                cmd.DrawMesh(RenderingUtils.fullscreenMesh, Matrix4x4.identity, m_CopyDepthMaterial);
            }
        }

        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);
    }

    /// <inheritdoc/>
    public override void OnCameraCleanup(CommandBuffer cmd)
    {
        if (cmd == null)
            throw new ArgumentNullException("cmd");

        // Only release the destination if this pass allocated it in OnCameraSetup.
        if (this.AllocateRT)
            cmd.ReleaseTemporaryRT(destination.id);
        destination = RenderTargetHandle.CameraTarget;
    }
}
}

View File

@@ -0,0 +1,48 @@
using UnityEngine.Experimental.GlobalIllumination;
using UnityEngine.Profiling;
using Unity.Collections;
// cleanup code
// listMinDepth and maxDepth should be stored in a different uniform block?
// Point lights stored as vec4
// RelLightIndices should be stored in ushort instead of uint.
// TODO use Unity.Mathematics
// TODO Check if there is a bitarray structure (with dynamic size) available in Unity
namespace UnityEngine.Rendering.Universal.Internal
{
// Renders all tile-based deferred lights. All the heavy lifting is delegated to DeferredLights;
// this pass only wires it into the renderer's pass lifecycle.
internal class DeferredPass : ScriptableRenderPass
{
    DeferredLights m_DeferredLights;

    public DeferredPass(RenderPassEvent evt, DeferredLights deferredLights)
    {
        base.profilingSampler = new ProfilingSampler(nameof(DeferredPass));
        base.renderPassEvent = evt;
        m_DeferredLights = deferredLights;
    }

    // ScriptableRenderPass
    public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescripor)
    {
        // Bind the GBuffer lighting attachment as color and the deferred depth attachment as depth.
        var lightingTarget = m_DeferredLights.GbufferAttachmentIdentifiers[m_DeferredLights.GBufferLightingIndex];
        var depthTarget = m_DeferredLights.DepthAttachmentIdentifier;

        // TODO: Cannot currently bind depth texture as read-only!
        ConfigureTarget(lightingTarget, depthTarget);
    }

    // ScriptableRenderPass
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
        => m_DeferredLights.ExecuteDeferredPass(context, ref renderingData);

    // ScriptableRenderPass
    public override void OnCameraCleanup(CommandBuffer cmd)
        => m_DeferredLights.OnCameraCleanup(cmd);
}
}

View File

@@ -0,0 +1,99 @@
using System;
namespace UnityEngine.Rendering.Universal.Internal
{
/// <summary>
/// Renders objects with a "DepthNormals" pass into separate depth and normals targets,
/// for use as a prepass by later effects.
/// </summary>
public class DepthNormalOnlyPass : ScriptableRenderPass
{
    internal RenderTextureDescriptor normalDescriptor { get; private set; }
    internal RenderTextureDescriptor depthDescriptor { get; private set; }

    private RenderTargetHandle depthHandle { get; set; }
    private RenderTargetHandle normalHandle { get; set; }
    private ShaderTagId m_ShaderTagId = new ShaderTagId("DepthNormals");
    private FilteringSettings m_FilteringSettings;

    // Constants
    private const int k_DepthBufferBits = 32;

    /// <summary>
    /// Create the DepthNormalOnlyPass
    /// </summary>
    /// <param name="evt">Event at which this pass executes.</param>
    /// <param name="renderQueueRange">Render queue range of objects drawn by the pass.</param>
    /// <param name="layerMask">Layer mask filtering the objects drawn by the pass.</param>
    public DepthNormalOnlyPass(RenderPassEvent evt, RenderQueueRange renderQueueRange, LayerMask layerMask)
    {
        base.profilingSampler = new ProfilingSampler(nameof(DepthNormalOnlyPass));
        m_FilteringSettings = new FilteringSettings(renderQueueRange, layerMask);
        renderPassEvent = evt;
    }

    /// <summary>
    /// Configure the pass with the depth and normal targets, deriving both descriptors
    /// from the camera's base descriptor.
    /// </summary>
    public void Setup(RenderTextureDescriptor baseDescriptor, RenderTargetHandle depthHandle, RenderTargetHandle normalHandle)
    {
        this.depthHandle = depthHandle;
        baseDescriptor.colorFormat = RenderTextureFormat.Depth;
        baseDescriptor.depthBufferBits = k_DepthBufferBits;
        baseDescriptor.msaaSamples = 1;// Depth-Only pass don't use MSAA
        depthDescriptor = baseDescriptor;

        this.normalHandle = normalHandle;
        // Normals are stored in a two-channel half-float target (no depth buffer needed).
        baseDescriptor.colorFormat = RenderTextureFormat.RGHalf;
        baseDescriptor.depthBufferBits = 0;
        baseDescriptor.msaaSamples = 1;
        normalDescriptor = baseDescriptor;
    }

    /// <inheritdoc/>
    public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
    {
        cmd.GetTemporaryRT(normalHandle.id, normalDescriptor, FilterMode.Point);
        cmd.GetTemporaryRT(depthHandle.id, depthDescriptor, FilterMode.Point);
        ConfigureTarget(
            new RenderTargetIdentifier(normalHandle.Identifier(), 0, CubemapFace.Unknown, -1),
            new RenderTargetIdentifier(depthHandle.Identifier(), 0, CubemapFace.Unknown, -1)
        );
        ConfigureClear(ClearFlag.All, Color.black);
    }

    /// <inheritdoc/>
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        // NOTE: Do NOT mix ProfilingScope with named CommandBuffers i.e. CommandBufferPool.Get("name").
        // Currently there's an issue which results in mismatched markers.
        CommandBuffer cmd = CommandBufferPool.Get();
        using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.DepthNormalPrepass)))
        {
            context.ExecuteCommandBuffer(cmd);
            cmd.Clear();

            var sortFlags = renderingData.cameraData.defaultOpaqueSortFlags;
            var drawSettings = CreateDrawingSettings(m_ShaderTagId, ref renderingData, sortFlags);
            drawSettings.perObjectData = PerObjectData.None;

            // Fix: removed unused locals (cameraData / camera) that were declared here
            // and never read.
            context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref m_FilteringSettings);
        }
        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);
    }

    /// <inheritdoc/>
    public override void OnCameraCleanup(CommandBuffer cmd)
    {
        if (cmd == null)
        {
            throw new ArgumentNullException("cmd");
        }

        if (depthHandle != RenderTargetHandle.CameraTarget)
        {
            cmd.ReleaseTemporaryRT(normalHandle.id);
            cmd.ReleaseTemporaryRT(depthHandle.id);
            normalHandle = RenderTargetHandle.CameraTarget;
            depthHandle = RenderTargetHandle.CameraTarget;
        }
    }
}
}

View File

@@ -0,0 +1,88 @@
using System;
namespace UnityEngine.Rendering.Universal.Internal
{
/// <summary>
/// Render all objects that have a 'DepthOnly' pass into the given depth buffer.
///
/// You can use this pass to prime a depth buffer for subsequent rendering.
/// Use it as a z-prepass, or use it to generate a depth buffer.
/// </summary>
public class DepthOnlyPass : ScriptableRenderPass
{
    // Depth buffer precision. Made const (was a mutable instance field) for consistency
    // with DepthNormalOnlyPass.k_DepthBufferBits.
    const int kDepthBufferBits = 32;

    private RenderTargetHandle depthAttachmentHandle { get; set; }
    internal RenderTextureDescriptor descriptor { get; private set; }

    FilteringSettings m_FilteringSettings;
    ShaderTagId m_ShaderTagId = new ShaderTagId("DepthOnly");

    /// <summary>
    /// Create the DepthOnlyPass
    /// </summary>
    /// <param name="evt">Event at which this pass executes.</param>
    /// <param name="renderQueueRange">Render queue range of objects drawn by the pass.</param>
    /// <param name="layerMask">Layer mask filtering the objects drawn by the pass.</param>
    public DepthOnlyPass(RenderPassEvent evt, RenderQueueRange renderQueueRange, LayerMask layerMask)
    {
        base.profilingSampler = new ProfilingSampler(nameof(DepthOnlyPass));
        m_FilteringSettings = new FilteringSettings(renderQueueRange, layerMask);
        renderPassEvent = evt;
    }

    /// <summary>
    /// Configure the pass with the depth attachment to render into, deriving the
    /// render texture descriptor from the camera's base descriptor.
    /// </summary>
    public void Setup(
        RenderTextureDescriptor baseDescriptor,
        RenderTargetHandle depthAttachmentHandle)
    {
        this.depthAttachmentHandle = depthAttachmentHandle;
        baseDescriptor.colorFormat = RenderTextureFormat.Depth;
        baseDescriptor.depthBufferBits = kDepthBufferBits;

        // Depth-Only pass don't use MSAA
        baseDescriptor.msaaSamples = 1;
        descriptor = baseDescriptor;
    }

    public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
    {
        cmd.GetTemporaryRT(depthAttachmentHandle.id, descriptor, FilterMode.Point);
        ConfigureTarget(new RenderTargetIdentifier(depthAttachmentHandle.Identifier(), 0, CubemapFace.Unknown, -1));
        ConfigureClear(ClearFlag.All, Color.black);
    }

    /// <inheritdoc/>
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        // NOTE: Do NOT mix ProfilingScope with named CommandBuffers i.e. CommandBufferPool.Get("name").
        // Currently there's an issue which results in mismatched markers.
        CommandBuffer cmd = CommandBufferPool.Get();
        using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.DepthPrepass)))
        {
            context.ExecuteCommandBuffer(cmd);
            cmd.Clear();

            var sortFlags = renderingData.cameraData.defaultOpaqueSortFlags;
            var drawSettings = CreateDrawingSettings(m_ShaderTagId, ref renderingData, sortFlags);
            drawSettings.perObjectData = PerObjectData.None;

            context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref m_FilteringSettings);
        }
        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);
    }

    /// <inheritdoc/>
    public override void OnCameraCleanup(CommandBuffer cmd)
    {
        if (cmd == null)
            throw new ArgumentNullException("cmd");

        if (depthAttachmentHandle != RenderTargetHandle.CameraTarget)
        {
            cmd.ReleaseTemporaryRT(depthAttachmentHandle.id);
            depthAttachmentHandle = RenderTargetHandle.CameraTarget;
        }
    }
}
}

View File

@@ -0,0 +1,106 @@
using System;
using System.Collections.Generic;
using UnityEngine.Profiling;
namespace UnityEngine.Rendering.Universal.Internal
{
/// <summary>
/// Draw objects into the given color and depth target
///
/// You can use this pass to render objects that have a material and/or shader
/// with the pass names UniversalForward or SRPDefaultUnlit.
/// </summary>
public class DrawObjectsPass : ScriptableRenderPass
{
    FilteringSettings m_FilteringSettings;
    RenderStateBlock m_RenderStateBlock;
    List<ShaderTagId> m_ShaderTagIdList = new List<ShaderTagId>();
    string m_ProfilerTag;
    ProfilingSampler m_ProfilingSampler;
    // true: this instance draws opaques; false: transparents. Affects sorting and _DrawObjectPassData.w.
    bool m_IsOpaque;

    // Shader property receiving per-pass data; only .w is used (opaque flag) — see Execute.
    static readonly int s_DrawObjectPassDataPropID = Shader.PropertyToID("_DrawObjectPassData");

    /// <summary>
    /// Creates the pass. Objects are drawn if their material has any of <paramref name="shaderTagIds"/>
    /// and they match the queue range and layer mask. An optional stencil override is applied when
    /// <paramref name="stencilState"/> is enabled.
    /// </summary>
    public DrawObjectsPass(string profilerTag, ShaderTagId[] shaderTagIds, bool opaque, RenderPassEvent evt, RenderQueueRange renderQueueRange, LayerMask layerMask, StencilState stencilState, int stencilReference)
    {
        base.profilingSampler = new ProfilingSampler(nameof(DrawObjectsPass));
        m_ProfilerTag = profilerTag;
        m_ProfilingSampler = new ProfilingSampler(profilerTag);
        foreach (ShaderTagId sid in shaderTagIds)
            m_ShaderTagIdList.Add(sid);
        renderPassEvent = evt;
        m_FilteringSettings = new FilteringSettings(renderQueueRange, layerMask);
        m_RenderStateBlock = new RenderStateBlock(RenderStateMask.Nothing);
        m_IsOpaque = opaque;

        // Only override stencil state when the caller actually enabled it.
        if (stencilState.enabled)
        {
            m_RenderStateBlock.stencilReference = stencilReference;
            m_RenderStateBlock.mask = RenderStateMask.Stencil;
            m_RenderStateBlock.stencilState = stencilState;
        }
    }

    // Convenience overload using the default URP forward shader passes.
    public DrawObjectsPass(string profilerTag, bool opaque, RenderPassEvent evt, RenderQueueRange renderQueueRange, LayerMask layerMask, StencilState stencilState, int stencilReference)
        : this(profilerTag,
        new ShaderTagId[] { new ShaderTagId("SRPDefaultUnlit"), new ShaderTagId("UniversalForward"), new ShaderTagId("UniversalForwardOnly"), new ShaderTagId("LightweightForward")},
        opaque, evt, renderQueueRange, layerMask, stencilState, stencilReference)
    {}

    // NOTE(review): GetType().Name on an enum value yields the enum TYPE name ("URPProfileId"),
    // not the value name — so m_ProfilerTag is generic here. The sampler is immediately replaced
    // with the pooled one for profileId, so the tag is effectively unused in this path.
    internal DrawObjectsPass(URPProfileId profileId, bool opaque, RenderPassEvent evt, RenderQueueRange renderQueueRange, LayerMask layerMask, StencilState stencilState, int stencilReference)
        : this(profileId.GetType().Name, opaque, evt, renderQueueRange, layerMask, stencilState, stencilReference)
    {
        m_ProfilingSampler = ProfilingSampler.Get(profileId);
    }

    /// <inheritdoc/>
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        // NOTE: Do NOT mix ProfilingScope with named CommandBuffers i.e. CommandBufferPool.Get("name").
        // Currently there's an issue which results in mismatched markers.
        CommandBuffer cmd = CommandBufferPool.Get();
        using (new ProfilingScope(cmd, m_ProfilingSampler))
        {
            // Global render pass data containing various settings.
            // x,y,z are currently unused
            // w is used for knowing whether the object is opaque(1) or alpha blended(0)
            Vector4 drawObjectPassData = new Vector4(0.0f, 0.0f, 0.0f, (m_IsOpaque) ? 1.0f : 0.0f);
            cmd.SetGlobalVector(s_DrawObjectPassDataPropID, drawObjectPassData);

            // scaleBias.x = flipSign
            // scaleBias.y = scale
            // scaleBias.z = bias
            // scaleBias.w = unused
            float flipSign = (renderingData.cameraData.IsCameraProjectionMatrixFlipped()) ? -1.0f : 1.0f;
            Vector4 scaleBias = (flipSign < 0.0f)
                ? new Vector4(flipSign, 1.0f, -1.0f, 1.0f)
                : new Vector4(flipSign, 0.0f, 1.0f, 1.0f);
            cmd.SetGlobalVector(ShaderPropertyId.scaleBiasRt, scaleBias);

            // Flush the global-state setters above before issuing the draws.
            context.ExecuteCommandBuffer(cmd);
            cmd.Clear();

            Camera camera = renderingData.cameraData.camera;
            var sortFlags = (m_IsOpaque) ? renderingData.cameraData.defaultOpaqueSortFlags : SortingCriteria.CommonTransparent;
            var drawSettings = CreateDrawingSettings(m_ShaderTagIdList, ref renderingData, sortFlags);
            var filterSettings = m_FilteringSettings;

#if UNITY_EDITOR
            // When rendering the preview camera, we want the layer mask to be forced to Everything
            if (renderingData.cameraData.isPreviewCamera)
            {
                filterSettings.layerMask = -1;
            }
#endif

            context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref filterSettings, ref m_RenderStateBlock);

            // Render objects that did not match any shader pass with error shader
            RenderingUtils.RenderObjectsWithError(context, ref renderingData.cullResults, camera, filterSettings, SortingCriteria.None);
        }
        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);
    }
}
}

View File

@@ -0,0 +1,75 @@
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// Draw the skybox into the given color buffer using the given depth buffer for depth testing.
///
/// This pass renders the standard Unity skybox.
/// </summary>
public class DrawSkyboxPass : ScriptableRenderPass
{
    /// <summary>
    /// Creates the skybox pass scheduled at <paramref name="evt"/>.
    /// </summary>
    public DrawSkyboxPass(RenderPassEvent evt)
    {
        base.profilingSampler = new ProfilingSampler(nameof(DrawSkyboxPass));
        renderPassEvent = evt;
    }

    /// <inheritdoc/>
    // Draws the built-in skybox. In XR the camera matrices must be pushed onto the Camera
    // object first, because context.DrawSkybox reads them from the Camera (legacy path);
    // they are restored afterwards so later passes see the original camera state.
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
#if ENABLE_VR && ENABLE_XR_MODULE
        // XRTODO: Remove this code once Skybox pass is moved to SRP land.
        if (renderingData.cameraData.xr.enabled)
        {
            // Setup Legacy XR buffer states
            if (renderingData.cameraData.xr.singlePassEnabled)
            {
                // Setup legacy skybox stereo buffer
                renderingData.cameraData.camera.SetStereoProjectionMatrix(Camera.StereoscopicEye.Left, renderingData.cameraData.GetProjectionMatrix(0));
                renderingData.cameraData.camera.SetStereoViewMatrix(Camera.StereoscopicEye.Left, renderingData.cameraData.GetViewMatrix(0));
                renderingData.cameraData.camera.SetStereoProjectionMatrix(Camera.StereoscopicEye.Right, renderingData.cameraData.GetProjectionMatrix(1));
                renderingData.cameraData.camera.SetStereoViewMatrix(Camera.StereoscopicEye.Right, renderingData.cameraData.GetViewMatrix(1));

                CommandBuffer cmd = CommandBufferPool.Get();

                // Use legacy stereo instancing mode to have legacy XR code path configured
                cmd.SetSinglePassStereo(SystemInfo.supportsMultiview ? SinglePassStereoMode.Multiview : SinglePassStereoMode.Instancing);
                context.ExecuteCommandBuffer(cmd);
                cmd.Clear();

                // Calling into built-in skybox pass
                context.DrawSkybox(renderingData.cameraData.camera);

                // Disable Legacy XR path
                cmd.SetSinglePassStereo(SinglePassStereoMode.None);
                context.ExecuteCommandBuffer(cmd);
                // We do not need to submit here due to special handling of stereo matricies in core.
                // context.Submit();
                CommandBufferPool.Release(cmd);

                renderingData.cameraData.camera.ResetStereoProjectionMatrices();
                renderingData.cameraData.camera.ResetStereoViewMatrices();
            }
            else
            {
                // Multi-pass XR: set the per-eye matrices directly on the camera.
                renderingData.cameraData.camera.projectionMatrix = renderingData.cameraData.GetProjectionMatrix(0);
                renderingData.cameraData.camera.worldToCameraMatrix = renderingData.cameraData.GetViewMatrix(0);

                context.DrawSkybox(renderingData.cameraData.camera);

                // XRTODO: remove this call because it creates issues with nested profiling scopes
                // See examples in UniversalRenderPipeline.RenderSingleCamera() and in ScriptableRenderer.Execute()
                context.Submit(); // Submit and execute the skybox pass before resetting the matrices

                renderingData.cameraData.camera.ResetProjectionMatrix();
                renderingData.cameraData.camera.ResetWorldToCameraMatrix();
            }
        }
        else
#endif
        {
            // Non-XR: the built-in skybox pass uses the camera state as-is.
            context.DrawSkybox(renderingData.cameraData.camera);
        }
    }
}
}

View File

@@ -0,0 +1,117 @@
namespace UnityEngine.Rendering.Universal.Internal
{
/// <summary>
/// Copy the given color target to the current camera target
///
/// You can use this pass to copy the result of rendering to
/// the camera target. The pass takes the screen viewport into
/// consideration.
/// </summary>
public class FinalBlitPass : ScriptableRenderPass
{
    // Source color target to copy from (set in Setup).
    RenderTargetHandle m_Source;
    // Material performing the fullscreen copy (may apply sRGB conversion via keyword).
    Material m_BlitMaterial;

    /// <summary>
    /// Creates the final blit pass.
    /// </summary>
    /// <param name="evt">Point in the frame at which the pass executes.</param>
    /// <param name="blitMaterial">Fullscreen blit material; the pass logs an error and skips if null.</param>
    public FinalBlitPass(RenderPassEvent evt, Material blitMaterial)
    {
        base.profilingSampler = new ProfilingSampler(nameof(FinalBlitPass));
        m_BlitMaterial = blitMaterial;
        renderPassEvent = evt;
    }

    /// <summary>
    /// Configure the pass
    /// </summary>
    /// <param name="baseDescriptor"></param>
    /// <param name="colorHandle"></param>
    public void Setup(RenderTextureDescriptor baseDescriptor, RenderTargetHandle colorHandle)
    {
        m_Source = colorHandle;
    }

    /// <inheritdoc/>
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        if (m_BlitMaterial == null)
        {
            // FIX: m_BlitMaterial is null here, so formatting it as {0} rendered an empty
            // string ("Missing . ..."). Name the missing resource explicitly instead.
            Debug.LogErrorFormat("Missing {0}. {1} render pass will not execute. Check for missing reference in the renderer resources.", nameof(m_BlitMaterial), GetType().Name);
            return;
        }

        // Note: We need to get the cameraData.targetTexture as this will get the targetTexture of the camera stack.
        // Overlay cameras need to output to the target described in the base camera while doing camera stack.
        ref CameraData cameraData = ref renderingData.cameraData;
        RenderTargetIdentifier cameraTarget = (cameraData.targetTexture != null) ? new RenderTargetIdentifier(cameraData.targetTexture) : BuiltinRenderTextureType.CameraTarget;
        bool isSceneViewCamera = cameraData.isSceneViewCamera;

        CommandBuffer cmd = CommandBufferPool.Get();
        using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.FinalBlit)))
        {
            // Apply linear->sRGB conversion in the blit shader when the target needs it.
            CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.LinearToSRGBConversion,
                cameraData.requireSrgbConversion);

            cmd.SetGlobalTexture(ShaderPropertyId.sourceTex, m_Source.Identifier());

#if ENABLE_VR && ENABLE_XR_MODULE
            if (cameraData.xr.enabled)
            {
                // Single-pass XR writes all array slices at once (depthSlice -1); multi-pass targets one slice.
                int depthSlice = cameraData.xr.singlePassEnabled ? -1 : cameraData.xr.GetTextureArraySlice();
                cameraTarget =
                    new RenderTargetIdentifier(cameraData.xr.renderTarget, 0, CubemapFace.Unknown, depthSlice);

                CoreUtils.SetRenderTarget(
                    cmd,
                    cameraTarget,
                    RenderBufferLoadAction.Load,
                    RenderBufferStoreAction.Store,
                    ClearFlag.None,
                    Color.black);

                cmd.SetViewport(cameraData.pixelRect);

                // We y-flip if
                // 1) we are bliting from render texture to back buffer(UV starts at bottom) and
                // 2) renderTexture starts UV at top
                bool yflip = !cameraData.xr.renderTargetIsRenderTexture && SystemInfo.graphicsUVStartsAtTop;
                Vector4 scaleBias = yflip ? new Vector4(1, -1, 0, 1) : new Vector4(1, 1, 0, 0);
                cmd.SetGlobalVector(ShaderPropertyId.scaleBias, scaleBias);

                // Fullscreen quad drawn procedurally (no mesh needed).
                cmd.DrawProcedural(Matrix4x4.identity, m_BlitMaterial, 0, MeshTopology.Quads, 4);
            }
            else
#endif
            if (isSceneViewCamera || cameraData.isDefaultViewport)
            {
                // This set render target is necessary so we change the LOAD state to DontCare.
                cmd.SetRenderTarget(BuiltinRenderTextureType.CameraTarget,
                    RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store, // color
                    RenderBufferLoadAction.DontCare, RenderBufferStoreAction.DontCare); // depth
                cmd.Blit(m_Source.Identifier(), cameraTarget, m_BlitMaterial);
            }
            else
            {
                // TODO: Final blit pass should always blit to backbuffer. The first time we do we don't need to Load contents to tile.
                // We need to keep in the pipeline of first render pass to each render target to properly set load/store actions.
                // meanwhile we set to load so split screen case works.
                CoreUtils.SetRenderTarget(
                    cmd,
                    cameraTarget,
                    RenderBufferLoadAction.Load,
                    RenderBufferStoreAction.Store,
                    ClearFlag.None,
                    Color.black);

                Camera camera = cameraData.camera;
                // Draw a fullscreen mesh with identity matrices restricted to the camera viewport,
                // then restore the camera's view/projection for subsequent passes.
                cmd.SetViewProjectionMatrices(Matrix4x4.identity, Matrix4x4.identity);
                cmd.SetViewport(cameraData.pixelRect);
                cmd.DrawMesh(RenderingUtils.fullscreenMesh, Matrix4x4.identity, m_BlitMaterial);
                cmd.SetViewProjectionMatrices(camera.worldToCameraMatrix, camera.projectionMatrix);
            }
        }

        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);
    }
}
}

View File

@@ -0,0 +1,119 @@
using UnityEngine.Experimental.GlobalIllumination;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Profiling;
using Unity.Collections;
namespace UnityEngine.Rendering.Universal.Internal
{
// Render all tiled-based deferred lights.
internal class GBufferPass : ScriptableRenderPass
{
    // LightMode / UniversalMaterialType tag values recognized by the deferred path.
    static ShaderTagId s_ShaderTagLit = new ShaderTagId("Lit");
    static ShaderTagId s_ShaderTagSimpleLit = new ShaderTagId("SimpleLit");
    static ShaderTagId s_ShaderTagUnlit = new ShaderTagId("Unlit");
    static ShaderTagId s_ShaderTagUniversalGBuffer = new ShaderTagId("UniversalGBuffer");
    static ShaderTagId s_ShaderTagUniversalMaterialType = new ShaderTagId("UniversalMaterialType");

    ProfilingSampler m_ProfilingSampler = new ProfilingSampler("Render GBuffer");
    DeferredLights m_DeferredLights;

    // Parallel arrays: tag value i selects render state block i (stencil marks material type).
    ShaderTagId[] m_ShaderTagValues;
    RenderStateBlock[] m_RenderStateBlocks;

    FilteringSettings m_FilteringSettings;
    RenderStateBlock m_RenderStateBlock;

    /// <summary>
    /// Creates the GBuffer pass; builds the per-material-type stencil state table used by DrawRenderers.
    /// </summary>
    public GBufferPass(RenderPassEvent evt, RenderQueueRange renderQueueRange, LayerMask layerMask, StencilState stencilState, int stencilReference, DeferredLights deferredLights)
    {
        base.profilingSampler = new ProfilingSampler(nameof(GBufferPass));
        base.renderPassEvent = evt;

        m_DeferredLights = deferredLights;
        m_FilteringSettings = new FilteringSettings(renderQueueRange, layerMask);
        m_RenderStateBlock = new RenderStateBlock(RenderStateMask.Nothing);

        m_RenderStateBlock.stencilState = stencilState;
        m_RenderStateBlock.stencilReference = stencilReference;
        m_RenderStateBlock.mask = RenderStateMask.Stencil;

        m_ShaderTagValues = new ShaderTagId[4];
        m_ShaderTagValues[0] = s_ShaderTagLit;
        m_ShaderTagValues[1] = s_ShaderTagSimpleLit;
        m_ShaderTagValues[2] = s_ShaderTagUnlit;
        m_ShaderTagValues[3] = new ShaderTagId(); // Special catch all case for materials where UniversalMaterialType is not defined or the tag value doesn't match anything we know.

        m_RenderStateBlocks = new RenderStateBlock[4];
        m_RenderStateBlocks[0] = DeferredLights.OverwriteStencil(m_RenderStateBlock, (int)StencilUsage.MaterialMask, (int)StencilUsage.MaterialLit);
        m_RenderStateBlocks[1] = DeferredLights.OverwriteStencil(m_RenderStateBlock, (int)StencilUsage.MaterialMask, (int)StencilUsage.MaterialSimpleLit);
        m_RenderStateBlocks[2] = DeferredLights.OverwriteStencil(m_RenderStateBlock, (int)StencilUsage.MaterialMask, (int)StencilUsage.MaterialUnlit);
        // Unknown material types fall back to the Lit stencil marking.
        m_RenderStateBlocks[3] = m_RenderStateBlocks[0];
    }

    // Allocates every gbuffer slice except the lighting buffer and binds the MRT setup.
    public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor)
    {
        RenderTargetHandle[] gbufferAttachments = m_DeferredLights.GbufferAttachments;

        // Create and declare the render targets used in the pass
        for (int i = 0; i < gbufferAttachments.Length; ++i)
        {
            // Lighting buffer has already been declared with line ConfigureCameraTarget(m_ActiveCameraColorAttachment.Identifier(), ...) in DeferredRenderer.Setup
            if (i != m_DeferredLights.GBufferLightingIndex)
            {
                RenderTextureDescriptor gbufferSlice = cameraTextureDescriptor;
                gbufferSlice.depthBufferBits = 0; // make sure no depth surface is actually created
                gbufferSlice.stencilFormat = GraphicsFormat.None;
                gbufferSlice.graphicsFormat = m_DeferredLights.GetGBufferFormat(i);
                cmd.GetTemporaryRT(m_DeferredLights.GbufferAttachments[i].id, gbufferSlice);
            }
        }

        ConfigureTarget(m_DeferredLights.GbufferAttachmentIdentifiers, m_DeferredLights.DepthAttachmentIdentifier);

        // We must explicitely specify we don't want any clear to avoid unwanted side-effects.
        // ScriptableRenderer may still implicitely force a clear the first time the camera color/depth targets are bound.
        ConfigureClear(ClearFlag.None, Color.black);
    }

    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        CommandBuffer gbufferCommands = CommandBufferPool.Get();
        using (new ProfilingScope(gbufferCommands, m_ProfilingSampler))
        {
            // User can stack several scriptable renderers during rendering but deferred renderer should only lit pixels added by this gbuffer pass.
            // If we detect we are in such case (camera isin overlay mode), we clear the highest bits of stencil we have control of and use them to
            // mark what pixel to shade during deferred pass. Gbuffer will always mark pixels using their material types.
            if (m_DeferredLights.IsOverlay)
                m_DeferredLights.ClearStencilPartial(gbufferCommands);

            context.ExecuteCommandBuffer(gbufferCommands);
            gbufferCommands.Clear();

            ref CameraData cameraData = ref renderingData.cameraData;
            Camera camera = cameraData.camera;
            ShaderTagId lightModeTag = s_ShaderTagUniversalGBuffer;
            DrawingSettings drawingSettings = CreateDrawingSettings(lightModeTag, ref renderingData, renderingData.cameraData.defaultOpaqueSortFlags);
            ShaderTagId universalMaterialTypeTag = s_ShaderTagUniversalMaterialType;

            // DrawRenderers variant that picks a RenderStateBlock per renderer based on its
            // UniversalMaterialType tag value, using the tables built in the constructor.
            NativeArray<ShaderTagId> tagValues = new NativeArray<ShaderTagId>(m_ShaderTagValues, Allocator.Temp);
            NativeArray<RenderStateBlock> stateBlocks = new NativeArray<RenderStateBlock>(m_RenderStateBlocks, Allocator.Temp);

            context.DrawRenderers(renderingData.cullResults, ref drawingSettings, ref m_FilteringSettings, universalMaterialTypeTag, false, tagValues, stateBlocks);

            tagValues.Dispose();
            stateBlocks.Dispose();

            // Render objects that did not match any shader pass with error shader
            RenderingUtils.RenderObjectsWithError(context, ref renderingData.cullResults, camera, m_FilteringSettings, SortingCriteria.None);
        }
        context.ExecuteCommandBuffer(gbufferCommands);
        CommandBufferPool.Release(gbufferCommands);
    }

    // Releases the temporary gbuffer slices allocated in Configure (the lighting buffer is owned elsewhere).
    public override void OnCameraCleanup(CommandBuffer cmd)
    {
        RenderTargetHandle[] gbufferAttachments = m_DeferredLights.GbufferAttachments;
        for (int i = 0; i < gbufferAttachments.Length; ++i)
            if (i != m_DeferredLights.GBufferLightingIndex)
                cmd.ReleaseTemporaryRT(gbufferAttachments[i].id);
    }
}
}

View File

@@ -0,0 +1,21 @@
namespace UnityEngine.Rendering.Universal
{
/// <summary>
/// Invokes OnRenderObject callback
/// </summary>
internal class InvokeOnRenderObjectCallbackPass : ScriptableRenderPass
{
    /// <summary>
    /// Creates the pass, scheduled at <paramref name="evt"/>.
    /// </summary>
    public InvokeOnRenderObjectCallbackPass(RenderPassEvent evt)
    {
        renderPassEvent = evt;
        base.profilingSampler = new ProfilingSampler(nameof(InvokeOnRenderObjectCallbackPass));
    }

    /// <inheritdoc/>
    // Simply forwards to the context so MonoBehaviour.OnRenderObject callbacks fire at this point in the frame.
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
        => context.InvokeOnRenderObjectCallback();
}
}

View File

@@ -0,0 +1,262 @@
using System;
namespace UnityEngine.Rendering.Universal.Internal
{
/// <summary>
/// Renders a shadow map for the main Light.
/// </summary>
public class MainLightShadowCasterPass : ScriptableRenderPass
{
    // Cached shader property IDs for the main-light shadow constant buffer (filled in the constructor).
    private static class MainLightShadowConstantBuffer
    {
        public static int _WorldToShadow;
        public static int _CascadeShadowSplitSpheres0;
        public static int _CascadeShadowSplitSpheres1;
        public static int _CascadeShadowSplitSpheres2;
        public static int _CascadeShadowSplitSpheres3;
        public static int _CascadeShadowSplitSphereRadii;
        public static int _ShadowOffset0;
        public static int _ShadowOffset1;
        public static int _ShadowOffset2;
        public static int _ShadowOffset3;
        public static int _ShadowmapSize;
    }

    const int k_MaxCascades = 4;
    const int k_ShadowmapBufferBits = 16;
    Vector4 m_MainLightShadowParams;
    int m_ShadowmapWidth;
    int m_ShadowmapHeight;
    int m_ShadowCasterCascadesCount;
    bool m_SupportsBoxFilterForShadows;

    RenderTargetHandle m_MainLightShadowmap;
    RenderTexture m_MainLightShadowmapTexture;

    // +1 slot: a no-op matrix guards against out-of-bounds cascade indices in the shader (see SetupMainLightShadowReceiverConstants).
    Matrix4x4[] m_MainLightShadowMatrices;
    ShadowSliceData[] m_CascadeSlices;
    Vector4[] m_CascadeSplitDistances;

    ProfilingSampler m_ProfilingSetupSampler = new ProfilingSampler("Setup Main Shadowmap");

    public MainLightShadowCasterPass(RenderPassEvent evt)
    {
        base.profilingSampler = new ProfilingSampler(nameof(MainLightShadowCasterPass));
        renderPassEvent = evt;

        m_MainLightShadowMatrices = new Matrix4x4[k_MaxCascades + 1];
        m_CascadeSlices = new ShadowSliceData[k_MaxCascades];
        m_CascadeSplitDistances = new Vector4[k_MaxCascades];

        MainLightShadowConstantBuffer._WorldToShadow = Shader.PropertyToID("_MainLightWorldToShadow");
        MainLightShadowConstantBuffer._CascadeShadowSplitSpheres0 = Shader.PropertyToID("_CascadeShadowSplitSpheres0");
        MainLightShadowConstantBuffer._CascadeShadowSplitSpheres1 = Shader.PropertyToID("_CascadeShadowSplitSpheres1");
        MainLightShadowConstantBuffer._CascadeShadowSplitSpheres2 = Shader.PropertyToID("_CascadeShadowSplitSpheres2");
        MainLightShadowConstantBuffer._CascadeShadowSplitSpheres3 = Shader.PropertyToID("_CascadeShadowSplitSpheres3");
        MainLightShadowConstantBuffer._CascadeShadowSplitSphereRadii = Shader.PropertyToID("_CascadeShadowSplitSphereRadii");
        MainLightShadowConstantBuffer._ShadowOffset0 = Shader.PropertyToID("_MainLightShadowOffset0");
        MainLightShadowConstantBuffer._ShadowOffset1 = Shader.PropertyToID("_MainLightShadowOffset1");
        MainLightShadowConstantBuffer._ShadowOffset2 = Shader.PropertyToID("_MainLightShadowOffset2");
        MainLightShadowConstantBuffer._ShadowOffset3 = Shader.PropertyToID("_MainLightShadowOffset3");
        MainLightShadowConstantBuffer._ShadowmapSize = Shader.PropertyToID("_MainLightShadowmapSize");

        m_MainLightShadowmap.Init("_MainLightShadowmapTexture");
        m_SupportsBoxFilterForShadows = Application.isMobilePlatform || SystemInfo.graphicsDeviceType == GraphicsDeviceType.Switch;
    }

    /// <summary>
    /// Gathers the data needed to render the main-light shadowmap this frame.
    /// Returns false (pass should be skipped) when shadows are unsupported, there is no
    /// main light, the light casts no shadows, or cascade extraction fails.
    /// </summary>
    public bool Setup(ref RenderingData renderingData)
    {
        using var profScope = new ProfilingScope(null, m_ProfilingSetupSampler);

        if (!renderingData.shadowData.supportsMainLightShadows)
            return false;

        Clear();
        int shadowLightIndex = renderingData.lightData.mainLightIndex;
        if (shadowLightIndex == -1)
            return false;

        VisibleLight shadowLight = renderingData.lightData.visibleLights[shadowLightIndex];
        Light light = shadowLight.light;
        if (light.shadows == LightShadows.None)
            return false;

        // Warn but continue: the pass still renders even for a non-directional main light.
        if (shadowLight.lightType != LightType.Directional)
        {
            Debug.LogWarning("Only directional lights are supported as main light.");
        }

        Bounds bounds;
        if (!renderingData.cullResults.GetShadowCasterBounds(shadowLightIndex, out bounds))
            return false;

        m_ShadowCasterCascadesCount = renderingData.shadowData.mainLightShadowCascadesCount;

        int shadowResolution = ShadowUtils.GetMaxTileResolutionInAtlas(renderingData.shadowData.mainLightShadowmapWidth,
            renderingData.shadowData.mainLightShadowmapHeight, m_ShadowCasterCascadesCount);
        m_ShadowmapWidth = renderingData.shadowData.mainLightShadowmapWidth;
        // With exactly 2 cascades only half the atlas height is used
        // (presumably the two tiles sit side by side — TODO confirm against ShadowUtils tile layout).
        m_ShadowmapHeight = (m_ShadowCasterCascadesCount == 2) ?
            renderingData.shadowData.mainLightShadowmapHeight >> 1 :
            renderingData.shadowData.mainLightShadowmapHeight;

        for (int cascadeIndex = 0; cascadeIndex < m_ShadowCasterCascadesCount; ++cascadeIndex)
        {
            bool success = ShadowUtils.ExtractDirectionalLightMatrix(ref renderingData.cullResults, ref renderingData.shadowData,
                shadowLightIndex, cascadeIndex, m_ShadowmapWidth, m_ShadowmapHeight, shadowResolution, light.shadowNearPlane,
                out m_CascadeSplitDistances[cascadeIndex], out m_CascadeSlices[cascadeIndex]);

            if (!success)
                return false;
        }

        m_MainLightShadowParams = ShadowUtils.GetMainLightShadowParams(ref renderingData);

        return true;
    }

    // Allocates the temporary shadowmap texture and binds it, cleared fully.
    public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor)
    {
        m_MainLightShadowmapTexture = ShadowUtils.GetTemporaryShadowTexture(m_ShadowmapWidth,
            m_ShadowmapHeight, k_ShadowmapBufferBits);
        ConfigureTarget(new RenderTargetIdentifier(m_MainLightShadowmapTexture));
        ConfigureClear(ClearFlag.All, Color.black);
    }

    /// <inheritdoc/>
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        RenderMainLightCascadeShadowmap(ref context, ref renderingData.cullResults, ref renderingData.lightData, ref renderingData.shadowData);
    }

    /// <inheritdoc/>
    public override void OnCameraCleanup(CommandBuffer cmd)
    {
        if (cmd == null)
            throw new ArgumentNullException("cmd");

        if (m_MainLightShadowmapTexture)
        {
            RenderTexture.ReleaseTemporary(m_MainLightShadowmapTexture);
            m_MainLightShadowmapTexture = null;
        }
    }

    // Resets all per-frame state before Setup repopulates it.
    void Clear()
    {
        m_MainLightShadowmapTexture = null;

        for (int i = 0; i < m_MainLightShadowMatrices.Length; ++i)
            m_MainLightShadowMatrices[i] = Matrix4x4.identity;

        for (int i = 0; i < m_CascadeSplitDistances.Length; ++i)
            m_CascadeSplitDistances[i] = new Vector4(0.0f, 0.0f, 0.0f, 0.0f);

        for (int i = 0; i < m_CascadeSlices.Length; ++i)
            m_CascadeSlices[i].Clear();
    }

    // Renders each cascade into its atlas tile, then sets the global shadow keywords
    // and receiver constants for the rest of the frame.
    void RenderMainLightCascadeShadowmap(ref ScriptableRenderContext context, ref CullingResults cullResults, ref LightData lightData, ref ShadowData shadowData)
    {
        int shadowLightIndex = lightData.mainLightIndex;
        if (shadowLightIndex == -1)
            return;

        VisibleLight shadowLight = lightData.visibleLights[shadowLightIndex];

        // NOTE: Do NOT mix ProfilingScope with named CommandBuffers i.e. CommandBufferPool.Get("name").
        // Currently there's an issue which results in mismatched markers.
        CommandBuffer cmd = CommandBufferPool.Get();
        using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.MainLightShadow)))
        {
            var settings = new ShadowDrawingSettings(cullResults, shadowLightIndex);

            for (int cascadeIndex = 0; cascadeIndex < m_ShadowCasterCascadesCount; ++cascadeIndex)
            {
                settings.splitData = m_CascadeSlices[cascadeIndex].splitData;
                Vector4 shadowBias = ShadowUtils.GetShadowBias(ref shadowLight, shadowLightIndex, ref shadowData, m_CascadeSlices[cascadeIndex].projectionMatrix, m_CascadeSlices[cascadeIndex].resolution);
                ShadowUtils.SetupShadowCasterConstantBuffer(cmd, ref shadowLight, shadowBias);
                // Main light is directional, never a punctual (point/spot) light.
                CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.CastingPunctualLightShadow, false);
                ShadowUtils.RenderShadowSlice(cmd, ref context, ref m_CascadeSlices[cascadeIndex],
                    ref settings, m_CascadeSlices[cascadeIndex].projectionMatrix, m_CascadeSlices[cascadeIndex].viewMatrix);
            }

            bool softShadows = shadowLight.light.shadows == LightShadows.Soft && shadowData.supportsSoftShadows;
            // Exactly one of these two keywords is enabled, depending on cascade count.
            CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.MainLightShadows, shadowData.mainLightShadowCascadesCount == 1);
            CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.MainLightShadowCascades, shadowData.mainLightShadowCascadesCount > 1);
            CoreUtils.SetKeyword(cmd, ShaderKeywordStrings.SoftShadows, softShadows);

            SetupMainLightShadowReceiverConstants(cmd, shadowLight, shadowData.supportsSoftShadows);
        }
        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);
    }

    // Uploads the shadow receiver constants (matrices, cascade spheres, filter offsets) used when sampling the shadowmap.
    void SetupMainLightShadowReceiverConstants(CommandBuffer cmd, VisibleLight shadowLight, bool supportsSoftShadows)
    {
        int cascadeCount = m_ShadowCasterCascadesCount;
        for (int i = 0; i < cascadeCount; ++i)
            m_MainLightShadowMatrices[i] = m_CascadeSlices[i].shadowTransform;

        // We setup and additional a no-op WorldToShadow matrix in the last index
        // because the ComputeCascadeIndex function in Shadows.hlsl can return an index
        // out of bounds. (position not inside any cascade) and we want to avoid branching
        Matrix4x4 noOpShadowMatrix = Matrix4x4.zero;
        noOpShadowMatrix.m22 = (SystemInfo.usesReversedZBuffer) ? 1.0f : 0.0f;
        for (int i = cascadeCount; i <= k_MaxCascades; ++i)
            m_MainLightShadowMatrices[i] = noOpShadowMatrix;

        float invShadowAtlasWidth = 1.0f / m_ShadowmapWidth;
        float invShadowAtlasHeight = 1.0f / m_ShadowmapHeight;
        float invHalfShadowAtlasWidth = 0.5f * invShadowAtlasWidth;
        float invHalfShadowAtlasHeight = 0.5f * invShadowAtlasHeight;

        cmd.SetGlobalTexture(m_MainLightShadowmap.id, m_MainLightShadowmapTexture);
        cmd.SetGlobalMatrixArray(MainLightShadowConstantBuffer._WorldToShadow, m_MainLightShadowMatrices);
        ShadowUtils.SetupShadowReceiverConstantBuffer(cmd, m_MainLightShadowParams);

        // Cascade split spheres are only relevant when there is more than one cascade.
        if (m_ShadowCasterCascadesCount > 1)
        {
            cmd.SetGlobalVector(MainLightShadowConstantBuffer._CascadeShadowSplitSpheres0,
                m_CascadeSplitDistances[0]);
            cmd.SetGlobalVector(MainLightShadowConstantBuffer._CascadeShadowSplitSpheres1,
                m_CascadeSplitDistances[1]);
            cmd.SetGlobalVector(MainLightShadowConstantBuffer._CascadeShadowSplitSpheres2,
                m_CascadeSplitDistances[2]);
            cmd.SetGlobalVector(MainLightShadowConstantBuffer._CascadeShadowSplitSpheres3,
                m_CascadeSplitDistances[3]);
            // Radii are passed squared (w holds the sphere radius) so the shader can compare squared distances.
            cmd.SetGlobalVector(MainLightShadowConstantBuffer._CascadeShadowSplitSphereRadii, new Vector4(
                m_CascadeSplitDistances[0].w * m_CascadeSplitDistances[0].w,
                m_CascadeSplitDistances[1].w * m_CascadeSplitDistances[1].w,
                m_CascadeSplitDistances[2].w * m_CascadeSplitDistances[2].w,
                m_CascadeSplitDistances[3].w * m_CascadeSplitDistances[3].w));
        }

        // Inside shader soft shadows are controlled through global keyword.
        // If any additional light has soft shadows it will force soft shadows on main light too.
        // As it is not trivial finding out which additional light has soft shadows, we will pass main light properties if soft shadows are supported.
        // This workaround will be removed once we will support soft shadows per light.
        if (supportsSoftShadows)
        {
            if (m_SupportsBoxFilterForShadows)
            {
                cmd.SetGlobalVector(MainLightShadowConstantBuffer._ShadowOffset0,
                    new Vector4(-invHalfShadowAtlasWidth, -invHalfShadowAtlasHeight, 0.0f, 0.0f));
                cmd.SetGlobalVector(MainLightShadowConstantBuffer._ShadowOffset1,
                    new Vector4(invHalfShadowAtlasWidth, -invHalfShadowAtlasHeight, 0.0f, 0.0f));
                cmd.SetGlobalVector(MainLightShadowConstantBuffer._ShadowOffset2,
                    new Vector4(-invHalfShadowAtlasWidth, invHalfShadowAtlasHeight, 0.0f, 0.0f));
                cmd.SetGlobalVector(MainLightShadowConstantBuffer._ShadowOffset3,
                    new Vector4(invHalfShadowAtlasWidth, invHalfShadowAtlasHeight, 0.0f, 0.0f));
            }

            // Currently only used when !SHADER_API_MOBILE but risky to not set them as it's generic
            // enough so custom shaders might use it.
            cmd.SetGlobalVector(MainLightShadowConstantBuffer._ShadowmapSize, new Vector4(invShadowAtlasWidth,
                invShadowAtlasHeight,
                m_ShadowmapWidth, m_ShadowmapHeight));
        }
    }
};
}

View File

@@ -0,0 +1,138 @@
using System.Collections.Generic;
using UnityEngine.Rendering.Universal;
using UnityEngine.Rendering;
using UnityEngine.Scripting.APIUpdating;
namespace UnityEngine.Experimental.Rendering.Universal
{
[MovedFrom("UnityEngine.Experimental.Rendering.LWRP")] public class RenderObjectsPass : ScriptableRenderPass
{
RenderQueueType renderQueueType;
FilteringSettings m_FilteringSettings;
RenderObjects.CustomCameraSettings m_CameraSettings;
string m_ProfilerTag;
ProfilingSampler m_ProfilingSampler;
public Material overrideMaterial { get; set; }
public int overrideMaterialPassIndex { get; set; }
List<ShaderTagId> m_ShaderTagIdList = new List<ShaderTagId>();
public void SetDetphState(bool writeEnabled, CompareFunction function = CompareFunction.Less)
{
m_RenderStateBlock.mask |= RenderStateMask.Depth;
m_RenderStateBlock.depthState = new DepthState(writeEnabled, function);
}
public void SetStencilState(int reference, CompareFunction compareFunction, StencilOp passOp, StencilOp failOp, StencilOp zFailOp)
{
StencilState stencilState = StencilState.defaultValue;
stencilState.enabled = true;
stencilState.SetCompareFunction(compareFunction);
stencilState.SetPassOperation(passOp);
stencilState.SetFailOperation(failOp);
stencilState.SetZFailOperation(zFailOp);
m_RenderStateBlock.mask |= RenderStateMask.Stencil;
m_RenderStateBlock.stencilReference = reference;
m_RenderStateBlock.stencilState = stencilState;
}
RenderStateBlock m_RenderStateBlock;
public RenderObjectsPass(string profilerTag, RenderPassEvent renderPassEvent, string[] shaderTags, RenderQueueType renderQueueType, int layerMask, RenderObjects.CustomCameraSettings cameraSettings)
{
base.profilingSampler = new ProfilingSampler(nameof(RenderObjectsPass));
m_ProfilerTag = profilerTag;
m_ProfilingSampler = new ProfilingSampler(profilerTag);
this.renderPassEvent = renderPassEvent;
this.renderQueueType = renderQueueType;
this.overrideMaterial = null;
this.overrideMaterialPassIndex = 0;
RenderQueueRange renderQueueRange = (renderQueueType == RenderQueueType.Transparent)
? RenderQueueRange.transparent
: RenderQueueRange.opaque;
m_FilteringSettings = new FilteringSettings(renderQueueRange, layerMask);
if (shaderTags != null && shaderTags.Length > 0)
{
foreach (var passName in shaderTags)
m_ShaderTagIdList.Add(new ShaderTagId(passName));
}
else
{
m_ShaderTagIdList.Add(new ShaderTagId("SRPDefaultUnlit"));
m_ShaderTagIdList.Add(new ShaderTagId("UniversalForward"));
m_ShaderTagIdList.Add(new ShaderTagId("UniversalForwardOnly"));
m_ShaderTagIdList.Add(new ShaderTagId("LightweightForward"));
}
m_RenderStateBlock = new RenderStateBlock(RenderStateMask.Nothing);
m_CameraSettings = cameraSettings;
}
internal RenderObjectsPass(URPProfileId profileId, RenderPassEvent renderPassEvent, string[] shaderTags, RenderQueueType renderQueueType, int layerMask, RenderObjects.CustomCameraSettings cameraSettings)
: this(profileId.GetType().Name, renderPassEvent, shaderTags, renderQueueType, layerMask, cameraSettings)
{
m_ProfilingSampler = ProfilingSampler.Get(profileId);
}
/// <summary>
/// Draws the filtered renderers, optionally overriding the camera's view/projection
/// matrices with the custom camera settings supplied at construction.
/// </summary>
/// <param name="context">Render context used to schedule draws and command buffers.</param>
/// <param name="renderingData">Per-frame rendering state (camera data, culling results, etc.).</param>
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
    // Transparents sort back-to-front; opaques use the camera's default sort flags.
    SortingCriteria sortingCriteria = (renderQueueType == RenderQueueType.Transparent)
        ? SortingCriteria.CommonTransparent
        : renderingData.cameraData.defaultOpaqueSortFlags;
    DrawingSettings drawingSettings = CreateDrawingSettings(m_ShaderTagIdList, ref renderingData, sortingCriteria);
    drawingSettings.overrideMaterial = overrideMaterial;
    drawingSettings.overrideMaterialPassIndex = overrideMaterialPassIndex;
    ref CameraData cameraData = ref renderingData.cameraData;
    Camera camera = cameraData.camera;
    // In case of camera stacking we need to take the viewport rect from base camera
    Rect pixelRect = renderingData.cameraData.pixelRect;
    float cameraAspect = (float)pixelRect.width / (float)pixelRect.height;
    // NOTE: Do NOT mix ProfilingScope with named CommandBuffers i.e. CommandBufferPool.Get("name").
    // Currently there's an issue which results in mismatched markers.
    CommandBuffer cmd = CommandBufferPool.Get();
    using (new ProfilingScope(cmd, m_ProfilingSampler))
    {
        if (m_CameraSettings.overrideCamera)
        {
            if (cameraData.xr.enabled)
            {
                // Matrix overrides are unsupported in stereo; warn and draw with stock matrices.
                Debug.LogWarning("RenderObjects pass is configured to override camera matrices. While rendering in stereo camera matrices cannot be overridden.");
            }
            else
            {
                // Build a projection from the override FOV but keep the camera's clip planes,
                // then shift the view by the configured offset (column 3 holds the translation).
                Matrix4x4 projectionMatrix = Matrix4x4.Perspective(m_CameraSettings.cameraFieldOfView, cameraAspect,
                    camera.nearClipPlane, camera.farClipPlane);
                projectionMatrix = GL.GetGPUProjectionMatrix(projectionMatrix, cameraData.IsCameraProjectionMatrixFlipped());
                Matrix4x4 viewMatrix = cameraData.GetViewMatrix();
                Vector4 cameraTranslation = viewMatrix.GetColumn(3);
                viewMatrix.SetColumn(3, cameraTranslation + m_CameraSettings.offset);
                RenderingUtils.SetViewAndProjectionMatrices(cmd, viewMatrix, projectionMatrix, false);
            }
        }
        // Flush the matrix override (if any) to the context BEFORE scheduling the draw,
        // so DrawRenderers runs with the overridden matrices.
        context.ExecuteCommandBuffer(cmd);
        cmd.Clear();
        context.DrawRenderers(renderingData.cullResults, ref drawingSettings, ref m_FilteringSettings,
            ref m_RenderStateBlock);
        // Restore the camera's own matrices; recorded into cmd here, executed below after
        // the profiling scope closes (i.e. after the draw above has been scheduled).
        if (m_CameraSettings.overrideCamera && m_CameraSettings.restoreCamera && !cameraData.xr.enabled)
        {
            RenderingUtils.SetViewAndProjectionMatrices(cmd, cameraData.GetViewMatrix(), cameraData.GetGPUProjectionMatrix(), false);
        }
    }
    context.ExecuteCommandBuffer(cmd);
    CommandBufferPool.Release(cmd);
}
}
}

View File

@@ -0,0 +1,65 @@
namespace UnityEngine.Rendering.Universal
{
    /// <summary>
    /// Copies the camera depth attachment back to the camera target so additional
    /// editor (Scene view) rendering can depth-test against the frame's depth.
    /// </summary>
    internal class SceneViewDepthCopyPass : ScriptableRenderPass
    {
        // Depth texture produced earlier in the frame; bound as _CameraDepthAttachment for the copy.
        private RenderTargetHandle source { get; set; }
        Material m_CopyDepthMaterial;
        const string m_ProfilerTag = "Copy Depth for Scene View";
        private static readonly ProfilingSampler m_ProfilingSampler = new ProfilingSampler(m_ProfilerTag);

        public SceneViewDepthCopyPass(RenderPassEvent evt, Material copyDepthMaterial)
        {
            base.profilingSampler = new ProfilingSampler(nameof(SceneViewDepthCopyPass));
            m_CopyDepthMaterial = copyDepthMaterial;
            renderPassEvent = evt;
        }

        /// <summary>Configures the depth source to copy from.</summary>
        public void Setup(RenderTargetHandle source)
        {
            this.source = source;
        }

        /// <inheritdoc/>
        public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
        {
            if (m_CopyDepthMaterial == null)
            {
                // BUGFIX: the original passed the (null) material itself as {0}, so the
                // placeholder formatted as empty and the message named nothing. Log the
                // field name instead so the missing resource is identifiable.
                Debug.LogErrorFormat("Missing {0}. {1} render pass will not execute. Check for missing reference in the renderer resources.", nameof(m_CopyDepthMaterial), GetType().Name);
                return;
            }
            // Restore Render target for additional editor rendering.
            // Note: Scene view camera always perform depth prepass
            CommandBuffer cmd = CommandBufferPool.Get();
            using (new ProfilingScope(cmd, m_ProfilingSampler))
            {
                CoreUtils.SetRenderTarget(cmd, BuiltinRenderTextureType.CameraTarget);
                cmd.SetGlobalTexture("_CameraDepthAttachment", source.Identifier());
                // Select the non-MSAA sampling variant of the copy-depth shader.
                cmd.EnableShaderKeyword(ShaderKeywordStrings.DepthNoMsaa);
                cmd.DisableShaderKeyword(ShaderKeywordStrings.DepthMsaa2);
                cmd.DisableShaderKeyword(ShaderKeywordStrings.DepthMsaa4);
                cmd.DisableShaderKeyword(ShaderKeywordStrings.DepthMsaa8);
                // Blit has logic to flip projection matrix when rendering to render texture.
                // Currently the y-flip is handled in CopyDepthPass.hlsl by checking _ProjectionParams.x
                // If you replace this Blit with a Draw* that sets projection matrix double check
                // to also update shader.
                // scaleBias.x = flipSign
                // scaleBias.y = scale
                // scaleBias.z = bias
                // scaleBias.w = unused
                ref CameraData cameraData = ref renderingData.cameraData;
                float flipSign = (cameraData.IsCameraProjectionMatrixFlipped()) ? -1.0f : 1.0f;
                Vector4 scaleBiasRt = (flipSign < 0.0f)
                    ? new Vector4(flipSign, 1.0f, -1.0f, 1.0f)
                    : new Vector4(flipSign, 0.0f, 1.0f, 1.0f);
                cmd.SetGlobalVector(ShaderPropertyId.scaleBiasRt, scaleBiasRt);
                cmd.DrawMesh(RenderingUtils.fullscreenMesh, Matrix4x4.identity, m_CopyDepthMaterial);
            }
            context.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);
        }
    }
}

Some files were not shown because too many files have changed in this diff Show More