diff --git a/Packages/com.unity.render-pipelines.core/Editor-PrivateShared/Tools/Converter/RenderPipelineConverterAssetItem.cs b/Packages/com.unity.render-pipelines.core/Editor-PrivateShared/Tools/Converter/RenderPipelineConverterAssetItem.cs index de8db770eb8..6ea909ec366 100644 --- a/Packages/com.unity.render-pipelines.core/Editor-PrivateShared/Tools/Converter/RenderPipelineConverterAssetItem.cs +++ b/Packages/com.unity.render-pipelines.core/Editor-PrivateShared/Tools/Converter/RenderPipelineConverterAssetItem.cs @@ -30,7 +30,13 @@ public string guid public string GlobalObjectId => m_GlobalObjectId; - public string name => System.IO.Path.GetFileNameWithoutExtension(assetPath); + [SerializeField] + private string m_Name; + public string name + { + get => string.IsNullOrEmpty(m_Name) ? System.IO.Path.GetFileNameWithoutExtension(assetPath) : m_Name; + set => m_Name = value; + } [SerializeField] private string m_Info; diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.LightTransport.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.LightTransport.cs index 2dd1f2683fb..561a3569f50 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.LightTransport.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.LightTransport.cs @@ -670,7 +670,7 @@ internal static void BakeAdjustmentVolume(ProbeVolumeBakingSet bakingSet, ProbeA failed |= !layerMaskJob.Step(); // Bake probe SH - s_BakeData.InitLightingJob(m_BakingSet, uniquePositions, BakeType.ApvOnly); + s_BakeData.InitLightingJob(bakingSet, touchup, uniquePositions, BakeType.ApvOnly); LightingBaker lightingJob = s_BakeData.lightingJob; while (!failed && lightingJob.currentStep < lightingJob.stepCount) failed |= !lightingJob.Step(); @@ -680,7 +680,7 @@ internal static void BakeAdjustmentVolume(ProbeVolumeBakingSet bakingSet, ProbeA foreach ((int uniqueProbeIndex, int 
cellIndex, int i) in bakedProbes) { ref var cell = ref bakingCells[cellIndex]; - cell.SetBakedData(m_BakingSet, m_BakingBatch, cellVolumes[cellIndex], i, uniqueProbeIndex, + cell.SetBakedData(bakingSet, m_BakingBatch, cellVolumes[cellIndex], i, uniqueProbeIndex, lightingJob.irradiance[uniqueProbeIndex], lightingJob.validity[uniqueProbeIndex], layerMaskJob.renderingLayerMasks, virtualOffsetJob.offsets, skyOcclusionJob.occlusion, skyOcclusionJob.encodedDirections, lightingJob.occlusion); @@ -696,8 +696,8 @@ internal static void BakeAdjustmentVolume(ProbeVolumeBakingSet bakingSet, ProbeA { // Validate baking cells size before any global state modifications var chunkSizeInProbes = ProbeBrickPool.GetChunkSizeInProbeCount(); - var hasVirtualOffsets = m_BakingSet.settings.virtualOffsetSettings.useVirtualOffset; - var hasRenderingLayers = m_BakingSet.useRenderingLayers; + var hasVirtualOffsets = bakingSet.settings.virtualOffsetSettings.useVirtualOffset; + var hasRenderingLayers = bakingSet.useRenderingLayers; if (ValidateBakingCellsSize(bakingCells, chunkSizeInProbes, hasVirtualOffsets, hasRenderingLayers)) { @@ -707,8 +707,8 @@ internal static void BakeAdjustmentVolume(ProbeVolumeBakingSet bakingSet, ProbeA ComputeValidityMasks(cell); } - // Attempt to write the result to disk - if (WriteBakingCells(bakingCells)) + // Attempt to write the result to disk. 
+ if (WriteBakingCells(bakingSet, bakingCells)) { // Reload everything AssetDatabase.SaveAssets(); diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Placement.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Placement.cs index 325d9f899f8..b6f66387d96 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Placement.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Placement.cs @@ -133,6 +133,15 @@ static NativeList RunPlacement(ProbeVolumeProfileInfo profileInfo, Prob { Debug.Assert(profileInfo != null); + // APV baking requires compute shader support that is not always available on OpenGL devices + if (SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLCore || + SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLES3) + { + Debug.LogError("Adaptive Probe Volume baking is not supported on OpenGL. Please switch to Direct3D, Vulkan, or Metal in Project Settings > Player > Other Settings > Graphics API."); + canceledByUser = true; + return new NativeList(Allocator.Temp); + } + // Overwrite loaded settings with data from profile. 
Note that the m_BakingSet.profile is already patched up if isFreezingPlacement float prevBrickSize = refVolume.MinBrickSize(); int prevMaxSubdiv = refVolume.GetMaxSubdivision(); diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Serialization.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Serialization.cs index 150525aeff7..a83ed59c3ef 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Serialization.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Serialization.cs @@ -740,8 +740,8 @@ static int CalculateSupportDataChunkSize(int chunkSizeInProbes, bool hasVirtualO int supportTouchupChunkSize = UnsafeUtility.SizeOf() * chunkSizeInProbes; int supportLayerMaskChunkSize = hasRenderingLayers ? UnsafeUtility.SizeOf() * chunkSizeInProbes : 0; int supportOffsetsChunkSize = hasVirtualOffsets ? UnsafeUtility.SizeOf() * chunkSizeInProbes : 0; - - return supportPositionChunkSize + supportValidityChunkSize + + + return supportPositionChunkSize + supportValidityChunkSize + supportOffsetsChunkSize + supportLayerMaskChunkSize + supportTouchupChunkSize; } @@ -760,7 +760,7 @@ static bool ValidateBakingCellsSize(BakingCell[] bakingCells, int chunkSizeInPro return true; int supportDataChunkSize = CalculateSupportDataChunkSize(chunkSizeInProbes, hasVirtualOffsets, hasRenderingLayers); - + // Calculate total chunks count - need to call AnalyzeBrickForIndirectionEntries to get shChunkCount // Create a copy to avoid modifying the original cells during validation var tempCells = new BakingCell[bakingCells.Length]; @@ -771,7 +771,7 @@ static bool ValidateBakingCellsSize(BakingCell[] bakingCells, int chunkSizeInPro AnalyzeBrickForIndirectionEntries(ref tempCells[i]); totalChunksCount += tempCells[i].shChunkCount; } - + // Perform the critical size check long supportDataTotalSize = (long)totalChunksCount * 
supportDataChunkSize; if (supportDataTotalSize > int.MaxValue) @@ -802,19 +802,20 @@ static void WriteNativeArray(System.IO.FileStream fs, NativeArray array) w /// CellSharedData: a binary flat file containing bricks data /// CellSupportData: a binary flat file containing debug data (stripped from player builds if building without debug shaders) /// - static unsafe bool WriteBakingCells(BakingCell[] bakingCells) + static unsafe bool WriteBakingCells(ProbeVolumeBakingSet bakingSet, BakingCell[] bakingCells) { - m_BakingSet.GetBlobFileNames(m_BakingSet.lightingScenario, out var cellDataFilename, out var cellBricksDataFilename, out var cellOptionalDataFilename, out var cellProbeOcclusionDataFilename, out var cellSharedDataFilename, out var cellSupportDataFilename); - - m_BakingSet.cellDescs = new SerializedDictionary(); - m_BakingSet.bakedMinDistanceBetweenProbes = m_ProfileInfo.minDistanceBetweenProbes; - m_BakingSet.bakedSimplificationLevels = m_ProfileInfo.simplificationLevels; - m_BakingSet.bakedProbeOffset = m_ProfileInfo.probeOffset; - m_BakingSet.bakedProbeOcclusion = false; - m_BakingSet.bakedSkyOcclusion = m_BakingSet.skyOcclusion; - m_BakingSet.bakedSkyShadingDirection = m_BakingSet.bakedSkyOcclusion && m_BakingSet.skyOcclusionShadingDirection; - m_BakingSet.bakedMaskCount = m_BakingSet.useRenderingLayers ? 
APVDefinitions.probeMaxRegionCount : 1; - m_BakingSet.bakedLayerMasks = m_BakingSet.ComputeRegionMasks(); + bakingSet.GetBlobFileNames(bakingSet.lightingScenario, out var cellDataFilename, out var cellBricksDataFilename, + out var cellOptionalDataFilename, out var cellProbeOcclusionDataFilename, out var cellSharedDataFilename, out var cellSupportDataFilename); + + bakingSet.cellDescs = new SerializedDictionary(); + bakingSet.bakedMinDistanceBetweenProbes = m_ProfileInfo.minDistanceBetweenProbes; + bakingSet.bakedSimplificationLevels = m_ProfileInfo.simplificationLevels; + bakingSet.bakedProbeOffset = m_ProfileInfo.probeOffset; + bakingSet.bakedProbeOcclusion = false; + bakingSet.bakedSkyOcclusion = bakingSet.skyOcclusion; + bakingSet.bakedSkyShadingDirection = bakingSet.bakedSkyOcclusion && bakingSet.skyOcclusionShadingDirection; + bakingSet.bakedMaskCount = bakingSet.useRenderingLayers ? APVDefinitions.probeMaxRegionCount : 1; + bakingSet.bakedLayerMasks = bakingSet.ComputeRegionMasks(); var cellSharedDataDescs = new SerializedDictionary(); var cellL0L1DataDescs = new SerializedDictionary(); @@ -823,12 +824,12 @@ static unsafe bool WriteBakingCells(BakingCell[] bakingCells) var cellBricksDescs = new SerializedDictionary(); var cellSupportDescs = new SerializedDictionary(); - var voSettings = m_BakingSet.settings.virtualOffsetSettings; + var voSettings = bakingSet.settings.virtualOffsetSettings; bool hasVirtualOffsets = voSettings.useVirtualOffset; - bool handlesSkyOcclusion = m_BakingSet.bakedSkyOcclusion; - bool handlesSkyShading = m_BakingSet.bakedSkyShadingDirection && m_BakingSet.bakedSkyShadingDirection; - bool hasRenderingLayers = m_BakingSet.useRenderingLayers; - int validityRegionCount = m_BakingSet.bakedMaskCount; + bool handlesSkyOcclusion = bakingSet.bakedSkyOcclusion; + bool handlesSkyShading = bakingSet.bakedSkyShadingDirection && bakingSet.bakedSkyShadingDirection; + bool hasRenderingLayers = bakingSet.useRenderingLayers; + int validityRegionCount = 
bakingSet.bakedMaskCount; for (var i = 0; i < bakingCells.Length; ++i) { @@ -836,9 +837,9 @@ static unsafe bool WriteBakingCells(BakingCell[] bakingCells) var bakingCell = bakingCells[i]; // If any cell had probe occlusion, the baking set has probe occlusion. - m_BakingSet.bakedProbeOcclusion |= bakingCell.probeOcclusion?.Length > 0; + bakingSet.bakedProbeOcclusion |= bakingCell.probeOcclusion?.Length > 0; - m_BakingSet.cellDescs.Add(bakingCell.index, new CellDesc + bakingSet.cellDescs.Add(bakingCell.index, new CellDesc { position = bakingCell.position, index = bakingCell.index, @@ -850,7 +851,7 @@ static unsafe bool WriteBakingCells(BakingCell[] bakingCells) bricksCount = bakingCell.bricks.Length, }); - m_BakingSet.maxSHChunkCount = Mathf.Max(m_BakingSet.maxSHChunkCount, bakingCell.shChunkCount); + bakingSet.maxSHChunkCount = Mathf.Max(bakingSet.maxSHChunkCount, bakingCell.shChunkCount); m_TotalCellCounts.Add(new CellCounts { @@ -872,8 +873,8 @@ static unsafe bool WriteBakingCells(BakingCell[] bakingCells) var L0L1TotalSize = m_TotalCellCounts.chunksCount * L0L1ChunkSize; using var probesL0L1 = new NativeArray(L0L1TotalSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); - m_BakingSet.L0ChunkSize = L0L1R1xChunkSize; - m_BakingSet.L1ChunkSize = L1ChunkSize; + bakingSet.L0ChunkSize = L0L1R1xChunkSize; + bakingSet.L1ChunkSize = L1ChunkSize; // CellOptionalData // L2 Data: 15 Coeffs stored in 4 byte4 textures. @@ -882,39 +883,39 @@ static unsafe bool WriteBakingCells(BakingCell[] bakingCells) var L2TotalSize = m_TotalCellCounts.chunksCount * L2ChunkSize; // 4 textures using var probesL2 = new NativeArray(L2TotalSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); - m_BakingSet.L2TextureChunkSize = L2TextureChunkSize; + bakingSet.L2TextureChunkSize = L2TextureChunkSize; // Probe occlusion data - int probeOcclusionChunkSize = m_BakingSet.bakedProbeOcclusion ? 
sizeof(byte) * 4 * chunkSizeInProbes : 0; // 4 unorm per probe + int probeOcclusionChunkSize = bakingSet.bakedProbeOcclusion ? sizeof(byte) * 4 * chunkSizeInProbes : 0; // 4 unorm per probe int probeOcclusionTotalSize = m_TotalCellCounts.chunksCount * probeOcclusionChunkSize; using var probeOcclusion = new NativeArray(probeOcclusionTotalSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); - m_BakingSet.ProbeOcclusionChunkSize = probeOcclusionChunkSize; + bakingSet.ProbeOcclusionChunkSize = probeOcclusionChunkSize; // CellSharedData - m_BakingSet.sharedValidityMaskChunkSize = sizeof(byte) * validityRegionCount * chunkSizeInProbes; - m_BakingSet.sharedSkyOcclusionL0L1ChunkSize = handlesSkyOcclusion ? sizeof(ushort) * 4 * chunkSizeInProbes : 0; - m_BakingSet.sharedSkyShadingDirectionIndicesChunkSize = handlesSkyShading ? sizeof(byte) * chunkSizeInProbes : 0; - m_BakingSet.sharedDataChunkSize = m_BakingSet.sharedValidityMaskChunkSize + m_BakingSet.sharedSkyOcclusionL0L1ChunkSize + m_BakingSet.sharedSkyShadingDirectionIndicesChunkSize; + bakingSet.sharedValidityMaskChunkSize = sizeof(byte) * validityRegionCount * chunkSizeInProbes; + bakingSet.sharedSkyOcclusionL0L1ChunkSize = handlesSkyOcclusion ? sizeof(ushort) * 4 * chunkSizeInProbes : 0; + bakingSet.sharedSkyShadingDirectionIndicesChunkSize = handlesSkyShading ? 
sizeof(byte) * chunkSizeInProbes : 0; + bakingSet.sharedDataChunkSize = bakingSet.sharedValidityMaskChunkSize + bakingSet.sharedSkyOcclusionL0L1ChunkSize + bakingSet.sharedSkyShadingDirectionIndicesChunkSize; - var sharedDataTotalSize = m_TotalCellCounts.chunksCount * m_BakingSet.sharedDataChunkSize; + var sharedDataTotalSize = m_TotalCellCounts.chunksCount * bakingSet.sharedDataChunkSize; using var sharedData = new NativeArray(sharedDataTotalSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); // Brick data using var bricks = new NativeArray(m_TotalCellCounts.bricksCount, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); // CellSupportData - use pure helper function for calculation - m_BakingSet.supportPositionChunkSize = UnsafeUtility.SizeOf() * chunkSizeInProbes; - m_BakingSet.supportValidityChunkSize = UnsafeUtility.SizeOf() * chunkSizeInProbes; - m_BakingSet.supportOffsetsChunkSize = hasVirtualOffsets ? UnsafeUtility.SizeOf() * chunkSizeInProbes : 0; - m_BakingSet.supportTouchupChunkSize = UnsafeUtility.SizeOf() * chunkSizeInProbes; - m_BakingSet.supportLayerMaskChunkSize = hasRenderingLayers ? UnsafeUtility.SizeOf() * chunkSizeInProbes : 0; - - m_BakingSet.supportDataChunkSize = CalculateSupportDataChunkSize(chunkSizeInProbes, hasVirtualOffsets, hasRenderingLayers); - long supportDataTotalSize = (long)m_TotalCellCounts.chunksCount * m_BakingSet.supportDataChunkSize; + bakingSet.supportPositionChunkSize = UnsafeUtility.SizeOf() * chunkSizeInProbes; + bakingSet.supportValidityChunkSize = UnsafeUtility.SizeOf() * chunkSizeInProbes; + bakingSet.supportOffsetsChunkSize = hasVirtualOffsets ? UnsafeUtility.SizeOf() * chunkSizeInProbes : 0; + bakingSet.supportTouchupChunkSize = UnsafeUtility.SizeOf() * chunkSizeInProbes; + bakingSet.supportLayerMaskChunkSize = hasRenderingLayers ? 
UnsafeUtility.SizeOf() * chunkSizeInProbes : 0; + + bakingSet.supportDataChunkSize = CalculateSupportDataChunkSize(chunkSizeInProbes, hasVirtualOffsets, hasRenderingLayers); + long supportDataTotalSize = (long)m_TotalCellCounts.chunksCount * bakingSet.supportDataChunkSize; using var supportData = new NativeArray((int)supportDataTotalSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); - var sceneStateHash = m_BakingSet.GetBakingHashCode(); + var sceneStateHash = bakingSet.GetBakingHashCode(); var startCounts = new CellCounts(); int sharedChunkOffset = 0; @@ -932,15 +933,15 @@ static unsafe bool WriteBakingCells(BakingCell[] bakingCells) for (var i = 0; i < bakingCells.Length; ++i) { var bakingCell = bakingCells[i]; - var cellDesc = m_BakingSet.cellDescs[bakingCell.index]; + var cellDesc = bakingSet.cellDescs[bakingCell.index]; var chunksCount = cellDesc.shChunkCount; - cellSharedDataDescs.Add(bakingCell.index, new StreamableCellDesc() { offset = startCounts.chunksCount * m_BakingSet.sharedDataChunkSize, elementCount = chunksCount }); + cellSharedDataDescs.Add(bakingCell.index, new StreamableCellDesc() { offset = startCounts.chunksCount * bakingSet.sharedDataChunkSize, elementCount = chunksCount }); cellL0L1DataDescs.Add(bakingCell.index, new StreamableCellDesc() { offset = startCounts.chunksCount * L0L1ChunkSize, elementCount = chunksCount }); cellL2DataDescs.Add(bakingCell.index, new StreamableCellDesc() { offset = startCounts.chunksCount * L2ChunkSize, elementCount = chunksCount }); cellProbeOcclusionDataDescs.Add(bakingCell.index, new StreamableCellDesc() { offset = startCounts.chunksCount * probeOcclusionChunkSize, elementCount = chunksCount }); cellBricksDescs.Add(bakingCell.index, new StreamableCellDesc() { offset = startCounts.bricksCount * sizeof(Brick), elementCount = cellDesc.bricksCount }); - cellSupportDescs.Add(bakingCell.index, new StreamableCellDesc() { offset = startCounts.chunksCount * m_BakingSet.supportDataChunkSize, elementCount 
= chunksCount }); + cellSupportDescs.Add(bakingCell.index, new StreamableCellDesc() { offset = startCounts.chunksCount * bakingSet.supportDataChunkSize, elementCount = chunksCount }); sceneStateHash = sceneStateHash * 23 + bakingCell.GetBakingHashCode(); @@ -954,14 +955,14 @@ static unsafe bool WriteBakingCells(BakingCell[] bakingCells) int cellL1BL1RzOffset = cellL1GL1RyOffset + chunksCount * L1ChunkSize; int validityMaskOffset = sharedChunkOffset; - int skyOcclusionL0L1Offset = validityMaskOffset + chunksCount * m_BakingSet.sharedValidityMaskChunkSize; - int skyShadingIndicesOffset = skyOcclusionL0L1Offset + chunksCount * m_BakingSet.sharedSkyOcclusionL0L1ChunkSize; + int skyOcclusionL0L1Offset = validityMaskOffset + chunksCount * bakingSet.sharedValidityMaskChunkSize; + int skyShadingIndicesOffset = skyOcclusionL0L1Offset + chunksCount * bakingSet.sharedSkyOcclusionL0L1ChunkSize; int positionOffset = supportChunkOffset; - int validityOffset = positionOffset + chunksCount * m_BakingSet.supportPositionChunkSize; - int touchupOffset = validityOffset + chunksCount * m_BakingSet.supportValidityChunkSize; - int layerOffset = touchupOffset + chunksCount * m_BakingSet.supportTouchupChunkSize; // This is optional - int offsetsOffset = layerOffset + chunksCount * m_BakingSet.supportLayerMaskChunkSize; // Keep last as it's optional. + int validityOffset = positionOffset + chunksCount * bakingSet.supportPositionChunkSize; + int touchupOffset = validityOffset + chunksCount * bakingSet.supportValidityChunkSize; + int layerOffset = touchupOffset + chunksCount * bakingSet.supportTouchupChunkSize; // This is optional + int offsetsOffset = layerOffset + chunksCount * bakingSet.supportLayerMaskChunkSize; // Keep last as it's optional. // Here we directly map each chunk to the layout of the 3D textures in order to be able to copy the data directly to the GPU. // The granularity at runtime is one chunk at a time currently so the temporary data loc used is sized accordingly. 
@@ -971,24 +972,24 @@ static unsafe bool WriteBakingCells(BakingCell[] bakingCells) NativeArray probesTargetL1GL1Ry = probesL0L1.GetSubArray(cellL1GL1RyOffset + chunkIndex * L1ChunkSize, L1ChunkSize); NativeArray probesTargetL1BL1Rz = probesL0L1.GetSubArray(cellL1BL1RzOffset + chunkIndex * L1ChunkSize, L1ChunkSize); - NativeArray validityNeighboorMaskChunkTarget = sharedData.GetSubArray(validityMaskOffset + chunkIndex * m_BakingSet.sharedValidityMaskChunkSize, m_BakingSet.sharedValidityMaskChunkSize); - NativeArray skyOcclusionL0L1ChunkTarget = sharedData.GetSubArray(skyOcclusionL0L1Offset + chunkIndex * m_BakingSet.sharedSkyOcclusionL0L1ChunkSize, m_BakingSet.sharedSkyOcclusionL0L1ChunkSize).Reinterpret(1); - NativeArray skyShadingIndicesChunkTarget = sharedData.GetSubArray(skyShadingIndicesOffset + chunkIndex * m_BakingSet.sharedSkyShadingDirectionIndicesChunkSize, m_BakingSet.sharedSkyShadingDirectionIndicesChunkSize); + NativeArray validityNeighboorMaskChunkTarget = sharedData.GetSubArray(validityMaskOffset + chunkIndex * bakingSet.sharedValidityMaskChunkSize, bakingSet.sharedValidityMaskChunkSize); + NativeArray skyOcclusionL0L1ChunkTarget = sharedData.GetSubArray(skyOcclusionL0L1Offset + chunkIndex * bakingSet.sharedSkyOcclusionL0L1ChunkSize, bakingSet.sharedSkyOcclusionL0L1ChunkSize).Reinterpret(1); + NativeArray skyShadingIndicesChunkTarget = sharedData.GetSubArray(skyShadingIndicesOffset + chunkIndex * bakingSet.sharedSkyShadingDirectionIndicesChunkSize, bakingSet.sharedSkyShadingDirectionIndicesChunkSize); - NativeArray positionsChunkTarget = supportData.GetSubArray(positionOffset + chunkIndex * m_BakingSet.supportPositionChunkSize, m_BakingSet.supportPositionChunkSize).Reinterpret(1); - NativeArray validityChunkTarget = supportData.GetSubArray(validityOffset + chunkIndex * m_BakingSet.supportValidityChunkSize, m_BakingSet.supportValidityChunkSize).Reinterpret(1); - NativeArray touchupVolumeInteractionChunkTarget = supportData.GetSubArray(touchupOffset + 
chunkIndex * m_BakingSet.supportTouchupChunkSize, m_BakingSet.supportTouchupChunkSize).Reinterpret(1); - NativeArray regionChunkTarget = supportData.GetSubArray(layerOffset + chunkIndex * m_BakingSet.supportLayerMaskChunkSize, m_BakingSet.supportLayerMaskChunkSize).Reinterpret(1); - NativeArray offsetChunkTarget = supportData.GetSubArray(offsetsOffset + chunkIndex * m_BakingSet.supportOffsetsChunkSize, m_BakingSet.supportOffsetsChunkSize).Reinterpret(1); + NativeArray positionsChunkTarget = supportData.GetSubArray(positionOffset + chunkIndex * bakingSet.supportPositionChunkSize, bakingSet.supportPositionChunkSize).Reinterpret(1); + NativeArray validityChunkTarget = supportData.GetSubArray(validityOffset + chunkIndex * bakingSet.supportValidityChunkSize, bakingSet.supportValidityChunkSize).Reinterpret(1); + NativeArray touchupVolumeInteractionChunkTarget = supportData.GetSubArray(touchupOffset + chunkIndex * bakingSet.supportTouchupChunkSize, bakingSet.supportTouchupChunkSize).Reinterpret(1); + NativeArray regionChunkTarget = supportData.GetSubArray(layerOffset + chunkIndex * bakingSet.supportLayerMaskChunkSize, bakingSet.supportLayerMaskChunkSize).Reinterpret(1); + NativeArray offsetChunkTarget = supportData.GetSubArray(offsetsOffset + chunkIndex * bakingSet.supportOffsetsChunkSize, bakingSet.supportOffsetsChunkSize).Reinterpret(1); NativeArray probesTargetL2_0 = probesL2.GetSubArray(shL2ChunkOffset + chunksCount * L2TextureChunkSize * 0 + chunkIndex * L2TextureChunkSize, L2TextureChunkSize); NativeArray probesTargetL2_1 = probesL2.GetSubArray(shL2ChunkOffset + chunksCount * L2TextureChunkSize * 1 + chunkIndex * L2TextureChunkSize, L2TextureChunkSize); NativeArray probesTargetL2_2 = probesL2.GetSubArray(shL2ChunkOffset + chunksCount * L2TextureChunkSize * 2 + chunkIndex * L2TextureChunkSize, L2TextureChunkSize); NativeArray probesTargetL2_3 = probesL2.GetSubArray(shL2ChunkOffset + chunksCount * L2TextureChunkSize * 3 + chunkIndex * L2TextureChunkSize, 
L2TextureChunkSize); - NativeArray probeOcclusionTarget = probeOcclusion.GetSubArray(probeOcclusionChunkOffset + chunkIndex * m_BakingSet.ProbeOcclusionChunkSize, m_BakingSet.ProbeOcclusionChunkSize); + NativeArray probeOcclusionTarget = probeOcclusion.GetSubArray(probeOcclusionChunkOffset + chunkIndex * bakingSet.ProbeOcclusionChunkSize, bakingSet.ProbeOcclusionChunkSize); - for (int brickIndex = 0; brickIndex < m_BakingSet.chunkSizeInBricks; brickIndex++) + for (int brickIndex = 0; brickIndex < bakingSet.chunkSizeInBricks; brickIndex++) { for (int z = 0; z < ProbeBrickPool.kBrickProbeCountPerDim; z++) { @@ -1008,16 +1009,16 @@ static unsafe bool WriteBakingCells(BakingCell[] bakingCells) for (int l = 0; l < validityRegionCount; l++) validityNeighboorMaskChunkTarget[index * validityRegionCount + l] = 0; - if (m_BakingSet.bakedSkyOcclusion) + if (bakingSet.bakedSkyOcclusion) { WriteToShaderSkyOcclusion(Vector4.zero, skyOcclusionL0L1ChunkTarget, index * 4); - if (m_BakingSet.bakedSkyShadingDirection) + if (bakingSet.bakedSkyShadingDirection) { skyShadingIndicesChunkTarget[index] = 255; } } - if (m_BakingSet.bakedProbeOcclusion) + if (bakingSet.bakedProbeOcclusion) { WriteToShaderProbeOcclusion(Vector4.one, probeOcclusionTarget, index * 4); } @@ -1039,16 +1040,16 @@ static unsafe bool WriteBakingCells(BakingCell[] bakingCells) for (int l = 0; l < validityRegionCount; l++) validityNeighboorMaskChunkTarget[index * validityRegionCount + l] = bakingCell.validityNeighbourMask[l, shidx]; - if (m_BakingSet.bakedSkyOcclusion) + if (bakingSet.bakedSkyOcclusion) { WriteToShaderSkyOcclusion(bakingCell.skyOcclusionDataL0L1[shidx], skyOcclusionL0L1ChunkTarget, index * 4); - if (m_BakingSet.bakedSkyShadingDirection) + if (bakingSet.bakedSkyShadingDirection) { skyShadingIndicesChunkTarget[index] = bakingCell.skyShadingDirectionIndices[shidx]; } } - if (m_BakingSet.bakedProbeOcclusion) + if (bakingSet.bakedProbeOcclusion) { 
WriteToShaderProbeOcclusion(bakingCell.probeOcclusion[shidx], probeOcclusionTarget, index * 4); } @@ -1071,8 +1072,8 @@ static unsafe bool WriteBakingCells(BakingCell[] bakingCells) shL0L1ChunkOffset += (chunksCount * L0L1ChunkSize); shL2ChunkOffset += (chunksCount * L2ChunkSize); probeOcclusionChunkOffset += (chunksCount * probeOcclusionChunkSize); - supportChunkOffset += (chunksCount * m_BakingSet.supportDataChunkSize); - sharedChunkOffset += (chunksCount * m_BakingSet.sharedDataChunkSize); + supportChunkOffset += (chunksCount * bakingSet.supportDataChunkSize); + sharedChunkOffset += (chunksCount * bakingSet.sharedDataChunkSize); bricks.GetSubArray(startCounts.bricksCount, cellDesc.bricksCount).CopyFrom(bakingCell.bricks); @@ -1084,7 +1085,7 @@ static unsafe bool WriteBakingCells(BakingCell[] bakingCells) } // Need to save here because the forced import below discards the changes. - EditorUtility.SetDirty(m_BakingSet); + EditorUtility.SetDirty(bakingSet); AssetDatabase.SaveAssets(); // Explicitly make sure the binary output files are writable since we write them using the C# file API (i.e. 
check out Perforce files if applicable) @@ -1136,20 +1137,20 @@ static unsafe bool WriteBakingCells(BakingCell[] bakingCells) AssetDatabase.ImportAsset(cellSharedDataFilename); AssetDatabase.ImportAsset(cellSupportDataFilename); - var bakingSetGUID = AssetDatabase.AssetPathToGUID(AssetDatabase.GetAssetPath(m_BakingSet)); + var bakingSetGUID = AssetDatabase.AssetPathToGUID(AssetDatabase.GetAssetPath(bakingSet)); - m_BakingSet.scenarios[ProbeReferenceVolume.instance.lightingScenario] = new ProbeVolumeBakingSet.PerScenarioDataInfo + bakingSet.scenarios[bakingSet.lightingScenario] = new ProbeVolumeBakingSet.PerScenarioDataInfo { sceneHash = sceneStateHash, cellDataAsset = new ProbeVolumeStreamableAsset(kAPVStreamingAssetsPath, cellL0L1DataDescs, L0L1ChunkSize, bakingSetGUID, AssetDatabase.AssetPathToGUID(cellDataFilename)), cellOptionalDataAsset = new ProbeVolumeStreamableAsset(kAPVStreamingAssetsPath, cellL2DataDescs, L2ChunkSize, bakingSetGUID, AssetDatabase.AssetPathToGUID(cellOptionalDataFilename)), cellProbeOcclusionDataAsset = new ProbeVolumeStreamableAsset(kAPVStreamingAssetsPath, cellProbeOcclusionDataDescs, probeOcclusionChunkSize, bakingSetGUID, AssetDatabase.AssetPathToGUID(cellProbeOcclusionDataFilename)), }; - m_BakingSet.cellSharedDataAsset = new ProbeVolumeStreamableAsset(kAPVStreamingAssetsPath, cellSharedDataDescs, m_BakingSet.sharedDataChunkSize, bakingSetGUID, AssetDatabase.AssetPathToGUID(cellSharedDataFilename)); - m_BakingSet.cellBricksDataAsset = new ProbeVolumeStreamableAsset(kAPVStreamingAssetsPath, cellBricksDescs, sizeof(Brick), bakingSetGUID, AssetDatabase.AssetPathToGUID(cellBricksDataFilename)); - m_BakingSet.cellSupportDataAsset = new ProbeVolumeStreamableAsset(kAPVStreamingAssetsPath, cellSupportDescs, m_BakingSet.supportDataChunkSize, bakingSetGUID, AssetDatabase.AssetPathToGUID(cellSupportDataFilename)); + bakingSet.cellSharedDataAsset = new ProbeVolumeStreamableAsset(kAPVStreamingAssetsPath, cellSharedDataDescs, 
bakingSet.sharedDataChunkSize, bakingSetGUID, AssetDatabase.AssetPathToGUID(cellSharedDataFilename)); + bakingSet.cellBricksDataAsset = new ProbeVolumeStreamableAsset(kAPVStreamingAssetsPath, cellBricksDescs, sizeof(Brick), bakingSetGUID, AssetDatabase.AssetPathToGUID(cellBricksDataFilename)); + bakingSet.cellSupportDataAsset = new ProbeVolumeStreamableAsset(kAPVStreamingAssetsPath, cellSupportDescs, bakingSet.supportDataChunkSize, bakingSetGUID, AssetDatabase.AssetPathToGUID(cellSupportDataFilename)); - EditorUtility.SetDirty(m_BakingSet); + EditorUtility.SetDirty(bakingSet); return true; } @@ -1198,7 +1199,7 @@ unsafe static void WriteDilatedCells(List cells) probeOcclusion.CopyFrom(System.IO.File.ReadAllBytes(cellProbeOcclusionDataFilename)); } - var lightingScenario = ProbeReferenceVolume.instance.lightingScenario; + var lightingScenario = m_BakingSet.lightingScenario; Debug.Assert(m_BakingSet.scenarios.ContainsKey(lightingScenario)); var scenarioDataInfo = m_BakingSet.scenarios[lightingScenario]; diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.VirtualOffset.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.VirtualOffset.cs index 7e12d43d7df..08269a06bb3 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.VirtualOffset.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.VirtualOffset.cs @@ -356,7 +356,7 @@ static internal void RecomputeVOForDebugOnly() if (ValidateBakingCellsSize(bakingCellsArray, chunkSizeInProbes, hasVirtualOffsets, hasRenderingLayers)) { // Write back the assets. 
- WriteBakingCells(bakingCellsArray); + WriteBakingCells(m_BakingSet, bakingCellsArray); } m_BakingBatch?.Dispose(); diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.cs index 3467867e883..f2bcb0a0077 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.cs @@ -479,6 +479,14 @@ struct BakeData // Cancellation public bool failed; + [Flags] + enum BakeJobRequests + { + MAIN_REQUEST = 1, + TOUCHUP_REQUESTS = 2, + ADDITIONAL_REQUEST = 4 + } + internal static void InitVirtualOffsetJob(IntPtr pVirtualOffsetsBuffer, ref bool bakeVirtualOffsets) { bool usingVirtualOffset = m_BakingSet.settings.virtualOffsetSettings.useVirtualOffset; @@ -541,7 +549,11 @@ public void Init(ProbeVolumeBakingSet bakingSet, NativeList probePositi probeCount = probePositions.Length; reflectionProbeCount = requests.Count; - jobs = CreateBakingJobs(bakingSet, requests.Count != 0); + var probeJobRequests = BakeJobRequests.MAIN_REQUEST | BakeJobRequests.TOUCHUP_REQUESTS; + if (requests.Count > 0) + probeJobRequests |= BakeJobRequests.ADDITIONAL_REQUEST; + + jobs = CreateBakingJobs(bakingSet, probeJobRequests); originalPositions = probePositions.ToArray(Allocator.Persistent); SortPositions(probePositions, requests); @@ -587,13 +599,22 @@ public void InitAdditionalRequests(NativeList probePositions, List probePositions, BakeType bakeType) + public void InitLightingJob(ProbeVolumeBakingSet bakingSet, ProbeAdjustmentVolume touchup, NativeList probePositions, BakeType bakeType) { probeCount = probePositions.Length; s_AdjustmentVolumes = new TouchupVolumeWithBoundsList(); + touchup.GetOBBandAABB(out var obb, out var aabb); + s_AdjustmentVolumes.Add((obb, aabb, touchup)); + touchup.skyDirection.Normalize(); - jobs = CreateBakingJobs(bakingSet, 
false); + var probeJobRequests = BakeJobRequests.TOUCHUP_REQUESTS; + if (touchup.mode != ProbeAdjustmentVolume.Mode.OverrideSampleCount) + { + // Other touchup volumes don't need a job of their own but they do need a main request job + probeJobRequests |= BakeJobRequests.MAIN_REQUEST; + } + jobs = CreateBakingJobs(bakingSet, probeJobRequests); SortPositions(probePositions, new List()); lightingJob = lightingOverride ?? new DefaultLightTransport(); @@ -622,16 +643,19 @@ public void ExecuteLightingAsync() bakingThread.Start(); } - static BakeJob[] CreateBakingJobs(ProbeVolumeBakingSet bakingSet, bool hasAdditionalRequests) + static BakeJob[] CreateBakingJobs(ProbeVolumeBakingSet bakingSet, BakeJobRequests bakeJobRequests) { // Build the list of adjustment volumes affecting sample count var touchupVolumesAndBounds = new TouchupVolumeWithBoundsList(); + if (bakeJobRequests.HasFlag(BakeJobRequests.TOUCHUP_REQUESTS)) { // This is slow, but we should have very little amount of touchup volumes. foreach (var adjustment in s_AdjustmentVolumes) { if (adjustment.volume.mode == ProbeAdjustmentVolume.Mode.OverrideSampleCount) + { touchupVolumesAndBounds.Add(adjustment); + } } // Sort by volume to give priority to smaller volumes @@ -640,18 +664,33 @@ static BakeJob[] CreateBakingJobs(ProbeVolumeBakingSet bakingSet, bool hasAdditi var lightingSettings = ProbeVolumeLightingTab.GetLightingSettings(); bool skyOcclusion = bakingSet.skyOcclusion; + var jobs = new List(); - int additionalJobs = hasAdditionalRequests ? 
2 : 1; - var jobs = new BakeJob[touchupVolumesAndBounds.Count + additionalJobs]; + if (bakeJobRequests.HasFlag(BakeJobRequests.TOUCHUP_REQUESTS)) + { + foreach (var touchupVolume in touchupVolumesAndBounds) + { + BakeJob job = new BakeJob(); + job.Create(lightingSettings, skyOcclusion, touchupVolume); + jobs.Add(job); + } + } - for (int i = 0; i < touchupVolumesAndBounds.Count; i++) - jobs[i].Create(lightingSettings, skyOcclusion, touchupVolumesAndBounds[i]); + if (bakeJobRequests.HasFlag(BakeJobRequests.MAIN_REQUEST)) + { + BakeJob job = new BakeJob(); + job.Create(bakingSet, lightingSettings, skyOcclusion); + jobs.Add(job); + } - jobs[touchupVolumesAndBounds.Count + 0].Create(bakingSet, lightingSettings, skyOcclusion); - if (hasAdditionalRequests) - jobs[touchupVolumesAndBounds.Count + 1].Create(bakingSet, lightingSettings, false); + if (bakeJobRequests.HasFlag(BakeJobRequests.ADDITIONAL_REQUEST)) + { + BakeJob job = new BakeJob(); + job.Create(bakingSet, lightingSettings, false); + jobs.Add(job); + } - return jobs; + return jobs.ToArray(); } static BakeJob[] CreateAdditionalBakingJobs() @@ -1802,7 +1841,7 @@ static void ApplyPostBakeOperations() // Attempt to convert baking cells to runtime cells bool succeededWritingBakingCells; using (new BakingCompleteProfiling(BakingCompleteProfiling.Stages.WriteBakedData)) - succeededWritingBakingCells = WriteBakingCells(m_BakedCells.Values.ToArray()); + succeededWritingBakingCells = WriteBakingCells(m_BakingSet, m_BakedCells.Values.ToArray()); if (!succeededWritingBakingCells) return; diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbePlacement.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbePlacement.cs index def047fed8e..3ed26d6e864 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbePlacement.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbePlacement.cs @@ -170,13 +170,30 @@ static 
ComputeShader subdivideSceneCS { _subdivideSceneCS = GraphicsSettings.GetRenderPipelineSettings().subdivideSceneCS; - s_ClearBufferKernel = subdivideSceneCS.FindKernel("ClearBuffer"); - s_ClearKernel = subdivideSceneCS.FindKernel("Clear"); - s_JumpFloodingKernel = subdivideSceneCS.FindKernel("JumpFlooding"); - s_FillUVKernel = subdivideSceneCS.FindKernel("FillUVMap"); - s_FinalPassKernel = subdivideSceneCS.FindKernel("FinalPass"); - s_VoxelizeProbeVolumesKernel = subdivideSceneCS.FindKernel("VoxelizeProbeVolumeData"); - s_SubdivideKernel = subdivideSceneCS.FindKernel("Subdivide"); + // The compute shader is not supported on OpenGL (see #pragma only_renderers in ProbeVolumeSubdivide.compute) + // The kernels won't exist, so we skip initialization. This is caught earlier in RunPlacement with a proper error message. + try + { + s_ClearBufferKernel = subdivideSceneCS.FindKernel("ClearBuffer"); + s_ClearKernel = subdivideSceneCS.FindKernel("Clear"); + s_JumpFloodingKernel = subdivideSceneCS.FindKernel("JumpFlooding"); + s_FillUVKernel = subdivideSceneCS.FindKernel("FillUVMap"); + s_FinalPassKernel = subdivideSceneCS.FindKernel("FinalPass"); + s_VoxelizeProbeVolumesKernel = subdivideSceneCS.FindKernel("VoxelizeProbeVolumeData"); + s_SubdivideKernel = subdivideSceneCS.FindKernel("Subdivide"); + } + catch (System.ArgumentException) + { + // Kernels not found - likely running on unsupported graphics API + string message = "ProbeVolumeSubdivide compute shader kernels not found."; + if (SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLCore || + SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLES3) + { + message += " This is expected on OpenGL which is not supported for APV baking."; + } + Debug.LogWarning(message); + _subdivideSceneCS = null; + } } return _subdivideSceneCS; } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/Batching/InstanceCullingBatcherBurst.cs 
b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/Batching/InstanceCullingBatcherBurst.cs index 33ec5d56f1d..f9ba0a8f3c9 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/Batching/InstanceCullingBatcherBurst.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/Batching/InstanceCullingBatcherBurst.cs @@ -141,15 +141,24 @@ private static void ProcessRenderer(InstanceHandle instance, // Scan all materials once to retrieve whether this renderer is indirect-compatible or not (and store it in the RangeKey). // Also cache hash map lookups since we need them right after. bool supportsIndirect = true; - NativeArray subMaterials = new NativeArray(subMaterialIDs.Length, Allocator.Temp); + bool isMetal = SystemInfo.graphicsDeviceType == GraphicsDeviceType.Metal; + var subMaterials = new NativeArray(subMaterialIDs.Length, Allocator.Temp); for (int i = 0; i < subMaterialIDs.Length; i++) { EntityId subMaterialID = subMaterialIDs[i]; if (!materialMap.TryGetValue(subMaterialID, out GPUDrivenMaterial subMaterial)) continue; - supportsIndirect &= subMaterial.isIndirectSupported; subMaterials[i] = subMaterial; + + int subMeshIndex = subMeshStartIndex + i; + int lodLoopCount = math.max(mesh.meshLodCount, 1); + var subMesh = mesh.subMeshes[subMeshIndex * lodLoopCount]; + + // The indirect path does not support topology adjustment; use the direct path when this is required. + // Concretely, for quads, only use the indirect path if we allow quads natively (e.g. tessellation shaders). 
+ supportsIndirect &= subMesh.topology != MeshTopology.Quads || subMaterial.hasTessellation; + supportsIndirect &= !isMetal || !subMaterial.hasTessellation; } var rangeKey = new RangeKey diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Settings/RenderingDebuggerRuntimeResources.cs b/Packages/com.unity.render-pipelines.core/Runtime/Settings/RenderingDebuggerRuntimeResources.cs index ab84b521a5d..12c12f6e6f8 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Settings/RenderingDebuggerRuntimeResources.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Settings/RenderingDebuggerRuntimeResources.cs @@ -30,12 +30,12 @@ enum Version #if ENABLE_RENDERING_DEBUGGER_UI [SerializeField, ResourcePath("Runtime/Debugging/Runtime UI Resources/RuntimeDebugWindow_PanelSettings.asset")] - private LazyLoadReference m_PanelSettings; + private PanelSettings m_PanelSettings; - /// StyleSheet for the Rendering Debugger Runtime UI + /// Panel Settings Asset for the Rendering Debugger Runtime UI public PanelSettings panelSettings { - get => m_PanelSettings.asset; + get => m_PanelSettings; set => this.SetValueAndNotify(ref m_PanelSettings, value, nameof(m_PanelSettings)); } @@ -54,12 +54,12 @@ public StyleSheet[] styleSheets } [SerializeField, ResourcePath("Runtime/Debugging/Runtime UI Resources/RuntimeDebugWindow.uxml")] - private LazyLoadReference m_VisualTreeAsset; + private VisualTreeAsset m_VisualTreeAsset; /// Visual Tree Asset for the Rendering Debugger Runtime UI public VisualTreeAsset visualTreeAsset { - get => m_VisualTreeAsset.asset; + get => m_VisualTreeAsset; set => this.SetValueAndNotify(ref m_VisualTreeAsset, value, nameof(m_VisualTreeAsset)); } #endif diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/BuildProcessors/HDRPPreprocessBuild.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/BuildProcessors/HDRPPreprocessBuild.cs index d29693c64ad..548cfa00565 100644 --- 
a/Packages/com.unity.render-pipelines.high-definition/Editor/BuildProcessors/HDRPPreprocessBuild.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/BuildProcessors/HDRPPreprocessBuild.cs @@ -101,6 +101,21 @@ private static bool IsConfigurationValid() validConfiguration &= config; } + { + bool config = ValidateVolumetricCloudsConfiguration(m_BuildData.renderPipelineAssets); + validConfiguration &= config; + } + + { + bool config = ValidateHighQualityLineRenderingConfiguration(m_BuildData.renderPipelineAssets); + validConfiguration &= config; + } + + { + bool config = ValidateGraphicsCompositorConfiguration(m_BuildData.renderPipelineAssets); + validConfiguration &= config; + } + return validConfiguration; } @@ -163,7 +178,7 @@ internal static bool ValidateRayTracingConfiguration(List return true; // No ray tracing enabled, skip validation var currentBuildTarget = EditorUserBuildSettings.activeBuildTarget; - if (HDRenderPipeline.PlatformHasRaytracingIssues(currentBuildTarget, out var warning)) + if (HDRenderPipeline.CheckPlatformRaytracingCompatability(currentBuildTarget, out var warning)) { Debug.LogWarning($"HDRP Build Validation - Ray Tracing:{warning}"); return false; @@ -174,140 +189,281 @@ internal static bool ValidateRayTracingConfiguration(List internal static bool ValidateSubsurfaceScatteringConfiguration(List assetsList) { - var currentBuildTarget = EditorUserBuildSettings.activeBuildTarget; - - // Only validate for Switch 2 - if (currentBuildTarget != BuildTarget.Switch2) + if (!EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) return true; - // Check if any asset has Subsurface Scattering enabled + var validationSettings = HDProjectSettings.validationSettings; + bool anyAssetHasSSSEnabled = false; + HDRenderPipelineAsset foundAsset = null; foreach (var hdrpAsset in assetsList) { - if (hdrpAsset != null && hdrpAsset.currentPlatformRenderPipelineSettings.supportSubsurfaceScattering) + if (hdrpAsset != null) { - 
anyAssetHasSSSEnabled = true; - break; + if (hdrpAsset.currentPlatformRenderPipelineSettings.supportSubsurfaceScattering && !validationSettings.k_SubsurfaceScattering_Recommended) + { + anyAssetHasSSSEnabled = true; + foundAsset = hdrpAsset; + + break; + } } } if (!anyAssetHasSSSEnabled) return true; // No SSS enabled, skip validation - var activeBuildTargetGroup = BuildPipeline.GetBuildTargetGroup(BuildTarget.Switch2); - var namedBuildTarget = NamedBuildTarget.FromBuildTargetGroup(activeBuildTargetGroup); - Debug.LogWarning($"HDRP Build Validation - Subsurface Scattering: Subsurface Scattering is enabled for {namedBuildTarget.TargetName}. For optimal performance, set the Downsample Level to the maximum value (2) for this platform."); + Debug.LogWarning($"HDRP Build Validation [{(foundAsset != null ? foundAsset.name : "")}] - {HDRenderPipelineUI.Styles.supportedSSSContent.text}: Enabled for the active platform. {HDRenderPipelineUI.Styles.featureNotRecommendedWarning}\nAsset: {AssetDatabase.GetAssetPath(foundAsset)}", foundAsset); return false; } internal static bool ValidateFilmGrainConfiguration(List assetsList) { - var currentBuildTarget = EditorUserBuildSettings.activeBuildTarget; + static bool CheckVolumeProfileValid(VolumeProfile volumeProfile, HDRenderPipelineAsset hdAsset = null) + { + if (volumeProfile.TryGet(out var filmGrain)) + { + var validationSettings = HDProjectSettings.validationSettings; + var defaultFilmGrain = HDEditorUtils.GetVolumeComponentDefaultState(); + + // Get effective intensity value (use local if overridden, otherwise use default) + bool useDefaultIntensity = !filmGrain.intensity.overrideState; + float effectiveIntensity = useDefaultIntensity + ? (defaultFilmGrain?.intensity.value ?? 0.0f) + : filmGrain.intensity.value; + + if (effectiveIntensity > 0.0f && !validationSettings.k_FilmGrain_Recommended) + { + Debug.LogWarning($"HDRP Build Validation [{volumeProfile.name}] - Film Grain: Enabled for the active platform. 
{HDRenderPipelineUI.Styles.featureNotRecommendedWarning}\nAsset: {AssetDatabase.GetAssetPath(volumeProfile)}", volumeProfile); + return false; + } + } + + return true; + } - // Only validate for Switch 2 - if (currentBuildTarget != BuildTarget.Switch2) + if (!EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) return true; // Check default volume profile from HDRP Global Settings - bool foundFilmGrain = false; + bool isValidConfiguration = true; var defaultVolumeProfileSettings = GraphicsSettings.GetRenderPipelineSettings(); if (defaultVolumeProfileSettings?.volumeProfile != null) { - if (defaultVolumeProfileSettings.volumeProfile.TryGet(out var filmGrain) && filmGrain.intensity.value > 0) - { - foundFilmGrain = true; - } + isValidConfiguration &= CheckVolumeProfileValid(defaultVolumeProfileSettings.volumeProfile); } - // Check volume profiles in each HDRP asset - if (!foundFilmGrain) + foreach (var hdrpAsset in assetsList) { - foreach (var hdrpAsset in assetsList) + if (hdrpAsset != null && hdrpAsset.volumeProfile != null) { - if (hdrpAsset != null && hdrpAsset.volumeProfile != null) - { - if (hdrpAsset.volumeProfile.TryGet(out var filmGrain) && filmGrain.intensity.value > 0) - { - foundFilmGrain = true; - break; - } - } + isValidConfiguration &= CheckVolumeProfileValid(hdrpAsset.volumeProfile, hdrpAsset); } } - if (!foundFilmGrain) - return true; // No Film Grain with intensity > 0, skip validation - - var activeBuildTargetGroup = BuildPipeline.GetBuildTargetGroup(BuildTarget.Switch2); - var namedBuildTarget = NamedBuildTarget.FromBuildTargetGroup(activeBuildTargetGroup); - Debug.LogWarning($"HDRP Build Validation - Film Grain: Film Grain is enabled for {namedBuildTarget.TargetName}. 
This may significantly impact performance and should be disabled for this platform."); - return false; + return isValidConfiguration; } internal static bool ValidateVolumetricFogConfiguration(List assetsList) { - bool CheckVolumeProfileValid(VolumeProfile volumeProfile, ref bool foundFogHighSetting, ref bool fogNeedsDensityCutoff) + static bool CheckVolumeProfileValid(VolumeProfile volumeProfile, HDRenderPipelineAsset hdAsset = null) { if (volumeProfile.TryGet(out var fog)) { - if (fog.quality.value > 1) + var validationSettings = HDProjectSettings.validationSettings; + Fog defaultFog = HDEditorUtils.GetVolumeComponentDefaultState(); + + // Get effective quality value (use local if overridden, otherwise use default) + bool useDefaultQuality = !fog.quality.overrideState; + int effectiveQuality = useDefaultQuality + ? (defaultFog?.quality.value ?? 0) + : fog.quality.value; + + bool useDefaultDensityCutoff = !fog.volumetricLightingDensityCutoff.overrideState; + float effectiveDensityCutoff = useDefaultDensityCutoff + ? (defaultFog?.volumetricLightingDensityCutoff.value ?? 0.0f) + : fog.volumetricLightingDensityCutoff.value; + + float effectiveFogBudget = fog.volumetricFogBudget; + const int k_CustomQuality = ScalableSettingLevelParameter.LevelCount; + if (effectiveQuality == k_CustomQuality) // Custom quality tier { - foundFogHighSetting = true; + bool useDefaultFogBudget = !fog.volumetricFogBudgetOverrideState; + effectiveFogBudget = useDefaultFogBudget + ? (defaultFog?.volumetricFogBudget ?? 0.0f) + : fog.volumetricFogBudget; + } + else if (hdAsset != null) + { + effectiveFogBudget = hdAsset.currentPlatformRenderPipelineSettings.lightingQualitySettings.Fog_Budget[effectiveQuality]; + } + + bool warningsFound = false; + if (effectiveFogBudget >= validationSettings.k_Fog_MaximumFogBudget && fog.fogControlMode == FogControl.Balance) + { + string tierName = $"{(effectiveQuality == k_CustomQuality ? 
"Custom" : ((ScalableSettingLevelParameter.Level)effectiveQuality).ToString())} (Budget: {effectiveFogBudget})"; + string warningMessage = string.Format(HDRenderPipelineUI.Styles.maxFogBudgetWarning, validationSettings.k_Fog_MaximumFogBudget); + Debug.LogWarning($"HDRP Build Validation [{volumeProfile.name}] - {HDRenderPipelineUI.Styles.FogSettingsSubTitle.text}: {HDEditorUtils.CreateParameterWarningMessage("Tier", tierName, null, warningMessage)}\nAsset: {AssetDatabase.GetAssetPath(volumeProfile)}", volumeProfile); + warningsFound = true; } - if (fog.quality.value > 0 && fog.volumetricLightingDensityCutoff.value <= 0.0f) + if (effectiveDensityCutoff <= 0.0f && effectiveFogBudget >= validationSettings.k_Fog_MinimumFogBudgetForCutoff && fog.fogControlMode == FogControl.Balance) { - fogNeedsDensityCutoff = true; + string warningMessage = string.Format(HDRenderPipelineUI.Styles.minFogBudgetForDensityCutoffWarning, validationSettings.k_Fog_MinimumFogBudgetForCutoff); + Debug.LogWarning($"HDRP Build Validation [{volumeProfile.name}] - {HDRenderPipelineUI.Styles.FogSettingsSubTitle.text}: {HDEditorUtils.CreateParameterWarningMessage("Density Cutoff", "0", null, warningMessage)}\nAsset: {AssetDatabase.GetAssetPath(volumeProfile)}", volumeProfile); + warningsFound = true; } + + return !warningsFound; } - if (foundFogHighSetting) + return true; + } + + if (!EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) + return true; + + // Check default volume profile from HDRP Global Settings + bool isValidConfiguration = true; + var defaultVolumeProfileSettings = GraphicsSettings.GetRenderPipelineSettings(); + if (defaultVolumeProfileSettings?.volumeProfile != null) + { + isValidConfiguration &= CheckVolumeProfileValid(defaultVolumeProfileSettings.volumeProfile); + } + + // Check volume profiles in each HDRP asset + foreach (var hdrpAsset in assetsList) + { + if (hdrpAsset != null && hdrpAsset.volumeProfile != null) { - var activeBuildTargetGroup = 
BuildPipeline.GetBuildTargetGroup(BuildTarget.Switch2); - var namedBuildTarget = NamedBuildTarget.FromBuildTargetGroup(activeBuildTargetGroup); - string assetPath = AssetDatabase.GetAssetPath(volumeProfile); - Debug.LogWarning($"HDRP Build Validation [{volumeProfile.name}] - Volumetric Fog: Tier = High {namedBuildTarget.TargetName}. This will have a significant performance impact. It is recommended to either disable this feature, or lower the quality tier to Low/Medium.\nAsset: {assetPath}", volumeProfile); + isValidConfiguration &= CheckVolumeProfileValid(hdrpAsset.volumeProfile, hdrpAsset); } + } - if (fogNeedsDensityCutoff) + return isValidConfiguration; + } + internal static bool ValidateVolumetricCloudsConfiguration(List assetsList) + { + static bool CheckVolumeProfileValid(VolumeProfile volumeProfile, HDRenderPipelineAsset hdAsset = null) + { + if (volumeProfile.TryGet(out var clouds)) { - var activeBuildTargetGroup = BuildPipeline.GetBuildTargetGroup(BuildTarget.Switch2); - var namedBuildTarget = NamedBuildTarget.FromBuildTargetGroup(activeBuildTargetGroup); - string assetPath = AssetDatabase.GetAssetPath(volumeProfile); - Debug.LogWarning($"HDRP Build Validation [{volumeProfile.name}]- Volumetric Fog: Density Cutoff = 0 with Tier > Low for {namedBuildTarget.TargetName}. This may impact performance. It is recommended to use Density cutoff when using Medium/High tier.\nAsset: {assetPath}", volumeProfile); + var validationSettings = HDProjectSettings.validationSettings; + VolumetricClouds defaultClouds = HDEditorUtils.GetVolumeComponentDefaultState(); + + bool useDefaultEnabled = !clouds.enable.overrideState; + bool effectiveEnabled = useDefaultEnabled + ? (defaultClouds?.enable.value ?? false) + : clouds.enable.value; + + if (effectiveEnabled && !validationSettings.k_VolumetricClouds_Recommended) + { + Debug.LogWarning($"HDRP Build Validation [{volumeProfile.name}] - {HDRenderPipelineUI.Styles.volumetricCloudsSubTitle.text}: Enabled for the active platform. 
{HDRenderPipelineUI.Styles.featureNotRecommendedWarning}\nAsset: {AssetDatabase.GetAssetPath(volumeProfile)}", volumeProfile); + return false; + } } - return !(foundFogHighSetting || fogNeedsDensityCutoff); + return true; } - var currentBuildTarget = EditorUserBuildSettings.activeBuildTarget; - - // Only validate for Switch 2 - if (currentBuildTarget != BuildTarget.Switch2) + if (!EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) return true; // Check default volume profile from HDRP Global Settings bool isValidConfiguration = true; - bool foundFogHighSetting = false; - bool fogNeedsDensityCutoff = false; var defaultVolumeProfileSettings = GraphicsSettings.GetRenderPipelineSettings(); if (defaultVolumeProfileSettings?.volumeProfile != null) { - isValidConfiguration &= CheckVolumeProfileValid(defaultVolumeProfileSettings.volumeProfile, ref foundFogHighSetting, ref fogNeedsDensityCutoff); + isValidConfiguration &= CheckVolumeProfileValid(defaultVolumeProfileSettings.volumeProfile); } // Check volume profiles in each HDRP asset - if (!foundFogHighSetting || !fogNeedsDensityCutoff) + foreach (var hdrpAsset in assetsList) { - foreach (var hdrpAsset in assetsList) + if (hdrpAsset != null && hdrpAsset.volumeProfile != null) { - if (hdrpAsset != null && hdrpAsset.volumeProfile != null) - { - if (foundFogHighSetting || fogNeedsDensityCutoff) - break; + isValidConfiguration &= CheckVolumeProfileValid(hdrpAsset.volumeProfile, hdrpAsset); + } + } + + return isValidConfiguration; + } + + internal static bool ValidateHighQualityLineRenderingConfiguration(List assetsList) + { + static bool CheckVolumeProfileValid(VolumeProfile volumeProfile, HDRenderPipelineAsset hdAsset = null) + { + if (volumeProfile.TryGet(out var hqLines)) + { + var validationSettings = HDProjectSettings.validationSettings; + var defaultHQLines = HDEditorUtils.GetVolumeComponentDefaultState(); - isValidConfiguration &= CheckVolumeProfileValid(hdrpAsset.volumeProfile, ref 
foundFogHighSetting, ref fogNeedsDensityCutoff); + // Get effective enable value (use local if overridden, otherwise use default) + bool useDefaultEnable = !hqLines.enable.overrideState; + bool effectiveEnable = useDefaultEnable + ? (defaultHQLines?.enable.value ?? false) + : hqLines.enable.value; + + bool warningsFound = false; + if (effectiveEnable && !validationSettings.k_HighQualityLineRendering_Recommended) + { + Debug.LogWarning($"HDRP Build Validation [{volumeProfile.name}] - {HDRenderPipelineUI.Styles.highQualityLineRenderingSubTitle.text}: Enabled for the active platform. {HDRenderPipelineUI.Styles.featureNotRecommendedWarning}\nAsset: {AssetDatabase.GetAssetPath(volumeProfile)}", volumeProfile); + warningsFound = true; } + + return !warningsFound; + } + + return true; + } + + if (!EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) + return true; + + // Check default volume profile from HDRP Global Settings + bool isValidConfiguration = true; + var defaultVolumeProfileSettings = GraphicsSettings.GetRenderPipelineSettings(); + if (GraphicsSettings.defaultRenderPipeline is HDRenderPipelineAsset hdAsset && defaultVolumeProfileSettings?.volumeProfile != null) + { + isValidConfiguration &= CheckVolumeProfileValid(defaultVolumeProfileSettings.volumeProfile, hdAsset); + } + + // Check volume profiles in each HDRP asset + foreach (var hdrpAsset in assetsList) + { + if (hdrpAsset != null && hdrpAsset.volumeProfile != null) + { + isValidConfiguration &= CheckVolumeProfileValid(hdrpAsset.volumeProfile, hdrpAsset); + } + } + + return isValidConfiguration; + } + + internal static bool ValidateGraphicsCompositorConfiguration(List assetsList) + { + static bool CheckHDAssetValid(HDRenderPipelineAsset hdAsset) + { + var validationSettings = HDProjectSettings.validationSettings; + if (!validationSettings.k_GraphicsCompositor_Recommended && hdAsset.compositorCustomVolumeComponentsList.Count > 0) + { + Debug.LogWarning($"HDRP Build Validation - Graphics 
Compositor: Enabled for the active platform. {HDRenderPipelineUI.Styles.featureNotRecommendedWarning}\nGo to Window -> Rendering -> Graphics Compositor to disable."); + return false; + } + + return true; + } + + if (!EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) + return true; + + // Check default volume profile from HDRP Global Settings + bool isValidConfiguration = true; + foreach (var hdrpAsset in assetsList) + { + if (hdrpAsset != null) + { + isValidConfiguration &= CheckHDAssetValid(hdrpAsset); } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Compositor/CompositionManagerEditor.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Compositor/CompositionManagerEditor.cs index 5ac345d7837..3ff82d25d9e 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Compositor/CompositionManagerEditor.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Compositor/CompositionManagerEditor.cs @@ -146,6 +146,13 @@ public override void OnInspectorGUI() } m_SerializedProperties.Update(); + if (EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) + { + var validationSettings = HDProjectSettings.validationSettings; + if (!validationSettings.k_GraphicsCompositor_Recommended && m_compositionManager.enableInternal) + HDEditorUtils.ShowFeatureOptimisationWarning("Graphics Compositor"); + } + m_EnablePreview = EditorGUILayout.Toggle(Styles.k_EnablePreview, m_compositionManager.enableOutput); { m_compositionManager.enableOutput = m_EnablePreview; diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/VolumetricClouds/VolumetricCloudsEditor.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/VolumetricClouds/VolumetricCloudsEditor.cs index 9dd5d931daf..c0680af4cc8 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/VolumetricClouds/VolumetricCloudsEditor.cs +++ 
b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/VolumetricClouds/VolumetricCloudsEditor.cs @@ -519,9 +519,35 @@ public override void OnInspectorGUI() } using var disableScope = new EditorGUI.DisabledScope(notSupported); + if (EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) + { + var validationSettings = HDProjectSettings.validationSettings; + VolumetricClouds defaultClouds = HDEditorUtils.GetVolumeComponentDefaultState(); + + // Get effective quality value (use local if overridden, otherwise use default) + bool useDefaultEnabled = !m_Enable.overrideState.boolValue; + bool effectiveEnabled = useDefaultEnabled + ? (defaultClouds?.enable.value ?? false) + : m_Enable.value.boolValue; + + if (effectiveEnabled && !validationSettings.k_VolumetricClouds_Recommended) + { + if (useDefaultEnabled && HDEditorUtils.TryGetVolumeParameterSource( + clouds => clouds.enable.overrideState && clouds.enable.value == effectiveEnabled, + out var sourceProfile, + out var sourceName)) + { + HDEditorUtils.ShowFeatureOptimisationWarning(HDRenderPipelineUI.Styles.volumetricCloudsSubTitle.text, sourceName, () => Selection.activeObject = sourceProfile); + } + else + { + HDEditorUtils.ShowFeatureOptimisationWarning(HDRenderPipelineUI.Styles.volumetricCloudsSubTitle.text); + } + } + } + EditorGUILayout.LabelField("General", EditorStyles.miniLabel); PropertyField(m_Enable, EditorGUIUtility.TrTextContent("State")); - if (m_Enable.value.boolValue && !notSupported) HDEditorUtils.EnsureFrameSetting(FrameSettingsField.VolumetricClouds); diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/PostProcessing/FilmGrainEditor.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/PostProcessing/FilmGrainEditor.cs index 2c726eb732f..1b97295abc5 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/PostProcessing/FilmGrainEditor.cs +++ 
b/Packages/com.unity.render-pipelines.high-definition/Editor/PostProcessing/FilmGrainEditor.cs @@ -25,31 +25,35 @@ public override void OnInspectorGUI() { HDEditorUtils.EnsureFrameSetting(FrameSettingsField.FilmGrain); - var defaultFilmGrain = HDEditorUtils.GetVolumeComponentDefaultState(); + if (EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) + { + var defaultFilmGrain = HDEditorUtils.GetVolumeComponentDefaultState(); - // Get effective intensity value (use local if overridden, otherwise use default) - bool useDefaultIntensity = !m_Intensity.overrideState.boolValue; - float effectiveIntensity = useDefaultIntensity - ? (defaultFilmGrain?.intensity.value ?? -1.0f) - : m_Intensity.value.floatValue; + // Get effective intensity value (use local if overridden, otherwise use default) + bool useDefaultIntensity = !m_Intensity.overrideState.boolValue; + float effectiveIntensity = useDefaultIntensity + ? (defaultFilmGrain?.intensity.value ?? 0.0f) + : m_Intensity.value.floatValue; - if (effectiveIntensity > 0.0f) - { - using (new IndentLevelScope()) + var validationSettings = HDProjectSettings.validationSettings; + if (effectiveIntensity > 0.0f && !validationSettings.k_FilmGrain_Recommended) { - if (useDefaultIntensity && HDEditorUtils.TryGetVolumeParameterSource( - filmGrain => filmGrain.intensity.overrideState && filmGrain.intensity.value > 0.0f, - out var sourceProfile, - out var sourceName)) - { - HDEditorUtils.ShowPlatformPerformanceWarning(BuildTarget.Switch2, "Film Grain", sourceName, () => Selection.activeObject = sourceProfile); - } - else + using (new IndentLevelScope()) { - HDEditorUtils.ShowPlatformPerformanceWarning(BuildTarget.Switch2, "Film Grain"); + if (useDefaultIntensity && HDEditorUtils.TryGetVolumeParameterSource( + filmGrain => filmGrain.intensity.overrideState && filmGrain.intensity.value > 0.0f, + out var sourceProfile, + out var sourceName)) + { + HDEditorUtils.ShowFeatureOptimisationWarning("Film Grain", sourceName, () => 
Selection.activeObject = sourceProfile); + } + else + { + HDEditorUtils.ShowFeatureOptimisationWarning("Film Grain"); + } } - } - } + + } } PropertyField(m_Type); diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDEditorUtils.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDEditorUtils.cs index 7ac1a63403b..da77652339d 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDEditorUtils.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDEditorUtils.cs @@ -498,18 +498,11 @@ internal static bool TryGetVolumeParameterSource( /// /// Shows a platform-specific performance warning help box for a given feature. /// - /// The build target platform to check and display /// The name of the feature (e.g., "Ray Tracing", "Film Grain") /// Optional recommendation text. If null, uses default "is not recommended for this platform" - internal static void ShowPlatformPerformanceWarning(BuildTarget targetPlatform, string featureName, string recommendation = null) + internal static void ShowFeatureOptimisationWarning(string featureName, string recommendation = null) { - if (EditorUserBuildSettings.activeBuildTarget != targetPlatform) - return; - - var activeBuildTargetGroup = BuildPipeline.GetBuildTargetGroup(targetPlatform); - var namedBuildTarget = NamedBuildTarget.FromBuildTargetGroup(activeBuildTargetGroup); - - string message = $"{featureName} is enabled for {namedBuildTarget.TargetName}. 
"; + string message = $"{featureName} is enabled for the active platform.\n"; if (!string.IsNullOrEmpty(recommendation)) { @@ -517,7 +510,7 @@ internal static void ShowPlatformPerformanceWarning(BuildTarget targetPlatform, } else { - message += "\nThis may significantly impact performance and is not recommended for this platform."; + message += HDRenderPipelineUI.Styles.featureNotRecommendedWarning; } EditorGUILayout.HelpBox(message, MessageType.Warning, wide: true); @@ -526,18 +519,11 @@ internal static void ShowPlatformPerformanceWarning(BuildTarget targetPlatform, /// /// Shows a platform-specific performance warning help box for a given feature. /// - /// The build target platform to check and display /// The name of the feature (e.g., "Ray Tracing", "Film Grain") /// Optional recommendation text. If null, uses default "is not recommended for this platform" - internal static void ShowPlatformPerformanceWarning(BuildTarget targetPlatform, string featureName, string sourceAssetName, Action onButtonClicked, string recommendation = null) + internal static void ShowFeatureOptimisationWarning(string featureName, string sourceAssetName, Action onButtonClicked, string recommendation = null) { - if (EditorUserBuildSettings.activeBuildTarget != targetPlatform) - return; - - var activeBuildTargetGroup = BuildPipeline.GetBuildTargetGroup(targetPlatform); - var namedBuildTarget = NamedBuildTarget.FromBuildTargetGroup(activeBuildTargetGroup); - - string message = $"{featureName} is enabled in {sourceAssetName} for {namedBuildTarget.TargetName}. 
"; + string message = $"{featureName} is enabled in {sourceAssetName} for the active platform.\n"; if (!string.IsNullOrEmpty(recommendation)) { @@ -545,7 +531,7 @@ internal static void ShowPlatformPerformanceWarning(BuildTarget targetPlatform, } else { - message += "\nThis may significantly impact performance and is not recommended for this platform."; + message += HDRenderPipelineUI.Styles.featureNotRecommendedWarning; } CoreEditorUtils.DrawFixMeBox( @@ -555,54 +541,32 @@ internal static void ShowPlatformPerformanceWarning(BuildTarget targetPlatform, onButtonClicked); } - /// - /// Shows a platform-specific performance warning help box for a given setting within a feature. - /// - /// The build target platform to check and display - /// The name of the setting responsible for the performance warning in the feature. - /// The current value of the setting. - /// Optional recommendation text. If null, uses default "is not recommended for this platform" - internal static void ShowPlatformParameterPerformanceWarning(BuildTarget targetPlatform, string settingName, string settingValue, string recommendation = null) + internal static void ShowFeatureParameterOptimisationWarning(string settingName, string settingValue, string recommendation = null) { - if (EditorUserBuildSettings.activeBuildTarget != targetPlatform) - return; - - var activeBuildTargetGroup = BuildPipeline.GetBuildTargetGroup(targetPlatform); - var namedBuildTarget = NamedBuildTarget.FromBuildTargetGroup(activeBuildTargetGroup).TargetName; + EditorGUILayout.HelpBox(CreateParameterWarningMessage(settingName, settingValue, null, recommendation), MessageType.Warning, wide: true); + } - string message = $"{settingName}: {settingValue} is used for {namedBuildTarget}."; + internal static void ShowFeatureParameterOptimisationWarning(string settingName, string settingValue, string sourceAssetName, Action onButtonClicked, string recommendation = null) + { + CoreEditorUtils.DrawFixMeBox( + 
CreateParameterWarningMessage(settingName, settingValue, sourceAssetName, recommendation), + MessageType.Warning, + "Open", + onButtonClicked); + } - if (!string.IsNullOrEmpty(recommendation)) + internal static string CreateParameterWarningMessage(string settingName, string settingValue, string sourceAssetName = null, string recommendation = null) + { + string message = $"{settingName}: {settingValue} "; + if (sourceAssetName != null) { - message += '\n' + recommendation; + message += $"is set in {sourceAssetName}."; } else { - message += "\nThis may significantly impact performance and is not recommended for this platform."; + message += $"is used for the active platform."; } - EditorGUILayout.HelpBox(message, MessageType.Warning, wide: true); - } - - /// - /// Shows a platform-specific performance warning help box for a given setting within a feature. - /// - /// The build target platform to check and display - /// The name of the setting responsible for the performance warning in the feature. - /// The current value of the setting. - /// The name of the asset responsible for producing this warning. - /// Action to perform when the helpbox button is clicked. - /// Optional recommendation text. If null, uses default "is not recommended for this platform" - internal static void ShowPlatformParameterPerformanceWarning(BuildTarget targetPlatform, string settingName, string settingValue, string sourceAssetName, Action onButtonClicked, string recommendation = null) - { - if (EditorUserBuildSettings.activeBuildTarget != targetPlatform) - return; - - var activeBuildTargetGroup = BuildPipeline.GetBuildTargetGroup(targetPlatform); - var namedBuildTarget = NamedBuildTarget.FromBuildTargetGroup(activeBuildTargetGroup).TargetName; - - string message = $"{settingName}: {settingValue} is set in {sourceAssetName} for {namedBuildTarget}. 
"; - if (!string.IsNullOrEmpty(recommendation)) { message += '\n' + recommendation; @@ -612,11 +576,7 @@ internal static void ShowPlatformParameterPerformanceWarning(BuildTarget targetP message += $"\nThis may impact performance and is not recommended for this platform."; } - CoreEditorUtils.DrawFixMeBox( - message, - MessageType.Warning, - "Open", - onButtonClicked); + return message; } internal static bool IsInTestSuiteOrBatchMode() diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.Skin.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.Skin.cs index 6c559747363..b530c5a6edd 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.Skin.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.Skin.cs @@ -422,6 +422,10 @@ int CountBits(uint bitMask) // System.Numerics.BitOperations not available public static GUIContent staticBatchingInfoMessage = EditorGUIUtility.TrTextContent("Static Batching is not recommended when using GPU draw submission modes, performance may improve if Static Batching is disabled in Player Settings."); + public static readonly string featureNotRecommendedWarning = L10n.Tr("This feature is not optimized for this platform, enabling it may affect performance."); + public static readonly string maxFogBudgetWarning = L10n.Tr("The maximum recommended fog budget for this platform is {0}. 
Lower the tier to stay under this for optimal performance."); + public static readonly string minFogBudgetForDensityCutoffWarning = L10n.Tr("It is recommended to use Density cutoff when the fog budget is > {0} for this platform."); + public const string memoryDrawback = "Adds GPU memory"; public const string shaderVariantDrawback = "Adds Shader Variants"; public const string lotShaderVariantDrawback = "Adds multiple Shader Variants"; diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.cs index 7f29c53659a..5b0305dcca8 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.cs @@ -230,6 +230,13 @@ static void Drawer_Volumetric(SerializedHDRenderPipelineAsset serialized, Editor } EditorGUILayout.PropertyField(serialized.renderPipelineSettings.supportVolumetricClouds, Styles.supportVolumetricCloudsContent); + + if (EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) + { + var validationSettings = HDProjectSettings.validationSettings; + if (serialized.renderPipelineSettings.supportVolumetricClouds.boolValue && !validationSettings.k_VolumetricClouds_Recommended) + HDEditorUtils.ShowFeatureOptimisationWarning(Styles.volumetricCloudsSubTitle.text); + } } static void Drawer_SectionProbeVolume(SerializedHDRenderPipelineAsset serialized, Editor owner) @@ -1231,6 +1238,13 @@ static void Drawer_SectionHighQualityLineRenderingSettings(SerializedHDRenderPip { EditorGUILayout.PropertyField(serialized.renderPipelineSettings.supportHighQualityLineRendering, Styles.supportHighQualityLineRenderingContent); + if (EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) + { + var validationSettings = HDProjectSettings.validationSettings; + if 
(serialized.renderPipelineSettings.supportHighQualityLineRendering.boolValue && !validationSettings.k_HighQualityLineRendering_Recommended) + HDEditorUtils.ShowFeatureOptimisationWarning(Styles.highQualityLineRenderingSubTitle.text); + } + ++EditorGUI.indentLevel; using (new EditorGUI.DisabledScope(!serialized.renderPipelineSettings.supportHighQualityLineRendering.boolValue)) { @@ -1448,7 +1462,7 @@ static void DrawSSGIQualitySetting(SerializedHDRenderPipelineAsset serialized, i internal static void DisplayRayTracingSupportBox() { var currentBuildTarget = EditorUserBuildSettings.activeBuildTarget; - if (HDRenderPipeline.PlatformHasRaytracingIssues(currentBuildTarget, out var warning)) + if (HDRenderPipeline.CheckPlatformRaytracingCompatability(currentBuildTarget, out var warning)) { EditorGUILayout.HelpBox(warning, MessageType.Warning); } @@ -1604,8 +1618,13 @@ static void Drawer_SectionMaterialUnsorted(SerializedHDRenderPipelineAsset seria EditorGUILayout.PropertyField(serialized.renderPipelineSettings.supportDistortion, Styles.supportDistortion); EditorGUILayout.PropertyField(serialized.renderPipelineSettings.supportSubsurfaceScattering, Styles.supportedSSSContent); - if (serialized.renderPipelineSettings.supportSubsurfaceScattering.boolValue) - HDEditorUtils.ShowPlatformPerformanceWarning(BuildTarget.Switch2, "Subsurface Scattering"); + + if (EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) + { + var validationSettings = HDProjectSettings.validationSettings; + if (serialized.renderPipelineSettings.supportSubsurfaceScattering.boolValue && !validationSettings.k_SubsurfaceScattering_Recommended) + HDEditorUtils.ShowFeatureOptimisationWarning(Styles.supportedSSSContent.text); + } using (new EditorGUI.DisabledScope(serialized.renderPipelineSettings.supportSubsurfaceScattering.hasMultipleDifferentValues || !serialized.renderPipelineSettings.supportSubsurfaceScattering.boolValue)) diff --git 
a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/LineRendering/HDRenderPipeline.LineRendering.VolumeComponentEditor.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/LineRendering/HDRenderPipeline.LineRendering.VolumeComponentEditor.cs index f7589519388..39dc98ae9eb 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/LineRendering/HDRenderPipeline.LineRendering.VolumeComponentEditor.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/LineRendering/HDRenderPipeline.LineRendering.VolumeComponentEditor.cs @@ -43,6 +43,36 @@ public override void OnInspectorGUI() } using var disableScope = new EditorGUI.DisabledScope(notSupported); + if (!notSupported && EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) + { + var validationSettings = HDProjectSettings.validationSettings; + var defaultHQLines = HDEditorUtils.GetVolumeComponentDefaultState(); + + // Get effective enable value (use local if overridden, otherwise use default) + bool useDefaultEnable = !m_Enable.overrideState.boolValue; + bool effectiveEnable = useDefaultEnable + ? (defaultHQLines?.enable.value ?? 
false) + : m_Enable.value.boolValue; + + if (effectiveEnable && !validationSettings.k_HighQualityLineRendering_Recommended) + { + using (new IndentLevelScope()) + { + if (useDefaultEnable && HDEditorUtils.TryGetVolumeParameterSource( + hqLines => hqLines.enable.overrideState && hqLines.enable.value == effectiveEnable, + out var sourceProfile, + out var sourceName)) + { + HDEditorUtils.ShowFeatureOptimisationWarning(HDRenderPipelineUI.Styles.highQualityLineRenderingSubTitle.text, sourceName, () => Selection.activeObject = sourceProfile); + } + else + { + HDEditorUtils.ShowFeatureOptimisationWarning(HDRenderPipelineUI.Styles.highQualityLineRenderingSubTitle.text); + } + } + } + } + EditorGUILayout.LabelField("General", EditorStyles.miniLabel); PropertyField(m_Enable); PropertyField(m_CompositionMode); diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/VolumeComponentWithQualityEditor.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/VolumeComponentWithQualityEditor.cs index b908537351e..3d06e72db2f 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/VolumeComponentWithQualityEditor.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/VolumeComponentWithQualityEditor.cs @@ -130,7 +130,7 @@ public void Dispose() // We use ConditionalWeakTable instead of a Dictionary of InstanceIDs to get automatic clean-up of dead entries in the table static ConditionalWeakTable s_CustomSettingsHistory = new ConditionalWeakTable(); - static readonly int k_CustomQuality = ScalableSettingLevelParameter.LevelCount; + protected static readonly int k_CustomQuality = ScalableSettingLevelParameter.LevelCount; public override void OnEnable() { @@ -210,6 +210,7 @@ public override void OnInspectorGUI() protected bool useCustomValue => m_QualitySetting.value.intValue == k_CustomQuality; protected bool overrideState => m_QualitySetting.overrideState.boolValue; + 
protected int value => m_QualitySetting.value.intValue; /// /// This utility can be used to copy a value into a volume component setting visible in the inspector. diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipelineResources/HDProjectSettings.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipelineResources/HDProjectSettings.cs index 6c17dde14c7..de5c93259e2 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipelineResources/HDProjectSettings.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipelineResources/HDProjectSettings.cs @@ -99,6 +99,43 @@ public static long pluginSubTargetLastSeenSubTargetVersionsSum get => instance.m_InProjectPluginLastSeenSubTargetVersionsSum; } + // This is a class of performance thresholds/recommendations used in HDRP to validate graphics settings for certain platforms. + // This needs to be refactored into a per-platform data-driven object. + internal class GraphicsValidationSettings + { + // Volumetric Fog + public readonly float k_Fog_MaximumFogBudget = 0.35f; + public readonly float k_Fog_MinimumFogBudgetForCutoff = 0.2f; + + // Volumetric Clouds + public readonly bool k_VolumetricClouds_Recommended = false; + + // Film Grain + public readonly bool k_FilmGrain_Recommended = false; + + // Water + public readonly bool k_Water_TessellationRecommended = false; + public readonly bool k_Water_ScriptInteractionsRecommended = false; + public readonly bool k_Water_CausticsRecommended = false; + public readonly bool k_Water_UnderwaterRecommended = false; + + // Subsurface Scattering + public readonly bool k_SubsurfaceScattering_Recommended = false; + + // High Quality Line Rendering + public readonly bool k_HighQualityLineRendering_Recommended = false; + + // Graphics Compositor + public readonly bool k_GraphicsCompositor_Recommended = false; + } + + [NonSerialized] + private GraphicsValidationSettings m_GraphicsValidationSettings = 
new GraphicsValidationSettings(); + internal static GraphicsValidationSettings validationSettings + { + get { return instance.m_GraphicsValidationSettings; } + } + //singleton pattern static HDProjectSettings instance { diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Sky/AtmosphericScattering/FogEditor.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Sky/AtmosphericScattering/FogEditor.cs index 78fc1f35924..50156fb5bbf 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Sky/AtmosphericScattering/FogEditor.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Sky/AtmosphericScattering/FogEditor.cs @@ -1,5 +1,4 @@ using UnityEngine; -using UnityEngine.Rendering; using UnityEngine.Rendering.HighDefinition; namespace UnityEditor.Rendering.HighDefinition @@ -28,7 +27,6 @@ class FogEditor : VolumeComponentWithQualityEditor protected SerializedDataParameter m_DepthExtent; protected SerializedDataParameter m_GlobalLightProbeDimmer; protected SerializedDataParameter m_SliceDistributionUniformity; - protected SerializedDataParameter m_VolumetricFogQuality; protected SerializedDataParameter m_FogControlMode; protected SerializedDataParameter m_ScreenResolutionPercentage; protected SerializedDataParameter m_VolumeSliceCount; @@ -72,7 +70,6 @@ public override void OnEnable() m_EnableVolumetricFog = Unpack(o.Find(x => x.enableVolumetricFog)); m_DepthExtent = Unpack(o.Find(x => x.depthExtent)); m_SliceDistributionUniformity = Unpack(o.Find(x => x.sliceDistributionUniformity)); - m_VolumetricFogQuality = Unpack(o.Find(x => x.quality)); m_FogControlMode = Unpack(o.Find(x => x.fogControlMode)); m_ScreenResolutionPercentage = Unpack(o.Find(x => x.screenResolutionPercentage)); m_VolumeSliceCount = Unpack(o.Find(x => x.volumeSliceCount)); @@ -141,28 +138,54 @@ public override void OnInspectorGUI() base.OnInspectorGUI(); // Quality Setting - var defaultFog = HDEditorUtils.GetVolumeComponentDefaultState(); + float 
effectiveFogBudget = 0.0f; + bool useDefaultDensityCutoff = !m_VolumetricLightingDensityCutoff.overrideState.boolValue; + float effectiveDensityCutoff = 0.0f; + if (EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) + { + var validationSettings = HDProjectSettings.validationSettings; + Fog defaultFog = HDEditorUtils.GetVolumeComponentDefaultState(); - // Get effective quality value (use local if overridden, otherwise use default) - bool useDefaultQuality = !m_VolumetricFogQuality.overrideState.boolValue; - int effectiveQuality = useDefaultQuality - ? (defaultFog?.quality.value ?? -1) - : m_VolumetricFogQuality.value.intValue; + // Get effective quality value (use local if overridden, otherwise use default) + bool useDefaultQuality = !overrideState; + int effectiveQuality = useDefaultQuality + ? (defaultFog?.quality.value ?? 0) + : value; - if (effectiveQuality > 1) - { - using (new IndentLevelScope()) + effectiveDensityCutoff = useDefaultDensityCutoff + ? (defaultFog?.volumetricLightingDensityCutoff.value ?? 0.0f) + : m_VolumetricLightingDensityCutoff.value.floatValue; + + // Custom quality tier + if (effectiveQuality == k_CustomQuality) { - if (useDefaultQuality && HDEditorUtils.TryGetVolumeParameterSource( - fog => fog.quality.overrideState && fog.quality.value > 1, - out var sourceProfile, - out var sourceName)) - { - HDEditorUtils.ShowPlatformParameterPerformanceWarning(BuildTarget.Switch2, "Tier", "High", sourceName, () => Selection.activeObject = sourceProfile, "It is recommended to either disable Volumetric Fog, or lower the quality tier to Low/Medium"); - } - else + bool useDefaultFogBudget = !m_VolumetricFogBudget.overrideState.boolValue; + effectiveFogBudget = useDefaultFogBudget + ? (defaultFog?.volumetricFogBudget ?? 
0.0f) + : m_VolumetricFogBudget.value.floatValue; + } + else if (hdpipe != null) + { + effectiveFogBudget = hdpipe.currentPlatformRenderPipelineSettings.lightingQualitySettings.Fog_Budget[effectiveQuality]; + } + + if (effectiveFogBudget >= validationSettings.k_Fog_MaximumFogBudget && (FogControl)m_FogControlMode.value.intValue == FogControl.Balance) + { + using (new IndentLevelScope()) { - HDEditorUtils.ShowPlatformParameterPerformanceWarning(BuildTarget.Switch2, "Tier", "High", "It is recommended to either disable Volumetric Fog, or lower the quality tier to Low/Medium."); + string tierName = $"{(effectiveQuality == k_CustomQuality ? "Custom" : ((ScalableSettingLevelParameter.Level)effectiveQuality).ToString())} (Budget: {effectiveFogBudget})"; + string warningMessage = string.Format(HDRenderPipelineUI.Styles.maxFogBudgetWarning, validationSettings.k_Fog_MaximumFogBudget); + if (useDefaultQuality && HDEditorUtils.TryGetVolumeParameterSource( + fog => fog.quality.overrideState && fog.quality.value == effectiveQuality, + out var sourceProfile, + out var sourceName)) + { + HDEditorUtils.ShowFeatureParameterOptimisationWarning("Tier", tierName, sourceName, () => Selection.activeObject = sourceProfile, warningMessage); + } + else + { + HDEditorUtils.ShowFeatureParameterOptimisationWarning("Tier", tierName, warningMessage); + } } } } @@ -180,6 +203,11 @@ public override void OnInspectorGUI() } else { + if (EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) + { + HDEditorUtils.ShowFeatureParameterOptimisationWarning(m_FogControlMode.displayName, ((FogControl)m_FogControlMode.value.intValue).ToString(), "Manual fog control mode can have performance impacts if misused, use with care."); + } + PropertyField(m_ScreenResolutionPercentage); PropertyField(m_VolumeSliceCount); } @@ -203,13 +231,7 @@ public override void OnInspectorGUI() } PropertyField(m_VolumetricLightingDensityCutoff); - // Get effective density cutoff value (use local if overridden, 
otherwise use default) - bool useDefaultDensityCutoff = !m_VolumetricLightingDensityCutoff.overrideState.boolValue; - float effectiveDensityCutoff = useDefaultDensityCutoff - ? (defaultFog?.volumetricLightingDensityCutoff.value ?? -1.0f) - : m_VolumetricLightingDensityCutoff.value.floatValue; - - if (effectiveDensityCutoff > 0.0f) + if (m_VolumetricLightingDensityCutoff.value.floatValue > 0.0f) { using (new IndentLevelScope()) { @@ -217,10 +239,13 @@ public override void OnInspectorGUI() EditorGUILayout.HelpBox($"The current minimum density for the fog is {currentMinExtinction:F3} (calculated from the Fog Distance).", MessageType.Info, wide: true); } } - else + + if (EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) { - if (effectiveQuality > 0) + var validationSettings = HDProjectSettings.validationSettings; + if (effectiveDensityCutoff <= 0.0f && effectiveFogBudget >= validationSettings.k_Fog_MinimumFogBudgetForCutoff && (FogControl)m_FogControlMode.value.intValue == FogControl.Balance) { + string warningMessage = string.Format(HDRenderPipelineUI.Styles.minFogBudgetForDensityCutoffWarning, validationSettings.k_Fog_MinimumFogBudgetForCutoff); using (new IndentLevelScope()) { if (useDefaultDensityCutoff && HDEditorUtils.TryGetVolumeParameterSource( @@ -228,17 +253,18 @@ public override void OnInspectorGUI() out var sourceProfile, out var sourceName)) { - HDEditorUtils.ShowPlatformParameterPerformanceWarning(BuildTarget.Switch2, "Density Cutoff", "0", sourceName, () => Selection.activeObject = sourceProfile, "It is recommended to use Density cutoff when using Medium/High tier."); + HDEditorUtils.ShowFeatureParameterOptimisationWarning("Density Cutoff", "0", sourceName, () => Selection.activeObject = sourceProfile, warningMessage); } else { - HDEditorUtils.ShowPlatformParameterPerformanceWarning(BuildTarget.Switch2, "Density Cutoff", "0", "It is recommended to use Density cutoff when using Medium/High tier."); + 
HDEditorUtils.ShowFeatureParameterOptimisationWarning("Density Cutoff", "0", warningMessage); } } } } } } + PropertyField(m_MultipleScatteringIntensity); } diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.Appearance.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.Appearance.cs index f189fa99855..11104820697 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.Appearance.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.Appearance.cs @@ -231,10 +231,18 @@ static internal void WaterSurfaceAppearanceSection(WaterSurfaceEditor serialized // Caustics using (new BoldLabelScope()) EditorGUILayout.PropertyField(serialized.m_Caustics, k_Caustics); - if (serialized.m_Caustics.boolValue) + + using (new IndentLevelScope()) { - using (new IndentLevelScope()) + if (serialized.m_Caustics.boolValue) { + if (EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) + { + var validationSettings = HDProjectSettings.validationSettings; + if (!validationSettings.k_Water_CausticsRecommended) + HDEditorUtils.ShowFeatureOptimisationWarning(k_Caustics.text); + } + EditorGUILayout.PropertyField(serialized.m_CausticsResolution); int bandCount = WaterSystem.EvaluateBandCount(surfaceType, serialized.m_Ripples.boolValue); @@ -306,6 +314,13 @@ static internal void WaterSurfaceAppearanceSection(WaterSurfaceEditor serialized { if (serialized.m_UnderWater.boolValue) { + if (EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) + { + var validationSettings = HDProjectSettings.validationSettings; + if (!validationSettings.k_Water_UnderwaterRecommended) + HDEditorUtils.ShowFeatureOptimisationWarning(k_UnderWater.text); + } + // Bounds data if ((WaterGeometryType)serialized.m_GeometryType.enumValueIndex != WaterGeometryType.Infinite) { diff --git 
a/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.cs index bae8d83f2fc..6a5cbad91d5 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.cs @@ -174,8 +174,16 @@ static internal void WaterSurfaceGeneralSection(WaterSurfaceEditor serialized, E using (new EditorGUI.IndentLevelScope()) { + if (serialized.m_ScriptInteractions.boolValue) { + if (EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) + { + var validationSettings = HDProjectSettings.validationSettings; + if (!validationSettings.k_Water_ScriptInteractionsRecommended) + HDEditorUtils.ShowFeatureOptimisationWarning(serialized.m_ScriptInteractions.displayName); + } + WaterSurfaceType surfaceType = (WaterSurfaceType)(serialized.m_SurfaceType.enumValueIndex); // Does the surface support ripples @@ -203,10 +211,18 @@ static internal void WaterSurfaceGeneralSection(WaterSurfaceEditor serialized, E using (new BoldLabelScope()) EditorGUILayout.PropertyField(serialized.m_Tessellation); - if (serialized.m_Tessellation.boolValue) + + using (new EditorGUI.IndentLevelScope()) { - using (new EditorGUI.IndentLevelScope()) + if (serialized.m_Tessellation.boolValue) { + if (EditorGraphicsSettings.ShouldValidateGraphicsForActiveBuildTarget()) + { + var validationSettings = HDProjectSettings.validationSettings; + if (!validationSettings.k_Water_TessellationRecommended) + HDEditorUtils.ShowFeatureOptimisationWarning(serialized.m_Tessellation.displayName); + } + EditorGUILayout.PropertyField(serialized.m_MaxTessellationFactor); if (AdvancedProperties.BeginGroup()) { @@ -295,18 +311,6 @@ public override void OnInspectorGUI() HDEditorUtils.EnsureVolume((WaterRendering water) => !water.enable.value ? 
"This Water Surface cannot render properly because Water Rendering override state property in the Volume System is either set to disabled or the current camera is currently not rendering." : null); HDEditorUtils.EnsureFrameSetting(FrameSettingsField.Water); - if (m_Tessellation.boolValue) - HDEditorUtils.ShowPlatformPerformanceWarning(BuildTarget.Switch2, "Water Tessellation"); - - if (m_ScriptInteractions.boolValue) - HDEditorUtils.ShowPlatformPerformanceWarning(BuildTarget.Switch2, "Water Script Interactions"); - - if (m_Caustics.boolValue) - HDEditorUtils.ShowPlatformPerformanceWarning(BuildTarget.Switch2, "Water Caustics"); - - if (m_UnderWater.boolValue) - HDEditorUtils.ShowPlatformPerformanceWarning(BuildTarget.Switch2, "Underwater"); - if (target is WaterSurface surface && surface.surfaceIndex == -1) { EditorGUILayout.HelpBox("Only up to 16 water surfaces are supported simultaneously. This surface will not be rendered.", MessageType.Warning); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/AtmosphericScattering/Fog.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/AtmosphericScattering/Fog.cs index f8b7eccc101..76013f03a6d 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/AtmosphericScattering/Fog.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/AtmosphericScattering/Fog.cs @@ -127,6 +127,10 @@ public float volumetricFogBudget } set { m_VolumetricFogBudget.value = value; } } + + /// The override state for the fog component's volumetric fog budget. + public bool volumetricFogBudgetOverrideState => m_VolumetricFogBudget.overrideState; + [AdditionalProperty] [SerializeField, FormerlySerializedAs("volumetricFogBudget")] [Tooltip("Controls the performance to quality ratio of the volumetric fog. 
A value of 0 being the least resource-intensive and a value of 1 being the highest quality.")] diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightLoop/lightlistbuild.compute b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightLoop/lightlistbuild.compute index 9d5f151d8f7..ccf51274c66 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightLoop/lightlistbuild.compute +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightLoop/lightlistbuild.compute @@ -166,7 +166,13 @@ void TileLightListGen(uint3 dispatchThreadId : SV_DispatchThreadID, uint threadI prunedList[i] = 0; } +#if SHADER_API_SWITCH + // Only used for internal testing and not for scenes with large number of lights + // Currently when enabled this can trigger a hang on tests + bool lightSortRequired = false; +#else bool lightSortRequired = g_iNrVisibLights > LIGHT_LIST_MAX_COARSE_ENTRIES; // Uniform runtime branch +#endif if (lightSortRequired) { UNITY_UNROLLX(DIV_ROUND_UP(MAX_NR_VISIBLE_LIGHTS/32, NR_THREADS)) diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowManager.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowManager.cs index 6bd98cc55c7..35d3b5b30ad 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowManager.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowManager.cs @@ -911,9 +911,6 @@ internal int ReserveShadowResolutions(Vector2 resolution, ShadowMapType shadowMa { if (m_ShadowRequestCount >= m_MaxShadowRequests) { -#if UNITY_EDITOR || DEVELOPMENT_BUILD - Debug.LogWarning("Max shadow requests count reached, dropping all exceeding requests. 
You can increase this limit by changing the Maximum Shadows on Screen property in the HDRP asset."); -#endif return -1; } diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs index 5fbe99a9563..a48c12f3948 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs @@ -2163,6 +2163,11 @@ internal void InternalRender(ScriptableRenderContext renderContext, List Render(renderContext, cameras); } +#if UNITY_EDITOR + // Throttle logging for WaterUpdate. Remove when FrameDebugger is improved to handle variable updates. + double m_RealtimeSinceLastLogUpdateWaterSurfaces; +#endif + /// /// RenderPipeline Render implementation. /// @@ -2229,15 +2234,36 @@ protected override void Render(ScriptableRenderContext renderContext, List 60.0) + { + Debug.Log("Water surface updates are disabled while the FrameDebugger is active. Some events might not be visible in the FrameDebugger."); + m_RealtimeSinceLastLogUpdateWaterSurfaces = Time.realtimeSinceStartupAsDouble; + } + } + else + { + m_RealtimeSinceLastLogUpdateWaterSurfaces = 0.0; // Make sure we notify on FrameDebugger activation. 
+#else + { +#endif + + // Update the water surfaces + var commandBuffer = CommandBufferPool.Get(""); + waterSystem.UpdateWaterSurfaces(commandBuffer); + renderContext.ExecuteCommandBuffer(commandBuffer); + renderContext.Submit(); + commandBuffer.Clear(); + CommandBufferPool.Release(commandBuffer); + } + } #if DEVELOPMENT_BUILD || UNITY_EDITOR @@ -3471,7 +3497,7 @@ void ReAllocateOffscreenUIColorBufferIfNeeded() } } } - + void AllocateCustomPassBuffers() { m_CustomPassColorBuffer = new Lazy(() => RTHandles.Alloc(Vector2.one, TextureXR.slices, dimension: TextureXR.dimension, colorFormat: GetCustomBufferFormat(), enableRandomWrite: true, useDynamicScale: true, name: "CustomPassColorBuffer")); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRaytracingManager.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRaytracingManager.cs index 3b9e55c7b89..74e56607fb3 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRaytracingManager.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRaytracingManager.cs @@ -938,7 +938,7 @@ static internal bool IsSupportedRayTracingTarget(BuildTarget buildTarget) return false; } - static internal bool PlatformHasRaytracingIssues(BuildTarget buildTarget, out string message) + static internal bool CheckPlatformRaytracingCompatability(BuildTarget buildTarget, out string message) { message = string.Empty; diff --git a/Packages/com.unity.render-pipelines.high-definition/Samples~/MaterialSamples/Scenes/Scene Resources/EyeSamplesDescriptions.json b/Packages/com.unity.render-pipelines.high-definition/Samples~/MaterialSamples/Scenes/Scene Resources/EyeSamplesDescriptions.json index 276879a5322..942e493a260 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Samples~/MaterialSamples/Scenes/Scene Resources/EyeSamplesDescriptions.json +++ 
b/Packages/com.unity.render-pipelines.high-definition/Samples~/MaterialSamples/Scenes/Scene Resources/EyeSamplesDescriptions.json @@ -4,7 +4,7 @@ Human eyeballs are about 25mm in diameter. The shader uses separate diffusion profiles for the iris and sclera. Scaling the eye changes how subsurface scattering looks. To get the same result for eyes of different sizes, duplicate the diffusion profiles and adjust the world scale, as shown in the 100mm eye example. -This Shader Graph is designed exclusively for this 3D model with an import scale factor of one. When working with different models or scale factors, you'll need to create a separate graph. In this example, we've chosen the 'EyeCinematicWithCaustics' type within the Material Type, located under Surface Options in the Graph Inspector. +This Shader Graph is designed exclusively for this 3D model with an import scale factor of one. When working with different models or scale factors, you'll need to create a separate graph. In this example, we've chosen the 'EyeCinematicWithCaustics' type within the Material Type, located under Surface Options in the Graph Inspector. 
""", "samples": [] diff --git a/Packages/com.unity.render-pipelines.universal/Editor/Tools/Converters/AnimationClipConverter/AnimationClipConverter.cs b/Packages/com.unity.render-pipelines.universal/Editor/Tools/Converters/AnimationClipConverter/AnimationClipConverter.cs index d2474e9b732..2c4ec3b1f90 100644 --- a/Packages/com.unity.render-pipelines.universal/Editor/Tools/Converters/AnimationClipConverter/AnimationClipConverter.cs +++ b/Packages/com.unity.render-pipelines.universal/Editor/Tools/Converters/AnimationClipConverter/AnimationClipConverter.cs @@ -106,14 +106,21 @@ void OnAnimationClipDependenciesSearchFinish() ( SearchServiceUtils.IndexingOptions.DeepSearch, query, - (searchItem, path) => + (item, path) => { - if (searchItem.ToObject() is not GameObject go || go.scene == null) + var unityObject = item.ToObject(); + + if (unityObject == null) return; - var gid = GlobalObjectId.GetGlobalObjectIdSlow(go); + var gid = GlobalObjectId.GetGlobalObjectIdSlow(unityObject); + int type = gid.identifierType; // 1=Asset, 2=SceneObject - var assetItem = new RenderPipelineConverterAssetItem(gid, go.scene.path); + var assetItem = new RenderPipelineConverterAssetItem(gid.ToString()) + { + name = unityObject.name, + info = path, + }; if (animatorUsingClip.TryGetValue(path, out var list)) { diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Debug/DebugDisplaySettingsRendering.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Debug/DebugDisplaySettingsRendering.cs index 8f6360fea04..4fbafd82804 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Debug/DebugDisplaySettingsRendering.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Debug/DebugDisplaySettingsRendering.cs @@ -195,6 +195,20 @@ public enum TaaDebugMode /// public TaaDebugMode taaDebugMode { get; set; } = TaaDebugMode.None; + /// + /// Whether to block the Reflection Probe Atlas overlay display. 
Returns true when the relevant overlay is selected + /// but the main camera's rendering mode doesn't support displaying the Reflection Probe Atlas. + /// + internal bool blockReflectionProbeAtlasOverlay + { + get + { + var camera = Camera.main; + return camera == null ? false : + (fullScreenDebugMode == DebugFullScreenMode.ReflectionProbeAtlas && !(camera.GetUniversalAdditionalCameraData().scriptableRenderer as UniversalRenderer).usesClusterLightLoop); + } + } + /// /// Whether to block the STP overlay display. Returns true when STP debug mode is selected /// but the Universal Render Pipeline's upscaling filter is not set to STP. @@ -232,6 +246,7 @@ static class Strings public const string RangeValidationSettingsContainerName = "Pixel Range Settings"; public static readonly NameAndTooltip MapOverlays = new() { name = "Map Overlays", tooltip = "Overlays render pipeline textures to validate the scene." }; + public static readonly NameAndTooltip ReflectionProbeAtlasDebugWarning = new() { name = "Warning: Reflection Probe Atlas visualization not supported for the current rendering path. Switch to Forward+ or Deferred+ to use this debug view.", tooltip = "Switch to Forward+ or Deferred+ in the main camera's Universal Renderer Data asset to use this debug overlay." }; public static readonly NameAndTooltip StpDebugWarning = new() { name = "Warning: STP Overlay Not Active. Enable STP upscaling filter in the render pipeline asset to use these debug view.", tooltip = "Enable STP upscaling filter in the render pipeline asset to use these debug views." }; public static readonly NameAndTooltip StpDebugViews = new() { name = "STP Debug Views", tooltip = "Debug visualizations provided by STP." }; public static readonly NameAndTooltip MapSize = new() { name = "Map Size", tooltip = "Set the size of the render pipeline texture in the scene." 
}; @@ -300,9 +315,16 @@ internal static class WidgetFactory { children = { + new DebugUI.MessageBox + { + nameAndTooltip = Strings.ReflectionProbeAtlasDebugWarning, + style = DebugUI.MessageBox.Style.Warning, + isHiddenCallback = () => !data.blockReflectionProbeAtlasOverlay, + }, new DebugUI.IntField { nameAndTooltip = Strings.MapSize, + isHiddenCallback = () => data.blockReflectionProbeAtlasOverlay, getter = () => data.fullScreenDebugModeOutputSizeScreenPercent, setter = value => data.fullScreenDebugModeOutputSizeScreenPercent = value, incStep = 10, diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcess/UberPostProcessPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcess/UberPostProcessPass.cs index 1f27b28e442..1c8beab4b0e 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcess/UberPostProcessPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcess/UberPostProcessPass.cs @@ -9,8 +9,15 @@ internal sealed class UberPostProcessPass : PostProcessPass Material m_Material; Texture2D[] m_FilmGrainTextures; + public enum FilteringOperation + { + Linear, + Point + } + Texture m_DitherTexture; RTHandle m_UserLut; + FilteringOperation m_FilteringOperation; HDROutputUtils.Operation m_HdrOperations; bool m_IsValid; bool m_IsFinalPass; @@ -28,6 +35,7 @@ public UberPostProcessPass(Shader shader, Texture2D[] filmGrainTextures) // Defaults m_DitherTexture = null; // Dither disabled. + m_FilteringOperation = FilteringOperation.Linear; // Common case. m_HdrOperations = HDROutputUtils.Operation.None; // HDR disabled. m_RequireSRGBConversionBlit = false; // sRGB conversion is typically automatic based on format. m_IsFinalPass = false; // Assume other passes. 
@@ -42,12 +50,14 @@ public override void Dispose() } public void Setup(Texture ditherTexture, + FilteringOperation filteringOperation, HDROutputUtils.Operation hdrOperations, bool requireSRGBConversionBlit, bool isFinalPass, bool renderOverlayUI) { m_DitherTexture = ditherTexture; + m_FilteringOperation = filteringOperation; m_HdrOperations = hdrOperations; m_RequireSRGBConversionBlit = requireSRGBConversionBlit; m_IsFinalPass = isFinalPass; @@ -64,6 +74,7 @@ private class UberPostPassData internal UniversalCameraData cameraData; internal Tonemapping tonemapping; + internal FilteringOperation filteringOperation; internal HDROutputUtils.Operation hdrOperations; internal bool isHdrGrading; @@ -166,6 +177,7 @@ public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer // HDR passData.tonemapping = tonemapping; + passData.filteringOperation = m_FilteringOperation; passData.hdrOperations = m_HdrOperations; passData.isHdrGrading = postProcessingData.gradingMode == ColorGradingMode.HighDynamicRange; @@ -187,10 +199,21 @@ public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer { var cameraData = data.cameraData; var material = data.material; + var filteringOperation = data.filteringOperation; // Reset keywords material.shaderKeywords = null; + switch (filteringOperation) + { + case FilteringOperation.Point: + material.EnableKeyword(ShaderKeywordStrings.PointSampling); + break; + case FilteringOperation.Linear: goto default; + default: + break; + } + data.lut.Apply(material); if (data.bloom.IsActive()) diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/PostProcess.cs b/Packages/com.unity.render-pipelines.universal/Runtime/PostProcess.cs index a8614efdca3..95b181dedc0 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/PostProcess.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/PostProcess.cs @@ -213,8 +213,26 @@ public void RenderPostProcessing(RenderGraph renderGraph, 
ContextContainer frame hdrOperations = !hasFinalPass && enableColorEncodingIfNeeded ? HDROutputUtils.Operation.ColorEncoding : HDROutputUtils.Operation.None; } + UberPostProcessPass.FilteringOperation filteringOperation = UberPostProcessPass.FilteringOperation.Linear; + + // Point sampling is only used for upscaling so the default linear sampler should be used if there is a final pass + if (cameraData.imageScalingMode == ImageScalingMode.Upscaling && !hasFinalPass) + { +#if ENABLE_UPSCALER_FRAMEWORK + if (cameraData.resolvedUpscalerHash == UniversalRenderPipeline.k_UpscalerHash_Point) + { + filteringOperation = UberPostProcessPass.FilteringOperation.Point; + } +#else + if (cameraData.upscalingFilter == ImageUpscalingFilter.Point) + { + filteringOperation = UberPostProcessPass.FilteringOperation.Point; + } +#endif + } + bool renderOverlayUI = requireHDROutput && enableColorEncodingIfNeeded; - m_UberPass.Setup(ditherTexture, hdrOperations, applySrgbEncoding, !hasFinalPass, renderOverlayUI); + m_UberPass.Setup(ditherTexture, filteringOperation, hdrOperations, applySrgbEncoding, !hasFinalPass, renderOverlayUI); m_UberPass.RecordRenderGraph(renderGraph, frameData); } diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/RendererFeatures/FullScreenPassRendererFeature.cs b/Packages/com.unity.render-pipelines.universal/Runtime/RendererFeatures/FullScreenPassRendererFeature.cs index 202b3039b14..a127ee66a2a 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/RendererFeatures/FullScreenPassRendererFeature.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/RendererFeatures/FullScreenPassRendererFeature.cs @@ -123,7 +123,7 @@ public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingD m_FullScreenPass.SetupMembers(passMaterial, passIndex, fetchColorBuffer, bindDepthStencilAttachment); m_FullScreenPass.requiresIntermediateTexture = fetchColorBuffer; - + renderer.EnqueuePass(m_FullScreenPass); } @@ -150,14 +150,14 @@ 
public void SetupMembers(Material material, int passIndex, bool fetchActiveColor m_BindDepthStencilAttachment = bindDepthStencilAttachment; } - private static void ExecuteMainPass(RasterCommandBuffer cmd, RTHandle sourceTexture, Material material, int passIndex) + private static void ExecuteMainPass(RasterCommandBuffer cmd, RTHandle sourceTexture, Material material, int passIndex, Vector4 blitScaleBias) { s_SharedPropertyBlock.Clear(); if (sourceTexture != null) s_SharedPropertyBlock.SetTexture(ShaderPropertyId.blitTexture, sourceTexture); // We need to set the "_BlitScaleBias" uniform for user materials with shaders relying on core Blit.hlsl to work - s_SharedPropertyBlock.SetVector(ShaderPropertyId.blitScaleBias, new Vector4(1, 1, 0, 0)); + s_SharedPropertyBlock.SetVector(ShaderPropertyId.blitScaleBias, blitScaleBias); cmd.DrawProcedural(Matrix4x4.identity, material, passIndex, MeshTopology.Triangles, 3, 1, s_SharedPropertyBlock); } @@ -169,20 +169,21 @@ public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer TextureHandle source, destination; - Debug.Assert(resourcesData.cameraColor.IsValid()); - if (m_FetchActiveColor) { + // The pass requests the intermediate textures so this should always be valid + Debug.Assert(resourcesData.cameraColor.IsValid()); + var targetDesc = renderGraph.GetTextureDesc(resourcesData.cameraColor); targetDesc.name = "_CameraColorFullScreenPass"; targetDesc.clearBuffer = false; - source = resourcesData.activeColorTexture; + source = resourcesData.cameraColor; destination = renderGraph.CreateTexture(targetDesc); renderGraph.AddBlitPass(source, destination, Vector2.one, Vector2.zero, passName: "Copy Color Full Screen"); - //Swap for next pass; + // Swap for next pass; source = destination; } else @@ -190,21 +191,11 @@ public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer source = TextureHandle.nullHandle; } + // If resourcesData.isActiveTargetBackBuffer == true, then the backbuffer is 
already written to and this could overwrite it. + // However, the user might want to blend into the backbuffer so we allow it here. destination = resourcesData.activeColorTexture; - - // The AddBlitPass utility is not used when m_BindDepthStencilAttachment is active since SetRenderAttachmentDepth is not available with the returned builder of AddBlitPass. - bool useCustomPass = input != ScriptableRenderPassInput.None || m_BindDepthStencilAttachment; - - if (useCustomPass) - { - AddFullscreenRenderPassInputPass(renderGraph, resourcesData, cameraData, source, destination); - } - else - { - var blitMaterialParameters = new BlitMaterialParameters(source, destination, m_Material, m_PassIndex); - - renderGraph.AddBlitPass(blitMaterialParameters, passName: "Blit Color Full Screen"); - } + + AddFullscreenRenderPassInputPass(renderGraph, resourcesData, cameraData, source, destination); } private void AddFullscreenRenderPassInputPass(RenderGraph renderGraph, UniversalResourceData resourcesData, UniversalCameraData cameraData, in TextureHandle source, in TextureHandle destination) @@ -214,10 +205,11 @@ private void AddFullscreenRenderPassInputPass(RenderGraph renderGraph, Universal passData.material = m_Material; passData.passIndex = m_PassIndex; - passData.inputTexture = source; + passData.source = source; + passData.destination = destination; - if (passData.inputTexture.IsValid()) - builder.UseTexture(passData.inputTexture, AccessFlags.Read); + if (passData.source.IsValid()) + builder.UseTexture(passData.source, AccessFlags.Read); bool needsColor = (input & ScriptableRenderPassInput.Color) != ScriptableRenderPassInput.None; bool needsDepth = (input & ScriptableRenderPassInput.Depth) != ScriptableRenderPassInput.None; @@ -262,21 +254,17 @@ private void AddFullscreenRenderPassInputPass(RenderGraph renderGraph, Universal builder.SetRenderFunc(static (MainPassData data, RasterGraphContext rgContext) => { - ExecuteMainPass(rgContext.cmd, data.inputTexture, data.material, 
data.passIndex); + Vector4 scaleBias = RenderingUtils.GetFinalBlitScaleBias(rgContext, in data.source, in data.destination); + ExecuteMainPass(rgContext.cmd, data.source, data.material, data.passIndex, scaleBias); }); } } - - private class CopyPassData - { - internal TextureHandle inputTexture; - } - private class MainPassData { internal Material material; internal int passIndex; - internal TextureHandle inputTexture; + internal TextureHandle source; + internal TextureHandle destination; } } } diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineGlobalSettings.cs b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineGlobalSettings.cs index 9ac8ee13785..434430962f5 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineGlobalSettings.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineGlobalSettings.cs @@ -139,18 +139,12 @@ public static void UpgradeAsset(EntityId assetInstanceID) asset.m_AssetVersion = 8; } - // URPReflectionProbeSetings is introduced set the values for older projects. + // URPReflectionProbeSettings is introduced; disable rotation for older projects to preserve + // pre-existing behavior (rotation was not supported before this version). 
if (asset.m_AssetVersion < 9) { - if (GraphicsSettings.TryGetRenderPipelineSettings(out var reflectionProbeSettings)) - { - reflectionProbeSettings.UseReflectionProbeRotation = false; - } - else - { - Debug.LogError("Failed to upgrade global settings for URPReflectionProbeSettings since it doesn't exists."); - } - + var reflectionProbeSettings = GetOrCreateGraphicsSettings(asset); + reflectionProbeSettings.UseReflectionProbeRotation = false; asset.m_AssetVersion = 9; } diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererDebug.cs b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererDebug.cs index 882a26bc82e..8adbfc4a441 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererDebug.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererDebug.cs @@ -38,15 +38,6 @@ private Rect CalculateUVRect(UniversalCameraData cameraData, float width, float return new Rect(1 - normalizedSizeX, 1 - normalizedSizeY, normalizedSizeX, normalizedSizeY); } - private Rect CalculateUVRect(UniversalCameraData cameraData, int textureHeightPercent) - { - var relativeSize = Mathf.Clamp01(textureHeightPercent / 100f); - var width = relativeSize * cameraData.pixelWidth; - var height = relativeSize * cameraData.pixelHeight; - - return CalculateUVRect(cameraData, width, height); - } - private void CorrectForTextureAspectRatio(ref float width, ref float height, float sourceWidth, float sourceHeight) { if (sourceWidth != 0 && sourceHeight != 0) @@ -72,9 +63,20 @@ private void SetupRenderGraphFinalPassDebug(RenderGraph renderGraph, ContextCont if ((DebugHandler != null) && DebugHandler.IsActiveForCamera(cameraData.isPreviewCamera)) { if (DebugHandler.TryGetFullscreenDebugMode(out DebugFullScreenMode fullScreenDebugMode, out int textureHeightPercent) && - (fullScreenDebugMode != DebugFullScreenMode.ReflectionProbeAtlas || usesClusterLightLoop) && - (fullScreenDebugMode != DebugFullScreenMode.STP)) 
+ (fullScreenDebugMode != DebugFullScreenMode.ReflectionProbeAtlas || usesClusterLightLoop)) { + // if we want to visualize RG internal resources, we need to create an RTHandle external to RG and copy to it the textures to visualize + // this is required because the lifetime of these resources is limited to the RenderGraph execution, and we cannot access the actual resources here + + // we also copy external resources into the debug texture below to make them "read only". CreateDebugTexture() can lead to (external) texture reallocation. + + var debugDescriptor = cameraData.cameraTargetDescriptor; + // Ensure target can hold all source values. Source can be signed for example. + if (SystemInfo.IsFormatSupported(GraphicsFormat.R16G16B16A16_SFloat, GraphicsFormatUsage.Linear | GraphicsFormatUsage.Render)) + debugDescriptor.graphicsFormat = GraphicsFormat.R16G16B16A16_SFloat; + + CreateDebugTexture(debugDescriptor); + float screenWidth = cameraData.pixelWidth; float screenHeight = cameraData.pixelHeight; @@ -85,107 +87,98 @@ private void SetupRenderGraphFinalPassDebug(RenderGraph renderGraph, ContextCont bool supportsStereo = false; Vector4 dataRangeRemap = Vector4.zero; // zero = off, .x = old min, .y = old max, .z = new min, .w = new max - // visualize RG internal resources + // The debug texture for STP is filled later during post-process rendering so we cannot assign the resource now or it will be invalid + if (fullScreenDebugMode != DebugFullScreenMode.STP) { - // if we want to visualize RG internal resources, we need to create an RTHandle external to RG and copy to it the textures to visualize - // this is required because the lifetime of these resources is limited to the RenderGraph execution, and we cannot access the actual resources here - - // we also copy external resources to make them "read only". CreateDebugTexture() can lead to (external) texture reallocation. 
- - var debugDescriptor = cameraData.cameraTargetDescriptor; - // Ensure target can hold all source values. Source can be signed for example. - if(SystemInfo.IsFormatSupported(GraphicsFormat.R16G16B16A16_SFloat, GraphicsFormatUsage.Linear | GraphicsFormatUsage.Render)) - debugDescriptor.graphicsFormat = GraphicsFormat.R16G16B16A16_SFloat; - - CreateDebugTexture(debugDescriptor); - - ImportResourceParams importParams = new ImportResourceParams(); - importParams.clearOnFirstUse = false; - importParams.discardOnLastUse = false; - TextureHandle debugTexture = renderGraph.ImportTexture(s_RenderGraphDebugTextureHandle, importParams); + // Render into the debug texture + { + ImportResourceParams importParams = new ImportResourceParams(); + importParams.clearOnFirstUse = false; + importParams.discardOnLastUse = false; + TextureHandle debugTexture = renderGraph.ImportTexture(s_RenderGraphDebugTextureHandle, importParams); - switch (fullScreenDebugMode) - { - case DebugFullScreenMode.Depth: - { - BlitToDebugTexture(renderGraph, resourceData.cameraDepthTexture, debugTexture); - supportsStereo = true; - break; - } - case DebugFullScreenMode.MotionVector: - { - BlitToDebugTexture(renderGraph, resourceData.motionVectorColor, debugTexture, isSourceTextureColor: true); - supportsStereo = true; - // Motion vectors are in signed UV space, zoom in and normalize for visualization. 
(note: maybe add an option to use (angle, mag) visualization) - const float zoom = 0.01f; - dataRangeRemap.x = -zoom; - dataRangeRemap.y = zoom; - dataRangeRemap.z = 0; - dataRangeRemap.w = 1.0f; - break; - } - case DebugFullScreenMode.AdditionalLightsShadowMap: - { - BlitToDebugTexture(renderGraph, resourceData.additionalShadowsTexture, debugTexture); - break; - } - case DebugFullScreenMode.MainLightShadowMap: - { - BlitToDebugTexture(renderGraph, resourceData.mainShadowsTexture, debugTexture); - break; - } - case DebugFullScreenMode.AdditionalLightsCookieAtlas: + switch (fullScreenDebugMode) { - // Copy atlas texture to make it "readonly". Direct reference (debug=atlas) can lead to handle->texture reallocation. - var textureHandle = - m_LightCookieManager is { AdditionalLightsCookieAtlasTexture: not null } - ? renderGraph.ImportTexture(m_LightCookieManager - .AdditionalLightsCookieAtlasTexture) - : TextureHandle.nullHandle; - - BlitToDebugTexture(renderGraph, textureHandle, debugTexture); - break; + case DebugFullScreenMode.Depth: + { + BlitToDebugTexture(renderGraph, resourceData.cameraDepthTexture, debugTexture); + supportsStereo = true; + break; + } + case DebugFullScreenMode.MotionVector: + { + BlitToDebugTexture(renderGraph, resourceData.motionVectorColor, debugTexture, isSourceTextureColor: true); + supportsStereo = true; + // Motion vectors are in signed UV space, zoom in and normalize for visualization. 
(note: maybe add an option to use (angle, mag) visualization) + const float zoom = 0.01f; + dataRangeRemap.x = -zoom; + dataRangeRemap.y = zoom; + dataRangeRemap.z = 0; + dataRangeRemap.w = 1.0f; + break; + } + case DebugFullScreenMode.AdditionalLightsShadowMap: + { + BlitToDebugTexture(renderGraph, resourceData.additionalShadowsTexture, debugTexture); + break; + } + case DebugFullScreenMode.MainLightShadowMap: + { + BlitToDebugTexture(renderGraph, resourceData.mainShadowsTexture, debugTexture); + break; + } + case DebugFullScreenMode.AdditionalLightsCookieAtlas: + { + // Copy atlas texture to make it "readonly". Direct reference (debug=atlas) can lead to handle->texture reallocation. + var textureHandle = + m_LightCookieManager is { AdditionalLightsCookieAtlasTexture: not null } + ? renderGraph.ImportTexture(m_LightCookieManager + .AdditionalLightsCookieAtlasTexture) + : TextureHandle.nullHandle; + + BlitToDebugTexture(renderGraph, textureHandle, debugTexture); + break; + } + + case DebugFullScreenMode.ReflectionProbeAtlas: + { + // Copy atlas texture to make it "readonly". Direct reference (debug=atlas) can lead to handle->texture reallocation. + var textureHandle = + m_ForwardLights.reflectionProbeManager.atlasRT != null + ? renderGraph.ImportTexture(RTHandles.Alloc( + m_ForwardLights.reflectionProbeManager.atlasRT, transferOwnership: true)) + : TextureHandle.nullHandle; + + BlitToDebugTexture(renderGraph, textureHandle, debugTexture); + break; + } + default: + { + break; + } } + } - case DebugFullScreenMode.ReflectionProbeAtlas: - { - // Copy atlas texture to make it "readonly". Direct reference (debug=atlas) can lead to handle->texture reallocation. - var textureHandle = - m_ForwardLights.reflectionProbeManager.atlasRT != null - ? 
renderGraph.ImportTexture(RTHandles.Alloc( - m_ForwardLights.reflectionProbeManager.atlasRT, transferOwnership: true)) - : TextureHandle.nullHandle; - - BlitToDebugTexture(renderGraph, textureHandle, debugTexture); - break; - } - default: + // Textures that are not in screen aspect ratio need to be corrected + { + RenderTexture source = null; + switch (fullScreenDebugMode) { - break; + case DebugFullScreenMode.AdditionalLightsShadowMap: source = m_AdditionalLightsShadowCasterPass?.m_AdditionalLightsShadowmapHandle?.rt; break; + case DebugFullScreenMode.MainLightShadowMap: source = m_MainLightShadowCasterPass?.m_MainLightShadowmapTexture?.rt; break; + case DebugFullScreenMode.AdditionalLightsCookieAtlas: source = m_LightCookieManager?.AdditionalLightsCookieAtlasTexture?.rt; break; + case DebugFullScreenMode.ReflectionProbeAtlas: source = m_ForwardLights?.reflectionProbeManager.atlasRT; break; + default: + break; } - } - } - // Textures that are not in screen aspect ratio need to be corrected - { - RenderTexture source = null; - switch (fullScreenDebugMode) - { - case DebugFullScreenMode.AdditionalLightsShadowMap: source = m_AdditionalLightsShadowCasterPass?.m_AdditionalLightsShadowmapHandle?.rt; break; - case DebugFullScreenMode.MainLightShadowMap: source = m_MainLightShadowCasterPass?.m_MainLightShadowmapTexture?.rt; break; - case DebugFullScreenMode.AdditionalLightsCookieAtlas: source = m_LightCookieManager?.AdditionalLightsCookieAtlasTexture?.rt; break; - case DebugFullScreenMode.ReflectionProbeAtlas: source = m_ForwardLights?.reflectionProbeManager.atlasRT; break; - default: - break; + // Ensure that atlas is not stretched, but doesn't take up more than the percentage in any dimension. + if (source != null) + CorrectForTextureAspectRatio(ref width, ref height, source.width, source.height); } - - // Ensure that atlas is not stretched, but doesn't take up more than the percentage in any dimension. 
- if (source != null) - CorrectForTextureAspectRatio(ref width, ref height, source.width, source.height); } - Rect uvRect = CalculateUVRect(cameraData, width, height); DebugHandler.SetDebugRenderTarget(s_RenderGraphDebugTextureHandle, uvRect, supportsStereo, dataRangeRemap); } @@ -221,17 +214,11 @@ private void SetupAfterPostRenderGraphFinalPassDebug(RenderGraph renderGraph, Co if (DebugHandler.TryGetFullscreenDebugMode(out var debugFullscreenMode, out int textureHeightPercent) && (debugFullscreenMode == DebugFullScreenMode.STP)) { - CreateDebugTexture(cameraData.cameraTargetDescriptor); - ImportResourceParams importParams = new ImportResourceParams(); importParams.clearOnFirstUse = false; importParams.discardOnLastUse = false; TextureHandle debugTexture = renderGraph.ImportTexture(s_RenderGraphDebugTextureHandle, importParams); BlitToDebugTexture(renderGraph, resourceData.stpDebugView, debugTexture); - - Rect uvRect = CalculateUVRect(cameraData, textureHeightPercent); - Vector4 rangeRemap = Vector4.zero; // Off - DebugHandler.SetDebugRenderTarget(s_RenderGraphDebugTextureHandle, uvRect, true, rangeRemap); } } } diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/PostProcessing/UberPost.shader b/Packages/com.unity.render-pipelines.universal/Shaders/PostProcessing/UberPost.shader index 3fb34366c8e..8a793dc0605 100644 --- a/Packages/com.unity.render-pipelines.universal/Shaders/PostProcessing/UberPost.shader +++ b/Packages/com.unity.render-pipelines.universal/Shaders/PostProcessing/UberPost.shader @@ -1,6 +1,7 @@ Shader "Hidden/Universal Render Pipeline/UberPost" { HLSLINCLUDE + #pragma multi_compile_local_fragment _ _POINT_SAMPLING #pragma multi_compile_local_fragment _ _DISTORTION #pragma multi_compile_local_fragment _ _CHROMATIC_ABERRATION #pragma multi_compile_local_fragment _ _BLOOM_LQ _BLOOM_HQ _BLOOM_LQ_DIRT _BLOOM_HQ_DIRT @@ -146,6 +147,15 @@ Shader "Hidden/Universal Render Pipeline/UberPost" return uv; } + half4 SampleColor(float2 uv) + { + #if 
_POINT_SAMPLING + return SAMPLE_TEXTURE2D_X(_BlitTexture, sampler_PointClamp, uv); + #else + return SAMPLE_TEXTURE2D_X(_BlitTexture, sampler_LinearClamp, uv); + #endif + } + half4 FragUberPost(Varyings input) : SV_Target { UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(input); @@ -155,7 +165,7 @@ Shader "Hidden/Universal Render Pipeline/UberPost" // NOTE: Hlsl specifies missing input.a to fill 1 (0 for .rgb). // InputColor is a "bottom" layer for alpha output. - half4 inputColor = SAMPLE_TEXTURE2D_X(_BlitTexture, sampler_LinearClamp, ClampUVForBilinear(SCREEN_COORD_REMOVE_SCALEBIAS(uvDistorted), _BlitTexture_TexelSize.xy)); + half4 inputColor = SampleColor(ClampUVForBilinear(SCREEN_COORD_REMOVE_SCALEBIAS(uvDistorted), _BlitTexture_TexelSize.xy)); half3 color = inputColor.rgb; #if _CHROMATIC_ABERRATION @@ -167,8 +177,8 @@ Shader "Hidden/Universal Render Pipeline/UberPost" float2 delta = (end - uv) / 3.0; half r = color.r; - half g = SAMPLE_TEXTURE2D_X(_BlitTexture, sampler_LinearClamp, ClampUVForBilinear(SCREEN_COORD_REMOVE_SCALEBIAS(DistortUV(delta + uv) ), _BlitTexture_TexelSize.xy)).y; - half b = SAMPLE_TEXTURE2D_X(_BlitTexture, sampler_LinearClamp, ClampUVForBilinear(SCREEN_COORD_REMOVE_SCALEBIAS(DistortUV(delta * 2.0 + uv)), _BlitTexture_TexelSize.xy)).z; + half g = SampleColor(ClampUVForBilinear(SCREEN_COORD_REMOVE_SCALEBIAS(DistortUV(delta + uv) ), _BlitTexture_TexelSize.xy)).y; + half b = SampleColor(ClampUVForBilinear(SCREEN_COORD_REMOVE_SCALEBIAS(DistortUV(delta * 2.0 + uv)), _BlitTexture_TexelSize.xy)).z; color = half3(r, g, b); } diff --git a/Packages/com.unity.shadergraph/Documentation~/Create-Shader-Graph.md b/Packages/com.unity.shadergraph/Documentation~/Create-Shader-Graph.md index bf5b07eeb02..fcf20b0fb0b 100644 --- a/Packages/com.unity.shadergraph/Documentation~/Create-Shader-Graph.md +++ b/Packages/com.unity.shadergraph/Documentation~/Create-Shader-Graph.md @@ -55,3 +55,5 @@ You can now open the asset and edit the graph in the [Shader Graph 
window](Shade * [Shader Graph template browser](template-browser.md) * [Create a custom shader graph template](template-browser.md#create-a-custom-shader-graph-template) * [Shader Graph window](Shader-Graph-Window.md) +* [Shader Graph Asset reference](Shader-Graph-Asset.md) + diff --git a/Packages/com.unity.shadergraph/Documentation~/Lerp-Node.md b/Packages/com.unity.shadergraph/Documentation~/Lerp-Node.md index 90009c2d2f8..47de1677e45 100644 --- a/Packages/com.unity.shadergraph/Documentation~/Lerp-Node.md +++ b/Packages/com.unity.shadergraph/Documentation~/Lerp-Node.md @@ -4,20 +4,26 @@ Returns the result of linearly interpolating between input **A** and input **B** by input **T**. -The output is calculated as `A + T * (B - A)`. The value of input **T** acts as a weight factor applied to the difference between **B** and **A**: +Unity calculates the output as: + +A + T × (B − A) + +The value of input **T** acts as a weight factor applied to the difference between **B** and **A**: - When **T** is `0`, the output equals **A**. - When **T** is `1`, the output equals **B**. - When **T** is `0.5`, the output is the midpoint between **A** and **B**. +The Lerp node uses Dynamic Vector slots, so **A**, **B**, and **T** always resolve to the same component count, which matches the smallest connected vector (larger vectors truncate). Scalars promote to the resolved size by duplicating their value across components. + ## Ports | Name | Direction | Type | Description | |:-----|:----------|:---------------|:------------| -| A | Input | Dynamic Vector | First input value | -| B | Input | Dynamic Vector | Second input value | -| T | Input | Dynamic Vector | Time value. Typical range: 0 to 1. Though you can use values outside of this range they may cause unpredictable results. 
| -Out | Output | Dynamic Vector | Output value | +| **A** | Input | Dynamic Vector | First input value | +| **B** | Input | Dynamic Vector | Second input value | +| **T** | Input | Dynamic Vector | Time value. Typical range: 0 to 1. Though you can use values outside of this range they may cause unpredictable results. | +| **Out** | Output | Dynamic Vector | Output value | ## Generated Code Example diff --git a/Packages/com.unity.shadergraph/Documentation~/Node.md b/Packages/com.unity.shadergraph/Documentation~/Node.md index 7ccb65f58a9..8fde6148615 100644 --- a/Packages/com.unity.shadergraph/Documentation~/Node.md +++ b/Packages/com.unity.shadergraph/Documentation~/Node.md @@ -40,3 +40,5 @@ Right clicking on a **Node** will open a context menu. This menu contains many o **Nodes** interact with the Shader Graph Window's Color Modes. Colors are displayed on nodes underneath the text on the node title bar. See [Color Modes](Color-Modes.md) for more information on available colors for nodes. + +Unity applies each component of T as a weight factor to each component of A and B. If T has fewer components than A and B, Unity casts T to the required number of components. Unity copies the values of the original components of T to the added components. 
diff --git a/Packages/com.unity.shadergraph/Documentation~/images/template-browser.png b/Packages/com.unity.shadergraph/Documentation~/images/template-browser.png index 7a736e64d7f..6af4d137283 100644 Binary files a/Packages/com.unity.shadergraph/Documentation~/images/template-browser.png and b/Packages/com.unity.shadergraph/Documentation~/images/template-browser.png differ diff --git a/Packages/com.unity.shadergraph/Documentation~/template-browser.md b/Packages/com.unity.shadergraph/Documentation~/template-browser.md index ae6a01d2670..556ded3c230 100644 --- a/Packages/com.unity.shadergraph/Documentation~/template-browser.md +++ b/Packages/com.unity.shadergraph/Documentation~/template-browser.md @@ -4,17 +4,20 @@ The Shader Graph template browser allows you to create a new shader graph from a To access the Shader Graph template browser, right-click in your Project window and select **Create** > **Shader Graph** > **From Template**. +**Note**: The template browser displays only templates that are compatible with the current project. + ![The template browser](images/template-browser.png) | Label | Name | Description | | :--- | :--- | :--- | | **A** | Template list | Lists all the available templates you can select and start from to create a new shader graph. | | **B** | Template details | Displays a picture and description of the selected template. | -| **C** | Action buttons | Finish the asset creation flow. The options are:
  • **Create**: Creates a new shader graph asset based on the selected template.
  • **Cancel**: Closes the window and cancels the shader graph asset creation.
| - -**Note**: The template browser displays only templates that are compatible with the current project. +| **C** | Search and filtering tool | Filters the template list using the [Unity Search](https://docs.unity3d.com/Manual/search-overview.html) functionality. Type text to search templates by name or select **Add** (+) to filter templates based on specific characteristics.
In addition to some of the default Unity Search options, Shader Graph includes the following filters:
  • **Category**: Filters by template grouping category.
  • **material**: Filters by the target material type.
  • **renderpipeline**: Filters by render pipeline.
  • **vfx**: Filters by Visual Effect Graph support.
| +| **D** | Sorting tool | Sorts the templates within their respective categories. The categories remain listed in alphabetical order. The options are:
  • **Sort By Name**: Lists templates in alphabetical order.
  • **Sort By Order**: Lists templates in Shader Graph's default order.
  • **Sort By Modification Date**: Lists the last modified templates first.
  • **Sort By Last Used**: Lists the last used templates first.
  • **Sort By Favorite**: Lists templates marked as favorites first.
**Note**: To mark a template as a favorite, hover over the template in the list and select the gray star that appears. To remove a template as a favorite, select the star again. | +| **E** | **Cancel** | Closes the window and cancels the shader graph asset creation. | +| **F** | **Create** | Creates a new shader graph asset based on the selected template. | -### Create a custom shader graph template +## Create a custom shader graph template You can create your own shader graph templates to have them available in the template browser. You can share these templates with your team to maintain consistency across shaders, for example in projects with unique lighting setups or specific shader requirements. @@ -34,3 +37,4 @@ To create a custom shader graph template, follow these steps: ## Additional resources * [Create a new shader graph](Create-Shader-Graph.md) +* [Shader Graph Asset reference](Shader-Graph-Asset.md) diff --git a/Packages/com.unity.shadergraph/Editor/Generation/Targets/UITK/UISubTarget.cs b/Packages/com.unity.shadergraph/Editor/Generation/Targets/UITK/UISubTarget.cs index d69a5deb115..791af02fe9c 100644 --- a/Packages/com.unity.shadergraph/Editor/Generation/Targets/UITK/UISubTarget.cs +++ b/Packages/com.unity.shadergraph/Editor/Generation/Targets/UITK/UISubTarget.cs @@ -223,18 +223,26 @@ public string GetValidatorKey() public INodeValidationExtension.Status GetValidationStatus(AbstractMaterialNode node, out string msg) { - // Make sure node is in our graph first if (node.owner == null) { msg = null; return INodeValidationExtension.Status.None; } - // Clear all Warning/Error message from other providers. - // The message from the graph (when loading the graph) will not be removed - // since it's not the same provider as the UISubTarget. It then stays present - // even if the UV0 is selected. 
- node.owner.messageManager.ClearNodeFromOtherProvider(this, new[] { node }); + if (!IsIUISubTarget(node)) + { + msg = null; + return INodeValidationExtension.Status.None; + } + + if (!HasUVMaterialSlotOrIsUVNode(node)) + { + msg = null; + return INodeValidationExtension.Status.None; + } + + node.owner.messageManager.ClearNodesFromProvider(node.owner, new[] { node }); + node.owner.messageManager.ClearNodesFromProvider(this, new[] { node }); foreach (var item in node.owner.activeTargets) { @@ -251,8 +259,53 @@ public INodeValidationExtension.Status GetValidationStatus(AbstractMaterialNode return INodeValidationExtension.Status.None; } - private bool ValidateUV(AbstractMaterialNode node, out string warningMessage) + static bool IsIUISubTarget(AbstractMaterialNode node) { + bool isIUISubTarget = false; + foreach (var target in node.owner.activeTargets) + { + var subTarget = target.activeSubTarget; + if (subTarget is IUISubTarget) + { + isIUISubTarget = true; + break; + } + } + return isIUISubTarget; + } + + static bool HasUVMaterialSlotOrIsUVNode(AbstractMaterialNode node) + { + List uvSlots = new(); + node.GetInputSlots(uvSlots); + + if (uvSlots.Count > 0) + { + return true; + } + + UVNode uvNode = node as UVNode; + if (uvNode != null) + { + return true; + } + return false; + } + + static bool ValidateUV(AbstractMaterialNode node, out string warningMessage) + { + if (!IsIUISubTarget(node)) + { + warningMessage = null; + return false; + } + + if (!HasUVMaterialSlotOrIsUVNode(node)) + { + warningMessage = null; + return false; + } + List uvSlots = new(); node.GetInputSlots(uvSlots); diff --git a/Packages/com.unity.shadergraph/Editor/Util/MessageManager.cs b/Packages/com.unity.shadergraph/Editor/Util/MessageManager.cs index 90d75b39bb4..bd6e72eb3b6 100644 --- a/Packages/com.unity.shadergraph/Editor/Util/MessageManager.cs +++ b/Packages/com.unity.shadergraph/Editor/Util/MessageManager.cs @@ -123,25 +123,6 @@ public void ClearNodesFromProvider(object messageProvider, 
IEnumerable nodes) - { - foreach (var key in m_Messages.Keys) - { - if (key != messageProvider) - { - foreach (var node in nodes) - { - if (m_Messages[key].TryGetValue(node.objectId, out var messages)) - { - nodeMessagesChanged |= messages.Count > 0; - messages.Clear(); - } - } - } - } - } - public void ClearAll() { m_Messages.Clear(); diff --git a/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsBasic.shadersubgraph b/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsBasic.shadersubgraph index b3e5030e73e..f39f62cf8ee 100644 --- a/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsBasic.shadersubgraph +++ b/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsBasic.shadersubgraph @@ -849,7 +849,7 @@ "m_FunctionName": "AddAdditionalLightsBasic", "m_FunctionSource": "3beadf505dbc54f4cae878435013d751", "m_FunctionSourceUsePragmas": true, - "m_FunctionBody": "Diffuse = MainDiffuse;\r\nColor = MainColor * MainDiffuse;\r\n\r\n#ifndef SHADERGRAPH_PREVIEW\r\n \r\n uint pixelLightCount = GetAdditionalLightsCount();\r\n\r\n#if USE_CLUSTER_LIGHT_LOOP\r\n // for Foward+ LIGHT_LOOP_BEGIN macro uses inputData.normalizedScreenSpaceUV and inputData.positionWS\r\n InputData inputData = (InputData)0;\r\r\n inputData.normalizedScreenSpaceUV = ScreenPosition;\r\n inputData.positionWS = WorldPosition;\r\n#endif\r\n\r\n LIGHT_LOOP_BEGIN(pixelLightCount)\r\n\t\t// Convert the pixel light index to the light data index\r\n\t\t#if !USE_CLUSTER_LIGHT_LOOP\r\n\t\t\tlightIndex = GetPerObjectLightIndex(lightIndex);\r\n\t\t#endif\r\n\t\tLight light = GetAdditionalPerObjectLight(lightIndex, WorldPosition);\r\n float NdotL = saturate(dot(WorldNormal, light.direction));\r\n float thisDiffuse = light.distanceAttenuation * NdotL;\r\n Diffuse += thisDiffuse;\r\n Color += light.color * thisDiffuse;\r\n 
LIGHT_LOOP_END\r\n float total = Diffuse;\r\n Color = total <= 0 ? MainColor : Color / total;\r\n#endif" + "m_FunctionBody": "Diffuse = MainDiffuse;\r\nColor = MainColor * MainDiffuse;\r\n\r\n#ifndef SHADERGRAPH_PREVIEW\r\n \r\n uint pixelLightCount = GetAdditionalLightsCount();\r\n\r\n#if USE_CLUSTER_LIGHT_LOOP\r\n // for Foward+ LIGHT_LOOP_BEGIN macro uses inputData.normalizedScreenSpaceUV and inputData.positionWS\r\n InputData inputData = (InputData)0;\r\n\r\n inputData.normalizedScreenSpaceUV = ScreenPosition;\r\n inputData.positionWS = WorldPosition;\r\n#endif\r\n\r\n LIGHT_LOOP_BEGIN(pixelLightCount)\r\n\t\tLight light = GetAdditionalLight(lightIndex, WorldPosition);\r\n float NdotL = saturate(dot(WorldNormal, light.direction));\r\n float thisDiffuse = light.distanceAttenuation * NdotL;\r\n Diffuse += thisDiffuse;\r\n Color += light.color * thisDiffuse;\r\n LIGHT_LOOP_END\r\n float total = Diffuse;\r\n Color = total <= 0 ? MainColor : Color / total;\r\n#endif" } { diff --git a/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsColorize.shadersubgraph b/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsColorize.shadersubgraph index 89485fe06be..87086bfd402 100644 --- a/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsColorize.shadersubgraph +++ b/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsColorize.shadersubgraph @@ -1269,7 +1269,7 @@ "m_FunctionName": "AddAdditionalLightsColorize", "m_FunctionSource": "3beadf505dbc54f4cae878435013d751", "m_FunctionSourceUsePragmas": true, - "m_FunctionBody": "Diffuse = MainDiffuse;\r\nSpecular = MainSpecular;\r\nColor = MainColor * (MainDiffuse + MainSpecular);\r\nAtten = 0;\r\n\r\n#ifndef SHADERGRAPH_PREVIEW\r\n \r\n uint pixelLightCount = GetAdditionalLightsCount();\r\n\r\n#if USE_CLUSTER_LIGHT_LOOP\r\n // for Foward+ 
LIGHT_LOOP_BEGIN macro uses inputData.normalizedScreenSpaceUV and inputData.positionWS\r\n InputData inputData = (InputData)0;\r\r\n inputData.normalizedScreenSpaceUV = ScreenPosition;\r\n inputData.positionWS = WorldPosition;\r\n#endif\r\n\r\n LIGHT_LOOP_BEGIN(pixelLightCount)\r\n\t\t// Convert the pixel light index to the light data index\r\n\t\t#if !USE_CLUSTER_LIGHT_LOOP\r\n\t\t\tlightIndex = GetPerObjectLightIndex(lightIndex);\r\n\t\t#endif\r\n\t\t// Call the URP additional light algorithm. This will not calculate shadows, since we don't pass a shadow mask value\r\n\t\tLight light = GetAdditionalPerObjectLight(lightIndex, WorldPosition);\r\n\t\t// Manually set the shadow attenuation by calculating realtime shadows\r\n\t\tlight.shadowAttenuation = AdditionalLightRealtimeShadow(lightIndex, WorldPosition, light.direction);\r\n float NdotL = saturate(dot(WorldNormal, light.direction));\r\n float atten = light.distanceAttenuation * light.shadowAttenuation;\r\n float thisDiffuse = atten * NdotL;\r\n float3 halfAngle = normalize(light.direction + WorldView);\r\n float spec = pow(saturate(dot(halfAngle, WorldNormal)), SpecPower);\r\n float3 thisSpecular = spec * Reflectance * atten;\r\n Diffuse += thisDiffuse;\r\n Specular += thisSpecular;\r\n #if defined(_LIGHT_COOKIES)\r\n float3 cookieColor = SampleAdditionalLightCookie(lightIndex, WorldPosition);\r\n light.color *= cookieColor;\r\n #endif\r\n Color += light.color * (thisDiffuse + thisSpecular);\r\n\t\tAtten += atten;\r\n LIGHT_LOOP_END\r\n float total = Diffuse + dot(Specular, float3(0.333, 0.333, 0.333));\r\n Color = total <= 0 ? 
MainColor : Color / total;\r\n#endif" + "m_FunctionBody": "Diffuse = MainDiffuse;\r\nSpecular = MainSpecular;\r\nColor = MainColor * (MainDiffuse + MainSpecular);\r\nAtten = 0;\r\n\r\n#ifndef SHADERGRAPH_PREVIEW\r\n \r\n uint pixelLightCount = GetAdditionalLightsCount();\r\n\r\n#if USE_CLUSTER_LIGHT_LOOP\r\n // for Foward+ LIGHT_LOOP_BEGIN macro uses inputData.normalizedScreenSpaceUV and inputData.positionWS\r\n InputData inputData = (InputData)0;\r\n\r\n inputData.normalizedScreenSpaceUV = ScreenPosition;\r\n inputData.positionWS = WorldPosition;\r\n#endif\r\n\r\n LIGHT_LOOP_BEGIN(pixelLightCount)\r\n\t\t// Call the URP additional light algorithm. This will not calculate shadows, since we don't pass a shadow mask value\r\n\t\tLight light = GetAdditionalLight(lightIndex, WorldPosition);\r\n\t\t// Manually set the shadow attenuation by calculating realtime shadows\r\n\t\tlight.shadowAttenuation = AdditionalLightRealtimeShadow(lightIndex, WorldPosition, light.direction);\r\n float NdotL = saturate(dot(WorldNormal, light.direction));\r\n float atten = light.distanceAttenuation * light.shadowAttenuation;\r\n float thisDiffuse = atten * NdotL;\r\n float3 halfAngle = normalize(light.direction + WorldView);\r\n float spec = pow(saturate(dot(halfAngle, WorldNormal)), SpecPower);\r\n float3 thisSpecular = spec * Reflectance * atten;\r\n Diffuse += thisDiffuse;\r\n Specular += thisSpecular;\r\n #if defined(_LIGHT_COOKIES)\r\n float3 cookieColor = SampleAdditionalLightCookie(lightIndex, WorldPosition);\r\n light.color *= cookieColor;\r\n #endif\r\n Color += light.color * (thisDiffuse + thisSpecular);\r\n\t\tAtten += atten;\r\n LIGHT_LOOP_END\r\n float total = Diffuse + dot(Specular, float3(0.333, 0.333, 0.333));\r\n Color = total <= 0 ? 
MainColor : Color / total;\r\n#endif" } { diff --git a/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsHalfLambert.shadersubgraph b/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsHalfLambert.shadersubgraph index d252c03310e..2f5c6f4fc13 100644 --- a/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsHalfLambert.shadersubgraph +++ b/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsHalfLambert.shadersubgraph @@ -1140,7 +1140,7 @@ "m_FunctionName": "AddAdditionalLightsHalfLambert", "m_FunctionSource": "3beadf505dbc54f4cae878435013d751", "m_FunctionSourceUsePragmas": true, - "m_FunctionBody": "Diffuse = MainDiffuse;\r\nSpecular = MainSpecular;\r\nColor = MainColor * (MainDiffuse + MainSpecular);\r\n\r\n#ifndef SHADERGRAPH_PREVIEW\r\n \r\n uint pixelLightCount = GetAdditionalLightsCount();\r\n\r\n#if USE_CLUSTER_LIGHT_LOOP\r\n // for Foward+ LIGHT_LOOP_BEGIN macro uses inputData.normalizedScreenSpaceUV and inputData.positionWS\r\n InputData inputData = (InputData)0;\r\r\n inputData.normalizedScreenSpaceUV = ScreenPosition;\r\n inputData.positionWS = WorldPosition;\r\n#endif\r\n\r\n LIGHT_LOOP_BEGIN(pixelLightCount)\r\n\t\t// Convert the pixel light index to the light data index\r\n\t\t#if !USE_CLUSTER_LIGHT_LOOP\r\n\t\t\tlightIndex = GetPerObjectLightIndex(lightIndex);\r\n\t\t#endif\r\n\t\t// Call the URP additional light algorithm. 
This will not calculate shadows, since we don't pass a shadow mask value\r\n\t\tLight light = GetAdditionalPerObjectLight(lightIndex, WorldPosition);\r\n\t\t// Manually set the shadow attenuation by calculating realtime shadows\r\n\t\tlight.shadowAttenuation = AdditionalLightRealtimeShadow(lightIndex, WorldPosition, light.direction);\r\n float NdotL = dot(WorldNormal, light.direction);\n float halfLambert = NdotL * 0.5 + 0.5;\r\n float atten = light.distanceAttenuation * light.shadowAttenuation;\r\n float thisDiffuse = atten * halfLambert;\r\n float3 halfAngle = normalize(light.direction + WorldView);\r\n float spec = pow(saturate(dot(halfAngle, WorldNormal)), SpecPower);\r\n float3 thisSpecular = spec * Reflectance * atten;\r\n Diffuse += thisDiffuse;\r\n Specular += thisSpecular;\r\n #if defined(_LIGHT_COOKIES)\r\n float3 cookieColor = SampleAdditionalLightCookie(lightIndex, WorldPosition);\r\n light.color *= cookieColor;\r\n #endif\r\n Color += light.color * (thisDiffuse + thisSpecular);\r\n LIGHT_LOOP_END\r\n float total = Diffuse + dot(Specular, float3(0.333, 0.333, 0.333));\r\n Color = total <= 0 ? MainColor : Color / total;\r\n#endif" + "m_FunctionBody": "Diffuse = MainDiffuse;\r\nSpecular = MainSpecular;\r\nColor = MainColor * (MainDiffuse + MainSpecular);\r\n\r\n#ifndef SHADERGRAPH_PREVIEW\r\n \r\n uint pixelLightCount = GetAdditionalLightsCount();\r\n\r\n#if USE_CLUSTER_LIGHT_LOOP\r\n // for Foward+ LIGHT_LOOP_BEGIN macro uses inputData.normalizedScreenSpaceUV and inputData.positionWS\r\n InputData inputData = (InputData)0;\r\n\r\n inputData.normalizedScreenSpaceUV = ScreenPosition;\r\n inputData.positionWS = WorldPosition;\r\n#endif\r\n\r\n LIGHT_LOOP_BEGIN(pixelLightCount)\r\n\t\t// Call the URP additional light algorithm. 
This will not calculate shadows, since we don't pass a shadow mask value\r\n\t\tLight light = GetAdditionalLight(lightIndex, WorldPosition);\r\n\t\t// Manually set the shadow attenuation by calculating realtime shadows\r\n\t\tlight.shadowAttenuation = AdditionalLightRealtimeShadow(lightIndex, WorldPosition, light.direction);\r\n float NdotL = dot(WorldNormal, light.direction);\r\n float halfLambert = NdotL * 0.5 + 0.5;\r\n float atten = light.distanceAttenuation * light.shadowAttenuation;\r\n float thisDiffuse = atten * halfLambert;\r\n float3 halfAngle = normalize(light.direction + WorldView);\r\n float spec = pow(saturate(dot(halfAngle, WorldNormal)), SpecPower);\r\n float3 thisSpecular = spec * Reflectance * atten;\r\n Diffuse += thisDiffuse;\r\n Specular += thisSpecular;\r\n #if defined(_LIGHT_COOKIES)\r\n float3 cookieColor = SampleAdditionalLightCookie(lightIndex, WorldPosition);\r\n light.color *= cookieColor;\r\n #endif\r\n Color += light.color * (thisDiffuse + thisSpecular);\r\n LIGHT_LOOP_END\r\n float total = Diffuse + dot(Specular, float3(0.333, 0.333, 0.333));\r\n Color = total <= 0 ? 
MainColor : Color / total;\r\n#endif" } { diff --git a/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsSimple.shadersubgraph b/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsSimple.shadersubgraph index 51c8737b85e..56e5d24268b 100644 --- a/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsSimple.shadersubgraph +++ b/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsSimple.shadersubgraph @@ -1152,7 +1152,7 @@ "m_FunctionName": "AddAdditionalLightsSimple", "m_FunctionSource": "3beadf505dbc54f4cae878435013d751", "m_FunctionSourceUsePragmas": true, - "m_FunctionBody": "Diffuse = MainDiffuse;\r\nSpecular = MainSpecular;\r\nColor = MainColor * (MainDiffuse + MainSpecular);\r\n\r\n#ifndef SHADERGRAPH_PREVIEW\r\n \r\n uint pixelLightCount = GetAdditionalLightsCount();\r\n\r\n#if USE_CLUSTER_LIGHT_LOOP\r\n // for Foward+ LIGHT_LOOP_BEGIN macro uses inputData.normalizedScreenSpaceUV and inputData.positionWS\r\n InputData inputData = (InputData)0;\r\r\n inputData.normalizedScreenSpaceUV = ScreenPosition;\r\n inputData.positionWS = WorldPosition;\r\n#endif\r\n\r\n LIGHT_LOOP_BEGIN(pixelLightCount)\r\n\t\t// Convert the pixel light index to the light data index\r\n\t\t#if !USE_CLUSTER_LIGHT_LOOP\r\n\t\t\tlightIndex = GetPerObjectLightIndex(lightIndex);\r\n\t\t#endif\r\n\t\t// Call the URP additional light algorithm. 
This will not calculate shadows, since we don't pass a shadow mask value\r\n\t\tLight light = GetAdditionalPerObjectLight(lightIndex, WorldPosition);\r\n\t\t// Manually set the shadow attenuation by calculating realtime shadows\r\n\t\tlight.shadowAttenuation = AdditionalLightRealtimeShadow(lightIndex, WorldPosition, light.direction);\r\n float NdotL = saturate(dot(WorldNormal, light.direction));\r\n float atten = light.distanceAttenuation * light.shadowAttenuation;\r\n float thisDiffuse = atten * NdotL;\r\n float3 halfAngle = normalize(light.direction + WorldView);\r\n float spec = pow(saturate(dot(halfAngle, WorldNormal)), SpecPower);\r\n float3 thisSpecular = spec * Reflectance * atten;\r\n Diffuse += thisDiffuse;\r\n Specular += thisSpecular;\r\n #if defined(_LIGHT_COOKIES)\r\n float3 cookieColor = SampleAdditionalLightCookie(lightIndex, WorldPosition);\r\n light.color *= cookieColor;\r\n #endif\r\n Color += light.color * (thisDiffuse + thisSpecular);\r\n LIGHT_LOOP_END\r\n float total = Diffuse + dot(Specular, float3(0.333, 0.333, 0.333));\r\n Color = total <= 0 ? MainColor : Color / total;\r\n#endif" + "m_FunctionBody": "Diffuse = MainDiffuse;\r\nSpecular = MainSpecular;\r\nColor = MainColor * (MainDiffuse + MainSpecular);\r\n\r\n#ifndef SHADERGRAPH_PREVIEW\r\n \r\n uint pixelLightCount = GetAdditionalLightsCount();\r\n\r\n#if USE_CLUSTER_LIGHT_LOOP\r\n // for Foward+ LIGHT_LOOP_BEGIN macro uses inputData.normalizedScreenSpaceUV and inputData.positionWS\r\n InputData inputData = (InputData)0;\r\n\r\n inputData.normalizedScreenSpaceUV = ScreenPosition;\r\n inputData.positionWS = WorldPosition;\r\n#endif\r\n\r\n LIGHT_LOOP_BEGIN(pixelLightCount)\r\n\t\t// Call the URP additional light algorithm. 
This will not calculate shadows, since we don't pass a shadow mask value\r\n\t\tLight light = GetAdditionalLight(lightIndex, WorldPosition);\r\n\t\t// Manually set the shadow attenuation by calculating realtime shadows\r\n\t\tlight.shadowAttenuation = AdditionalLightRealtimeShadow(lightIndex, WorldPosition, light.direction);\r\n float NdotL = saturate(dot(WorldNormal, light.direction));\r\n float atten = light.distanceAttenuation * light.shadowAttenuation;\r\n float thisDiffuse = atten * NdotL;\r\n float3 halfAngle = normalize(light.direction + WorldView);\r\n float spec = pow(saturate(dot(halfAngle, WorldNormal)), SpecPower);\r\n float3 thisSpecular = spec * Reflectance * atten;\r\n Diffuse += thisDiffuse;\r\n Specular += thisSpecular;\r\n #if defined(_LIGHT_COOKIES)\r\n float3 cookieColor = SampleAdditionalLightCookie(lightIndex, WorldPosition);\r\n light.color *= cookieColor;\r\n #endif\r\n Color += light.color * (thisDiffuse + thisSpecular);\r\n LIGHT_LOOP_END\r\n float total = Diffuse + dot(Specular, float3(0.333, 0.333, 0.333));\r\n Color = total <= 0 ? 
MainColor : Color / total;\r\n#endif" } { diff --git a/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsURP.shadersubgraph b/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsURP.shadersubgraph index e841c8ff87a..f96145ec574 100644 --- a/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsURP.shadersubgraph +++ b/Packages/com.unity.shadergraph/Samples~/CustomLighting/Components/AdditionalLights/AdditionalLightsURP.shadersubgraph @@ -1148,7 +1148,7 @@ "m_FunctionName": "AddAdditionalLights", "m_FunctionSource": "3beadf505dbc54f4cae878435013d751", "m_FunctionSourceUsePragmas": true, - "m_FunctionBody": "Diffuse = MainDiffuse;\r\nSpecular = MainSpecular;\r\nColor = MainColor * (MainDiffuse + MainSpecular);\r\n\r\n#ifndef SHADERGRAPH_PREVIEW\r\n \r\n uint pixelLightCount = GetAdditionalLightsCount();\r\n half Roughness = pow(1 - Smoothness, 2);\r\n half Roughness2 = Roughness * Roughness;\r\n\thalf Roughness2Minus1 = Roughness2 - 1;\r\n\thalf normalizationTerm = (Roughness * half(4.0) + half(2.0));\r\n\r\n#if USE_CLUSTER_LIGHT_LOOP\r\n // for Foward+ LIGHT_LOOP_BEGIN macro uses inputData.normalizedScreenSpaceUV and inputData.positionWS\r\n InputData inputData = (InputData)0;\r\r\n inputData.normalizedScreenSpaceUV = ScreenPosition;\r\n inputData.positionWS = WorldPosition;\r\n#endif\r\n\r\n LIGHT_LOOP_BEGIN(pixelLightCount)\r\n\t\t// Convert the pixel light index to the light data index\r\n\t\t#if !USE_CLUSTER_LIGHT_LOOP\r\n\t\t\tlightIndex = GetPerObjectLightIndex(lightIndex);\r\n\t\t#endif\r\n\t\t// Call the URP additional light algorithm. 
This will not calculate shadows, since we don't pass a shadow mask value\r\n\t\tLight light = GetAdditionalPerObjectLight(lightIndex, WorldPosition);\r\n\t\t// Manually set the shadow attenuation by calculating realtime shadows\r\n\t\tlight.shadowAttenuation = AdditionalLightRealtimeShadow(lightIndex, WorldPosition, light.direction);\r\n #if defined(_LIGHT_COOKIES)\r\n float3 cookieColor = SampleAdditionalLightCookie(lightIndex, WorldPosition);\r\n light.color *= cookieColor;\r\n #endif\r\n float NdotL = saturate(dot(WorldNormal, light.direction));\r\n float atten = light.distanceAttenuation * light.shadowAttenuation;\r\n float thisDiffuse = NdotL * atten;\r\n //DirectBRDFSpecular\r\n\r\n float3 lightDirectionWSFloat3 = float3(light.direction);\r\n float3 halfDir = SafeNormalize(lightDirectionWSFloat3 + float3(WorldView));\r\n float NoH = saturate(dot(float3(WorldNormal), halfDir));\r\n half LoH = half(saturate(dot(lightDirectionWSFloat3, halfDir)));\r\n float d = NoH * NoH * Roughness2Minus1 + 1.00001f;\r\n half LoH2 = LoH * LoH;\r\n half spec = Roughness2 / ((d * d) * max(0.1h, LoH2) * normalizationTerm);\r\n #if REAL_IS_HALF\r\n spec = spec - HALF_MIN;\r\n spec = clamp(spec, 0.0, 1000.0);\r\n #endif\t\t\r\n float3 thisSpecular = spec * Reflectance * NdotL * atten;\r\n\r\n Diffuse += thisDiffuse;\r\n Specular += thisSpecular;\r\n\r\n Color += light.color * (thisDiffuse + thisSpecular);\r\n LIGHT_LOOP_END\r\n float total = Diffuse + dot(Specular, float3(0.333, 0.333, 0.333));\r\n Color = total <= 0 ? 
MainColor : Color / total;\r\n#endif" + "m_FunctionBody": "Diffuse = MainDiffuse;\r\nSpecular = MainSpecular;\r\nColor = MainColor * (MainDiffuse + MainSpecular);\r\n\r\n#ifndef SHADERGRAPH_PREVIEW\r\n \r\n uint pixelLightCount = GetAdditionalLightsCount();\r\n half Roughness = pow(1 - Smoothness, 2);\r\n half Roughness2 = Roughness * Roughness;\r\n\thalf Roughness2Minus1 = Roughness2 - 1;\r\n\thalf normalizationTerm = (Roughness * half(4.0) + half(2.0));\r\n\r\n#if USE_CLUSTER_LIGHT_LOOP\r\n // for Foward+ LIGHT_LOOP_BEGIN macro uses inputData.normalizedScreenSpaceUV and inputData.positionWS\r\n InputData inputData = (InputData)0;\r\n\r\n inputData.normalizedScreenSpaceUV = ScreenPosition;\r\n inputData.positionWS = WorldPosition;\r\n#endif\r\n\r\n LIGHT_LOOP_BEGIN(pixelLightCount)\r\n\t\t// Call the URP additional light algorithm. This will not calculate shadows, since we don't pass a shadow mask value\r\n\t\tLight light = GetAdditionalLight(lightIndex, WorldPosition);\r\n\t\t// Manually set the shadow attenuation by calculating realtime shadows\r\n\t\tlight.shadowAttenuation = AdditionalLightRealtimeShadow(lightIndex, WorldPosition, light.direction);\r\n #if defined(_LIGHT_COOKIES)\r\n float3 cookieColor = SampleAdditionalLightCookie(lightIndex, WorldPosition);\r\n light.color *= cookieColor;\r\n #endif\r\n float NdotL = saturate(dot(WorldNormal, light.direction));\r\n float atten = light.distanceAttenuation * light.shadowAttenuation;\r\n float thisDiffuse = NdotL * atten;\r\n //DirectBRDFSpecular\r\n\r\n float3 lightDirectionWSFloat3 = float3(light.direction);\r\n float3 halfDir = SafeNormalize(lightDirectionWSFloat3 + float3(WorldView));\r\n float NoH = saturate(dot(float3(WorldNormal), halfDir));\r\n half LoH = half(saturate(dot(lightDirectionWSFloat3, halfDir)));\r\n float d = NoH * NoH * Roughness2Minus1 + 1.00001f;\r\n half LoH2 = LoH * LoH;\r\n half spec = Roughness2 / ((d * d) * max(0.1h, LoH2) * normalizationTerm);\r\n #if REAL_IS_HALF\r\n spec = spec - 
HALF_MIN;\r\n spec = clamp(spec, 0.0, 1000.0);\r\n #endif\t\t\r\n float3 thisSpecular = spec * Reflectance * NdotL * atten;\r\n\r\n Diffuse += thisDiffuse;\r\n Specular += thisSpecular;\r\n\r\n Color += light.color * (thisDiffuse + thisSpecular);\r\n LIGHT_LOOP_END\r\n float total = Diffuse + dot(Specular, float3(0.333, 0.333, 0.333));\r\n Color = total <= 0 ? MainColor : Color / total;\r\n#endif" } { diff --git a/Packages/com.unity.shadergraph/Tests/Editor/UnitTests/MessageManagerTests.cs b/Packages/com.unity.shadergraph/Tests/Editor/UnitTests/MessageManagerTests.cs index 98d777c11a5..9ff8a425902 100644 --- a/Packages/com.unity.shadergraph/Tests/Editor/UnitTests/MessageManagerTests.cs +++ b/Packages/com.unity.shadergraph/Tests/Editor/UnitTests/MessageManagerTests.cs @@ -320,27 +320,6 @@ public void ReportAnyErrors_EmptyManager_ErrorOneProvider() var ret = m_EmptyMgr.HasSeverity(); Assert.IsTrue(ret); } - - [Test] - public void ClearNodesFromOtherProvider() - { - m_ComplexMgr.ClearNodeFromOtherProvider(p0, new List { node1 }); - - // Verify node1 is still in provider0 - Assert.IsTrue(m_ComplexMgr.Messages.ContainsKey(p0)); - Assert.IsTrue(m_ComplexMgr.Messages[p0].ContainsKey(node1.objectId)); - Assert.AreEqual(1, m_ComplexMgr.Messages[p0][node1.objectId].Count); - Assert.AreEqual(e2, m_ComplexMgr.Messages[p0][node1.objectId][0]); - - // Verify node1 is cleared from provider1 - Assert.IsTrue(m_ComplexMgr.Messages.ContainsKey(p1)); - Assert.IsTrue(m_ComplexMgr.Messages[p1].ContainsKey(node1.objectId)); - Assert.AreEqual(0, m_ComplexMgr.Messages[p1][node1.objectId].Count); - - // Verify other nodes in provider1 are unchanged - Assert.AreEqual(1, m_ComplexMgr.Messages[p1][node0.objectId].Count); - Assert.AreEqual(1, m_ComplexMgr.Messages[p1][node2.objectId].Count); - } } } diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/GettingStarted.md b/Packages/com.unity.visualeffectgraph/Documentation~/GettingStarted.md index 15a676d1793..278d737d09b 100644 --- 
a/Packages/com.unity.visualeffectgraph/Documentation~/GettingStarted.md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/GettingStarted.md @@ -11,12 +11,12 @@ For information on system requirements for the Visual Effect Graph, see [Require To install the Visual Effect Graph package: 1. Open a Unity project. -1. Open the **Package Manager** window (**Window > Package Manager**). +1. Open the **Package Manager** window (**Window** > **Package Manager**). 1. In the Package Manager window, in the **Packages** field, select **Unity Registry**. 1. Select **Visual Effect Graph** from the list of packages. 1. In the bottom right corner of the Package Manager window, select **Install**. Unity installs Visual Effect Graph into your Project. -__Note:__ When using [HDRP](https://docs.unity3d.com/Packages/com.unity.render-pipelines.high-definition@17.0/manual/index.html), VFX Graph is included with [HDRP Package](https://docs.unity3d.com/Packages/com.unity.render-pipelines.high-definition@17.0/manual/install-hdrp.html#install-the-high-definition-rp-package) and doesn't need to be manually installed +**Note:** When using [HDRP](https://docs.unity3d.com/Packages/com.unity.render-pipelines.high-definition@17.0/manual/index.html), VFX Graph is included with [HDRP Package](https://docs.unity3d.com/Packages/com.unity.render-pipelines.high-definition@17.0/manual/install-hdrp.html#install-the-high-definition-rp-package) and doesn't need to be manually installed ### Using the correct version of Visual Effect Graph Every Visual Effect Graph package works with a Scriptable Render Pipeline package of the same version. If you want to upgrade the Visual Effect Graph package, you must also upgrade the render pipeline package that you’re using. @@ -25,28 +25,28 @@ For example, the Visual Effect Graph package version 6.5.3-preview in Package Ma version 6.5.3-preview. 
## Creating Visual Effect Graphs -To use Visual Effect Graph, you must first create a [Visual Effect Graph Asset](VisualEffectGraphAsset.md) . -To create a Visual Effect Graph Asset: +To use Visual Effect Graph, you must first create a [Visual Effect Graph asset](VisualEffectGraphAsset.md). -1. In Unity, click __Assets__ > __Create__ > __Visual Effects__ > __Visual Effect Graph__. -1. Select a Template as a starting point for your new visual effect. -1. Click the Create button in the bottom right corner. +### From the Editor menu -It is also possible to create a Visual Effect Graph Asset from a [GameObject](https://docs.unity3d.com/Manual/class-GameObject.html) in the scene: -1. Select the GameObject and add a [Visual Effect Component](VisualEffectComponent.md). -1. Click on the "New" button next to the Asset Template field. -1. Select a Template as a starting point for your new visual effect and click the create button. +1. From the main menu, select **Assets** > **Create** > **Visual Effects** > **Visual Effect Graph**. +1. Select a template as a starting point for your new visual effect. +1. Select **Create**. -Finally, you can create a new Visual Effect Asset from the Visual Effect Graph window. -1. Open the Visual Effect Graph window in __Window__ > __Visual Effect__ > __Visual Effect Graph__. -1. Click on the __Create New Visual Effect Graph__ button. -1. Select a Template as a starting point for your new visual effect and click the create button. +### From a GameObject -To make a copy of a Visual Effect Graph Asset: +1. Select the GameObject and add a [Visual Effect component](VisualEffectComponent.md). +1. Next to the **Asset Template** property, select **New**. +1. Select a template as a starting point for your new visual effect. +1. Select **Create**. -1. In the Project window, select the Visual Effect Asset you want to make a copy of. -2. In the top navigation bar, select __Edit__ > __Duplicate__. You’ve now created a copy. 
+### From the Visual Effect Graph window + +1. From the main menu, select **Window** > **Visual Effect** > **Visual Effect Graph** to open the Visual Effect Graph window. +1. Select the **Create New Visual Effect Graph** button. +1. Select a template as a starting point for your new visual effect. +1. Select **Create**. ## Using Visual Effect Graphs in Scenes @@ -62,9 +62,9 @@ If you created a Visual Effect Asset directly from the Visual Effect Component i ## Editing a Visual Effect Graph To edit Visual Effect Graph Assets in the [Visual Effect Graph window](VisualEffectGraphWindow.md) : -* Open the Visual Effect Graph window (menu: __Window__ > __Visual Effects__) with an empty graph. This prompts you to open a Visual Effect Graph Asset. -* Select an existing Visual Effect Graph Asset, and click the __Edit__ button in the Inspector. This opens the Visual Effect Graph window with the graph contained in this Asset. -* Select the Visual Effect component (menu: next to the Asset template, click __Edit__). This opens the Visual Effect Graph window and with the graph contained in the referenced Asset. +* Open the Visual Effect Graph window (menu: **Window** > **Visual Effects**) with an empty graph. This prompts you to open a Visual Effect Graph Asset. +* Select an existing Visual Effect Graph Asset, and click the **Edit** button in the Inspector. This opens the Visual Effect Graph window with the graph contained in this Asset. +* Select the Visual Effect component (menu: next to the Asset template, click **Edit**). This opens the Visual Effect Graph window and with the graph contained in the referenced Asset. ## Previewing a graph’s effect To preview an effect, you can: @@ -82,8 +82,8 @@ When you attach a Visual Effect in your scene to the current graph, you can use This also allows Unity to display the correct gizmos in the scene, which makes some aspects of your effect easier to edit. 
To attach a Visual Effect to the opened graph, you can either select the GameObject in the hierarchy, or follow these steps: -1. In the matching graph, open the __Auto Attach Panel__ from the [Toolbar](VisualEffectGraphWindow.md#Toolbar). -1. Click on the **Select a target** field to select a compatible GameObject that exists in the current open scene. +1. In the matching graph, open the **Auto Attach Panel** from the [Toolbar](VisualEffectGraphWindow.md#Toolbar). +1. Click on the **Select a target** field to select a compatible GameObject that exists in the current open scene. ## Manipulating graph elements When you open an Asset inside the Visual Effect Graph window, you can see and edit the graph for that specific Asset. @@ -95,9 +95,15 @@ When you link several Blocks together, these form a Context. For more informatio Every change you make to a graph has immediate consequences on the behavior of your effect, and you can preview the changes in real time. Every time you add, remove, or connect a Node, the graph recompiles all the elements that have changed, and restarts the effect. However, changing values (for example, editing a curve) does not make Unity recompile anything and affects the simulation in real time. To add Nodes, you can either: -* Right-click in the graph, and select __Create Node__. +* Right-click in the graph, and select **Create Node**. * Press the spacebar on your keyboard. * Click and drag an edge from an existing port, and release the click in an empty space. * Drag and drop an element from the [Blackboard](Blackboard.md) into the graph. -When you do any of the above actions, the __Create Node__ menu appears. Here, you can see the Nodes, Blocks, and Contexts that are compatible with that specific location in the graph. +When you do any of the above actions, the **Create Node** menu appears. Here, you can see the Nodes, Blocks, and Contexts that are compatible with that specific location in the graph. 
+ +## Additional resources + +* [VFX Graph template window reference](Templates-window.md) +* [Visual Effect Graph window reference](VisualEffectGraphWindow.md) +* [Visual Effect Graph Asset reference](VisualEffectGraphAsset.md) diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/templates-window.png b/Packages/com.unity.visualeffectgraph/Documentation~/Images/templates-window.png index 94486cf3b9b..d2e76479109 100644 Binary files a/Packages/com.unity.visualeffectgraph/Documentation~/Images/templates-window.png and b/Packages/com.unity.visualeffectgraph/Documentation~/Images/templates-window.png differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Templates-window.md b/Packages/com.unity.visualeffectgraph/Documentation~/Templates-window.md index 8fe60e26d8c..23961c9deb4 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/Templates-window.md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/Templates-window.md @@ -1,41 +1,68 @@ -# VFX Graph Templates window -Use the template window to create a VFX Graph asset with a predefined effect. You can use these templates as a starting point for your own effects. -Each template has a description and an image to describe its behavior. -This window is displayed when you create a new Visual Effect Graph from the `Create` context menu in the project browser. +# VFX Graph template window -![Template-Window](Images/templates-window.png) +The VFX Graph template window allows you to create a new VFX graph from an existing template with a predefined effect. You can use these templates as a starting point for your own effects. + +There are multiple ways to [access the VFX Graph template window](#access-the-vfx-graph-template-window). + +**Note**: The template browser displays only templates that are compatible with the current project. 
+ +![The template window](Images/templates-window.png) + +| Label | Name | Description | +| :--- | :--- | :--- | +| **A** | Template list | Lists all the available templates you can select and start from to create a new VFX graph.

**Note**: the **Install Learning Templates** button imports the [Learning Templates sample](sample-learningTemplates.md) from the Visual Effect Graph package. | +| **B** | Template details | Displays a picture and description of the selected template. | +| **C** | Search and filtering tool | Filters the template list using the [Unity Search](https://docs.unity3d.com/Manual/search-overview.html) functionality. Type text to search templates by name or select **Add** (+) to filter templates based on specific characteristics.
In addition to some of the default Unity Search options, Visual Effect Graph allows you to filter the list by template grouping using **Category**. | +| **D** | Sorting tool | Sorts the templates within their respective categories. The categories remain listed in alphabetical order. The options are:
  • **Sort By Name**: Lists templates in alphabetical order.
  • **Sort By Order**: Lists templates in VFX Graph's default order.
  • **Sort By Modification Date**: Lists the last modified templates first.
  • **Sort By Last Used**: Lists the last used templates first.
  • **Sort By Favorite**: Lists templates marked as favorites first.
**Note**: To mark a template as a favorite, hover over the template in the list and select the gray star that appears. To remove a template as a favorite, select the star again. | +| **E** | **Cancel** | Closes the window and cancels the VFX graph asset creation. | +| **F** | **Create** | Creates a new VFX graph asset based on the selected template. | + +## Access the VFX Graph template window + +### From the Project window + +1. Right-click in your Project window. + +1. Select **Create** > **Visual Effects** > **Visual Effect Graph**. + +### From the VFX Graph editor toolbar -## Open from VFX Graph editor toolbar ![toolbar](Images/templates-window-toolbar.png) -You can open the templates window from the **Add** (+) button in the VFX Graph editor toolbar. This button includes a drop-down to either `insert` a template in the current graph, or `create` a new asset file from a template. +1. In the VFX Graph window's toolbar, select the drop-down arrow beside the **Add** (+) button. -When you insert a template, Unity places it at the center of the screen. +1. Select one of the available options: + * **Create from template** to create a new asset file from a template. + * **Insert template** to insert a template in the current graph. + +Once you complete a template insertion, Unity places it at the center of the VFX Graph window's workspace. > [!TIP] -> If you hold the `CTRL` key while you click on the **Add** (+) button, the templates window opens to create a new VFX asset. +> To create a new VFX graph asset, you can also hold the `CTRL` key while you directly select **Add** (+) in the toolbar. + +### From the VFX Graph workspace + +1. Right-click in the VFX Graph window's workspace. + +1. Select **Insert template**. + +Once you complete the template insertion, Unity inserts the template at the right-click position. + +## Create a custom VFX graph template + +You can create your own VFX graph templates to have them available in the template browser. 
+ +To create a custom VFX graph template, follow these steps: -## Open from the VFX Graph context menu -When you right click in the VFX Graph editor window, the context menu shows the **Insert template** option, which inserts the template at the mouse position. +1. In the **Project** window, select the VFX graph asset you want to use as a template. -## Search and filter templates -The template window includes a search field to filter templates according to various criteria. -The search field works the same way as with the main [Unity search window](https://docs.unity3d.com/Manual/search-window-reference.html), although there are fewer filters available. +1. In the **Inspector** window, select **Use as Template**. -## Sort templates -Next to the search field, a dropdown allows to sort templates by: -- Name -- Order -- Modification date -- Last used -- Favorite +1. Expand the **Template** section. -> [!NOTE] -> The sorting is applied inside each category, but categories are always sorted by name. +1. Optional: Set the metadata that describes the template in the template browser: **Name**, **Category**, **Description**, **Icon**, and **Thumbnail**. -## VFX Graph template Editor +## Additional resources -When you select a [Visual Effect Asset](VisualEffectGraphAsset.md) in the Project window, the inspector displays a section dedicated to template description. -To make a Visual Effect asset become a template, activate the **Use as Template** option. -By default the template name is the asset name, but you can override it by editing the `Name` field. -The description, icon and thumbnail fields are optional, but can help to understand the purpose of the template when browsing in the template window. 
+* [Create a new VFX graph](GettingStarted.md) +* [Visual Effect Graph Asset reference](VisualEffectGraphAsset.md) diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/VisualEffectGraphAsset.md b/Packages/com.unity.visualeffectgraph/Documentation~/VisualEffectGraphAsset.md index 37df784b2a3..91d0c533009 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/VisualEffectGraphAsset.md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/VisualEffectGraphAsset.md @@ -25,23 +25,23 @@ With the Visual Effect Graph open, you can now edit the Visual Effect. When you select a Visual Effect Graph Asset, the Inspector displays Asset-wide configuration Options. -| Property Name | Description / Values | -|--------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| **Fixed Delta Time** | Indicates whether to update the visual effect at the rate that the **Fixed Time Step** property defines in the 
[Visual Effect Project Settings](VisualEffectProjectSettings.md). | -| **Exact Fixed Time** | Indicates whether to update the visual effect multiple times per frame if the frame time is longer than the **Fixed Time Step**. For example, if a frame takes 10ms and **Fixed Time Step** is 5 ms, the effect updates twice with a 5ms deltaTime instead of once with a 10ms deltaTime. This is a resource intensive operation so only use it in high-end scenarios.
This property is only relevant if you enable **Fixed Delta Time**. | -| **Ignore Time Scale** | Indicates whether to ignore the [timeScale](https://docs.unity3d.com/ScriptReference/Time-timeScale.html) when Unity simulates the effect. Enable this property if you want the visual effect to play at normal speed when the time scale is not equal to `1`. | -| **Culling Flags** | Sets whether Unity updates the visual effect depending on its culling state. The culling state refers to whether a Camera can see the visual effect's bounding box or not. The options are:
• **Recompute bounds and simulate when visible**: Unity simulates the effect and recalculates the effect's bounding box when the effect is visible. If your visual effect uses a dynamic bounding box (one that you compute with Operators), do not use this option and instead use one that includes **Always Recompute Bounds** .
• **Always Recompute Bounds, simulate only when Visible**: Regardless of whether any Camera can see the effect's bounding box or not, Unity always recalculates the bounding box. Unity only simulates the effect if a Camera can see the updated bounds.
• **Always Recompute Bounds and Simulate**: Regardless of whether any Camera can see the effect's bounding box or not, Unity always recalculates the bounding box and simulates the effect.

**Note**: Regardless of the mode, Unity always uses the bounding box to perform culling of the effect. | -| **Instancing mode** | Selects how Unity computes the batch capacity for the [Instancing](Instancing.md) feature. You can choose from **Disabled**, **Automatic batch capacity** and **Custom batch capacity**. | -| **Max Batch Capacity** | Sets the maximum number of instances that a batch can hold. The actual capacity may be reduced by other factors, such as the memory used by the effect. Only available when **Instancing mode** is set to **Custom batch capacity**. | -| **PreWarm Total Time** | Sets the duration, in seconds, that Unity simulates the effect for when `Reset()` occurs. This pre-simulates the effect so that, when the effect starts, it appears already 'built-up'. When you change this value, Unity calculates a new value for **PreWarm Delta Time**. | -| **PreWarm Step Count** | Sets the number of simulation steps that Unity uses to calculate the PreWarm. A greater number of steps increase precision as well as the resource intensity of the effect, which decreases performance. When you change this value, Unity calculates a new value for **PreWarm Delta Time**. | -| **PreWarm Delta Time** | Sets the delta time, in seconds, that Unity uses for the PreWarm. When you change this value, Unity calculates new values for **PreWarm Total Time** and **PreWarm Step Count**. Adjust this value, instead of **PreWarm Total Time** and **PreWarm Step Count** individually, if you need to use a precise delta time for your simulation. | -| **Initial Event Name** | Sets the name of the [Event](Events.md) that Unity sends when the effect enables. The default value is **OnPlay**, but you can change this to another name, or even a blank field, to make it so that every system does not spawn by default. | -| **Output Render Order** | Defines a list that shows every Output Context in their rendering order. 
You can re-order this list to change the order that Unity renders the Output Contexts. Unity draws items at the top of the list first then progressively draws those lower down the list in front of those above. | -| **Shaders** | Defines a list of every Shader that Unity has compiled for the Visual Effect Graph. These are read-only and mainly for debugging purposes. Use **Shader Externalization** in [Visual Effect Preferences](VisualEffectPreferences.md) to externalize Shaders temporarily for debugging purposes. | -| ** Use as Template** | Marks the asset as a template and displays it in the Templates window. If you activate this option, you must at least fill in the **Template Name** property. | -| **Template Name** | The display name of the template in the Templates window. | -| **Template Category** | The category in which to display the template in the Templates window. | -| **Template Description** | The description of the template, displayed in the detail panel of the Templates window. | -| **Template Icon** | An optional icon to display next to the template name in the Templates window. | -| **Template Thumbnail** | An optional image to showcase the template's VFX in the detail panel of the Templates window. | +| Property | Description | +| :--- | :--- | +| **Fixed Delta Time** | Indicates whether to update the visual effect at the rate that the **Fixed Time Step** property defines in the [Visual Effect Project Settings](VisualEffectProjectSettings.md). | +| **Exact Fixed Time** | Indicates whether to update the visual effect multiple times per frame if the frame time is longer than the **Fixed Time Step**. For example, if a frame takes 10ms and **Fixed Time Step** is 5 ms, the effect updates twice with a 5ms deltaTime instead of once with a 10ms deltaTime. This is a resource intensive operation so only use it in high-end scenarios.
This property is only relevant if you enable **Fixed Delta Time**. | +| **Ignore Time Scale** | Indicates whether to ignore the [timeScale](https://docs.unity3d.com/ScriptReference/Time-timeScale.html) when Unity simulates the effect. Enable this property if you want the visual effect to play at normal speed when the time scale is not equal to `1`. | +| **Culling Flags** | Sets whether Unity updates the visual effect depending on its culling state. The culling state refers to whether a Camera can see the visual effect's bounding box or not. The options are:
  • **Recompute bounds and simulate when visible**: Unity simulates the effect and recalculates the effect's bounding box when the effect is visible. If your visual effect uses a dynamic bounding box (one that you compute with Operators), do not use this option and instead use one that includes **Always Recompute Bounds** .
  • **Always Recompute Bounds, simulate only when Visible**: Regardless of whether any Camera can see the effect's bounding box or not, Unity always recalculates the bounding box. Unity only simulates the effect if a Camera can see the updated bounds.
  • **Always Recompute Bounds and Simulate**: Regardless of whether any Camera can see the effect's bounding box or not, Unity always recalculates the bounding box and simulates the effect.
**Note**: Regardless of the mode, Unity always uses the bounding box to perform culling of the effect. | +| **Instancing mode** | Selects how Unity computes the batch capacity for the [Instancing](Instancing.md) feature. You can choose from **Disabled**, **Automatic batch capacity** and **Custom batch capacity**. | +| **Max Batch Capacity** | Sets the maximum number of instances that a batch can hold. The actual capacity may be reduced by other factors, such as the memory used by the effect. Only available when **Instancing mode** is set to **Custom batch capacity**. | +| **PreWarm Total Time** | Sets the duration, in seconds, that Unity simulates the effect for when `Reset()` occurs. This pre-simulates the effect so that, when the effect starts, it appears already 'built-up'. When you change this value, Unity calculates a new value for **PreWarm Delta Time**. | +| **PreWarm Step Count** | Sets the number of simulation steps that Unity uses to calculate the PreWarm. A greater number of steps increase precision as well as the resource intensity of the effect, which decreases performance. When you change this value, Unity calculates a new value for **PreWarm Delta Time**. | +| **PreWarm Delta Time** | Sets the delta time, in seconds, that Unity uses for the PreWarm. When you change this value, Unity calculates new values for **PreWarm Total Time** and **PreWarm Step Count**. Adjust this value, instead of **PreWarm Total Time** and **PreWarm Step Count** individually, if you need to use a precise delta time for your simulation. | +| **Initial Event Name** | Sets the name of the [Event](Events.md) that Unity sends when the effect enables. The default value is **OnPlay**, but you can change this to another name, or even a blank field, to make it so that every system does not spawn by default. | +| **Output Render Order** | Defines a list that shows every Output Context in their rendering order. 
You can re-order this list to change the order that Unity renders the Output Contexts. Unity draws items at the top of the list first then progressively draws those lower down the list in front of those above. | +| **Shaders** | Defines a list of every Shader that Unity has compiled for the Visual Effect Graph. These are read-only and mainly for debugging purposes. Use **Shader Externalization** in [Visual Effect Preferences](VisualEffectPreferences.md) to externalize Shaders temporarily for debugging purposes. | +| **Use as Template** | Marks the asset as a template and displays it in the Templates window. If you activate this option, you must at least fill in the **Template Name** property. | +| **Template Name** | The display name of the template in the Templates window. | +| **Template Category** | The category in which to display the template in the Templates window. | +| **Template Description** | The description of the template, displayed in the detail panel of the Templates window. | +| **Template Icon** | An optional icon to display next to the template name in the Templates window. | +| **Template Thumbnail** | An optional image to showcase the template's VFX in the detail panel of the Templates window. 
| diff --git a/Tests/SRPTests/Projects/HDRP_RuntimeTests/Assets/Tests/HDRP_Runtime_Graphics_Tests.cs b/Tests/SRPTests/Projects/HDRP_RuntimeTests/Assets/Tests/HDRP_Runtime_Graphics_Tests.cs index a8e906fa010..45151e1bc7a 100644 --- a/Tests/SRPTests/Projects/HDRP_RuntimeTests/Assets/Tests/HDRP_Runtime_Graphics_Tests.cs +++ b/Tests/SRPTests/Projects/HDRP_RuntimeTests/Assets/Tests/HDRP_Runtime_Graphics_Tests.cs @@ -62,13 +62,7 @@ public void SetDefaultResolution() [IgnoreGraphicsTest( "003-VirtualTexturing$", "https://jira.unity3d.com/browse/UUM-131182 Both Switches fail on MultiThreaded (pass on Native Jobs)", - runtimePlatforms: new RuntimePlatform[] { RuntimePlatform.Switch, RuntimePlatform.Switch2, RuntimePlatform.PS4 }, // Also unstable on PS4: https://jira.unity3d.com/browse/UUM-135501 - renderingThreadingModes: new RenderingThreadingMode[] { RenderingThreadingMode.MultiThreaded } - )] - [IgnoreGraphicsTest( - "003-VirtualTexturing-Forward$", - "https://jira.unity3d.com/browse/UUM-131182 Switch fails on MultiThreaded (pass on Native Jobs)", - runtimePlatforms: new RuntimePlatform[] { RuntimePlatform.Switch }, + runtimePlatforms: new RuntimePlatform[] { RuntimePlatform.PS4 }, // Also unstable on PS4: https://jira.unity3d.com/browse/UUM-135501 renderingThreadingModes: new RenderingThreadingMode[] { RenderingThreadingMode.MultiThreaded } )] [IgnoreGraphicsTest(