Getting Started

Introduction

Installation

Project Configuration

Testing in Your Sandbox

Samples & Examples

Core Concepts & Glossary

Package Types

Spaces & Space Templates

Custom Avatars

Custom Avatar Animations

Avatar Attachments

Custom Prefab Objects

Embedded Packages

Drivable Vehicles

Scene Setup

Testing in Unity vs Sandbox

Controlling the Camera

Custom Collision, Layers, and Tags

Audio Mixers and Groups

Key Differences from Standard Unity3D development

Economy

Quests and Rewards

Economy Overview

Monetization

Items

Consumable Items

Rewarding Items

World Currency

Selling Items

Scripting

Components

Entrance Point

Camera Passthrough

Interactable

Trigger Event

Point Of Interest

Environment Settings Overrides

Render Pipeline Settings Overrides

Movement Materials

Climbable

Avatar Teleporter

Empty Frame

Projector Surface

Seat Hotspot

Guidelines

Supported Features and Limitations

Performance Guidelines

Lighting

Publishing to Spatial

Finding Published Packages

Support

FAQs

Help and Support

Release Notes

Asset Import Settings

Fixing Broken Shaders on Mobile & VR

Before

Before

After

After

Have you experienced shaders looking broken on mobile and the Meta Quest? If your shader samples _CameraOpaqueTexture or _CameraDepthTexture, it will render incorrectly on mobile and Quest, because those camera textures are only available on the web platform.

We added the global shader keywords _USE_CAMERA_COLOR_TEXTURE and _USE_CAMERA_DEPTH_TEXTURE so you can create shader variants that behave correctly across all of those platforms.

Click here to download example shaders as reference.

Screenshot 2023-01-27 at 8.54.24 AM.jpg

// Example URP shader demonstrating Spatial's cross-platform camera-texture
// keywords: when _USE_CAMERA_COLOR_TEXTURE / _USE_CAMERA_DEPTH_TEXTURE are
// disabled (e.g. on platforms without camera textures), the related
// declarations, interpolators, and sampling code all compile out.
Shader "Examples/CameraTextureExample"
{
    Properties
    {
        _BaseColor ("Base color", Color) = (0.6, 0.6, 0.6, 1)
        _BaseMap ("Texture", 2D) = "white" {}
    }
    SubShader
    {
        Tags 
        { 
            "RenderType" = "Opaque" 
            "Queue" = "Transparent" // Render Queue must be set to later than "2500" for the camera texture to be visible
            "RenderPipeline" = "UniversalPipeline"
        }
        Blend SrcAlpha OneMinusSrcAlpha // standard alpha blending (used by the depth-based fade below)

        Pass
        {
            HLSLPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            #pragma multi_compile_fog

            // Global Keywords that Spatial provides
            // Each multi_compile line produces two shader variants (keyword
            // off / on); Spatial toggles them per platform at runtime.
            #pragma multi_compile _ _USE_CAMERA_COLOR_TEXTURE
            #pragma multi_compile _ _USE_CAMERA_DEPTH_TEXTURE

            #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"

            // Per-vertex mesh input.
            struct Attributes
            {
                float4 positionOS : POSITION;  // object-space position
                float3 normalOS : NORMAL;
                float2 uv : TEXCOORD0;
                UNITY_VERTEX_INPUT_INSTANCE_ID // GPU-instancing support
            };

            // Vertex-to-fragment interpolators.
            struct Varying
            {
                float4 positionCS : SV_POSITION;
                float2 uv : TEXCOORD0;
                // Screen position is only interpolated when at least one
                // camera texture will actually be sampled.
                #if defined(_USE_CAMERA_COLOR_TEXTURE) || defined(_USE_CAMERA_DEPTH_TEXTURE)
                    float4 screenPos : TEXCOORD2; // NDC position; xy/w gives [0,1] screen UVs in frag
                #endif
                float fogCoord : TEXCOORD1;
                UNITY_VERTEX_OUTPUT_STEREO     // single-pass stereo (VR) support
            };

            TEXTURE2D(_BaseMap);
            SAMPLER(sampler_BaseMap);

            // Camera opaque (color) texture — declared only when available.
            #if defined(_USE_CAMERA_COLOR_TEXTURE)
                TEXTURE2D(_CameraOpaqueTexture);
                SAMPLER(sampler_CameraOpaqueTexture);
            #endif

            // Camera depth texture — declared only when available.
            #if defined(_USE_CAMERA_DEPTH_TEXTURE)
                TEXTURE2D(_CameraDepthTexture);
                SAMPLER(sampler_CameraDepthTexture);
            #endif

            // Material properties in UnityPerMaterial for SRP-Batcher compatibility.
            CBUFFER_START(UnityPerMaterial)
                float4 _BaseMap_ST;
                half4 _BaseColor;
            CBUFFER_END

            // Vertex stage: transform to clip space, pass UVs through, and
            // compute the fog factor plus (conditionally) the screen position.
            Varying vert (Attributes IN)
            {
                UNITY_SETUP_INSTANCE_ID(IN);
                Varying OUT = (Varying)0;
                UNITY_TRANSFER_INSTANCE_ID(IN, OUT);
                UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(OUT);

                VertexPositionInputs vertexInput = GetVertexPositionInputs(IN.positionOS.xyz);
                OUT.positionCS = vertexInput.positionCS;
                #if defined(_USE_CAMERA_COLOR_TEXTURE) || defined(_USE_CAMERA_DEPTH_TEXTURE)
                    // positionNDC still needs the perspective divide (done per-fragment).
                    OUT.screenPos = vertexInput.positionNDC;
                #endif

                OUT.uv = TRANSFORM_TEX(IN.uv, _BaseMap);

                OUT.fogCoord = ComputeFogFactor(OUT.positionCS.z);

                return OUT;
            }

            // Fragment stage: base texture * tint, optionally modulated by the
            // camera color texture, optionally faded out by scene depth, then fogged.
            half4 frag (Varying IN) : SV_Target
            {
                half4 color = SAMPLE_TEXTURE2D(_BaseMap, sampler_BaseMap, IN.uv) * _BaseColor;
                
                #if defined(_USE_CAMERA_COLOR_TEXTURE)
                    // Perspective divide turns the interpolated NDC position into screen UVs.
                    float4 cameraColor = SAMPLE_TEXTURE2D(_CameraOpaqueTexture, sampler_CameraOpaqueTexture, IN.screenPos.xy / IN.screenPos.w);
                    color.rgb *= cameraColor.rgb;
                #endif

                #if defined(_USE_CAMERA_DEPTH_TEXTURE)
                    // Linearize the sampled depth to eye space. The 0.005 scale is an
                    // example tuning constant — adjust per scene. Larger scene depth
                    // drives alpha down, i.e. distant areas render more transparent.
                    float4 cameraDepth = LinearEyeDepth(SAMPLE_TEXTURE2D(_CameraDepthTexture, sampler_CameraDepthTexture, IN.screenPos.xy / IN.screenPos.w), _ZBufferParams) * 0.005;
                    color.a *= 1 - saturate(cameraDepth.r);
                #endif

                color.rgb = MixFog(color.rgb, IN.fogCoord);

                return color;
            }
            ENDHLSL
        }
    }
}