Surface shader + Instancing -> Invalid subscript 'texcoord' in generated code

So I’m writing a really simple surface shader to be used with indirect instancing. All this shader does is drawing a textured quad. I use a ComputeBuffer to provide per-instance data and then I just pass uv_MainTex in the Input structure to access the uv coords.

So everything compiles ok until i write the vertex shader where I access the per-instance compute buffer, then I get "invalid subscript ‘texcoord’ at line 97, 108 and 153 (lines that don’t exist in my shader).

Can anyone tell me what I’m doing wrong?
Does the “automatic” uv_MainTex get “deactivated” and I should pass the uvs manually?

Thanks

Shader "Custom/instanced_indirect_tex"
{
    Properties
    {
        _Color ("Color", Color) = (1,1,1,1)
        _MainTex ("Albedo (RGB)", 2D) = "white" {}
        _Glossiness ("Smoothness", Range(0,1)) = 0.5
        _Metallic ("Metallic", Range(0,1)) = 0.0
    }
    SubShader
    {
        Tags { "RenderType"="Opaque" }
        LOD 200

        CGPROGRAM
        //#pragma vertex vert
        #pragma surface surf Standard fullforwardshadows vertex:pre_vert
        //#pragma target 5

        #include "UnityCG.cginc"

        sampler2D _MainTex;

        struct VertexShaderInput
        {
            float4 vertex     : POSITION;
            float3 normal     : NORMAL;
            uint   instanceId : SV_InstanceID;
        };
        
        // Surface shader input struct
        struct Input
        {
            float2 uv_MainTex;
        };

        half _Glossiness;
        half _Metallic;
        fixed4 _Color;


        struct MeshProperties
        {
            float4x4 mat;
            float4 color;
        };

        #ifdef SHADER_API_D3D11 
        StructuredBuffer<MeshProperties> _Properties;
        #endif

        void pre_vert( inout VertexShaderInput v, out Input o )
        {
            UNITY_INITIALIZE_OUTPUT( Input, o );
        #ifdef SHADER_API_D3D11
            v.vertex = mul( _Properties[ v.instanceId ].mat, v.vertex );
            //o.colorin = _Properties[ v.instanceId ].color;
        #endif
        }

        void surf( Input IN, inout SurfaceOutputStandard o )
        {
          // Albedo comes from a texture tinted by colors
            o.Albedo = tex2D (_MainTex, IN.uv_MainTex).rgb;
        #ifdef SHADER_API_D3D11
            //o.Albedo = IN.colorin;
        #endif
            // Metallic and smoothness come from slider variables
            o.Metallic = .61;
            o.Smoothness = .61;
        }
        ENDCG
    }
    //FallBack "Diffuse"
}

In order for the custom vertex function to be compatible with the compiled internal vertex function, they will both use whatever input struct you pass to the custom vertex function (hence the ‘inout’ keyword). The compiled surface shader generally assumes you are using ‘appdata_full’ (as it contains all ‘required’ inputs), and because surface shaders do support instancing, the instance ID is actually defined in that struct, it’s just abstracted to the point that it’s difficult to access manually. Luckily, there is a macro for accessing the abstracted version;

void pre_vert( inout appdata_full v, out Input o )
{
    UNITY_INITIALIZE_OUTPUT( Input, o );
#ifdef SHADER_API_D3D11
    uint instanceID = UNITY_GET_INSTANCE_ID (v);
    v.vertex = mul( _Properties[ instanceID ].mat, v.vertex );
    //o.colorin = _Properties[ instanceID ].color;
#endif
}

This may not work in your case (I’m not sure when Unity decides you are using instancing or not), in which case you would need to add some things to your struct;

struct VertexShaderInput
{
    float4 vertex : POSITION;
    float4 tangent : TANGENT;
    float3 normal : NORMAL;
    float4 texcoord : TEXCOORD0;
    float4 texcoord1 : TEXCOORD1;
    float4 texcoord2 : TEXCOORD2;
    float4 texcoord3 : TEXCOORD3;
    fixed4 color : COLOR;
    uint instanceId : SV_InstanceID;
};