Can sampler2DArray be used?

Is it possible to use sampler2DArray in GLSL shaders in Unity? Is there an alternative that works with Cg shaders?

Shader "TestShader" {
    Properties {
        _TestTex ("Test Tex (RGB)", 2D) = "white" {}
    }
    SubShader { Pass {
        GLSLPROGRAM
        #ifdef FRAGMENT
            uniform sampler2DArray _TestTex;

            // Rest of shader...
        #endif

        // vertex shader...
        ENDGLSL
    }}
}
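
For reference, the sampling I am after would look something like this in the fragment block (the layer index and variable names here are made up):

            #extension GL_EXT_texture_array : require
            // The third component of the texture coordinate selects the array layer.
            vec4 col = texture2DArray(_TestTex, vec3(uv, layer));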

EDIT: MY ATTEMPT

// Upgrade NOTE: replaced 'samplerRect' with 'sampler2D'
// Upgrade NOTE: replaced 'texRECT' with 'tex2D'

Shader "Tilemap/Tilemap2" {
  Properties {
    _MainTex ("Tileset", 2D) = "black" {}
    _Tilemap ("Tilemap", 2D) = "black" {}
    _TileCount ("Input/Output", vector) = (0,0,0,0) // xy: tile counts across/down the atlas; zw: tilemap size in tiles
  }
  SubShader {
    Tags { "RenderType"="Opaque" }
    LOD 200
    Pass
    {
      CGPROGRAM
      #pragma vertex vert
      #pragma fragment frag
      #include "UnityCG.cginc"

      uniform sampler2D _MainTex;
      uniform sampler2D _Tilemap;
      float4 _TileCount;
      
      struct appdata {
        float4 vertex	: POSITION;
        float2 texcoord	: TEXCOORD0;
      };
      
      float4 _MainTex_ST;
      float4 _Tilemap_ST;
      
      struct v2f {
        float4 pos	: SV_POSITION;
        float2 uv	: TEXCOORD0;
      };
      
      v2f vert(appdata v) {
        v2f result;
        result.pos = mul(UNITY_MATRIX_MVP, v.vertex);
        result.uv = TRANSFORM_TEX(v.texcoord, _MainTex);
        return result;
      }

      // Wraps a into [0, b), snapped down to whole-texel boundaries.
      float2 mod(const float2 a, const float2 b) {
        return floor(frac(a / b) * b);
      }

      half4 frag(v2f i) : COLOR {
        // Address into the index texture, measured in tiles.
        float2 mappingScale = _TileCount.zw;
        float2 mappingAddress = i.uv.xy * mappingScale;
        // The NVIDIA original sampled a RECT texture with unnormalized texel
        // coordinates; tex2D wants [0,1], so normalize the wrapped address
        // (the +0.5 samples texel centers rather than texel edges).
        float2 tilemapUV = (mod(mappingAddress, mappingScale) + 0.5) / mappingScale;
        float4 whichTile = tex2D(_Tilemap, tilemapUV);

        // whichTile.xy picks the tile in the atlas; frac(mappingAddress) is
        // the position within that tile.
        float2 tileScale = _TileCount.xy;
        //float2 tileScaledTex = i.uv.xy * float2(mappingScale.x / tileScale.x, mappingScale.y / tileScale.y);

        return tex2D(_MainTex, (whichTile.xy + frac(mappingAddress)) / tileScale);//, ddx(tileScaledTex), ddy(tileScaledTex));
      }
      ENDCG
    }
  } 
  FallBack "Diffuse"
}

The Java code at your link uses glBindTexture(GL_TEXTURE_2D_ARRAY, …
http://www.opengl.org/sdk/docs/man3/xhtml/glBindTexture.xml

But Unity does not have a property type that maps to it:
http://unity3d.com/support/documentation/Components/SL-Properties
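
For reference, a ShaderLab Properties block only accepts 2D and Cube texture types (property names below are placeholders), so there is nothing that would bind a texture array:

    Properties {
        _SomeTex ("A plain 2D texture", 2D) = "white" {}
        _SomeCube ("A cubemap", Cube) = "" {}
    }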

Also, I don’t know if you care, but OpenGL ES only supports the two texture property types that Unity does (GL_TEXTURE_RECTANGLE was removed for Unity 3: http://unity3d.com/support/documentation/Manual/SL-V3Conversion.html).

@Jessy Isn't altering a small index texture more efficient than managing lots of game objects (or altering meshes dynamically)? Yes, the GC gets hit hard right now!

During my searches I came across the following link, which makes a lot more sense to me: http://http.developer.nvidia.com/GPUGems2/gpugems2_chapter12.html

I have done my best to convert this into something Unity can understand. It is working better: the shader shows the right number of tiles on my plane and cuts the atlas into the right number of tiles. If I manually hard-code a tile index into the shader, every tile becomes the one specified.

I cannot figure out the format of the _Tilemap image.
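
My best guess so far, going by the GPU Gems sample: each texel of _Tilemap stores a tile's atlas column and row in its R and G channels. Since tex2D returns an 8-bit texture's channels normalized to [0,1], the shader would then have to scale them back up, something like this (the 255 factor assumes byte channels, and _Tilemap would need point filtering):

    float4 whichTile = tex2D(_Tilemap, tilemapUV);
    // Byte channels come back as index / 255; recover integer tile coordinates.
    float2 tileIndex = floor(whichTile.xy * 255.0 + 0.5);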

I will update the question with my source.