So I have been thinking about replicating N64 graphics in Unity, and my main challenge so far is figuring out how to emulate the N64's unique method of bilinear filtering. For those who don't know, the N64 used 3 samples for filtering rather than the standard 4. I found this link that shows off a method of replicating N64 filtering, and I was wondering whether it could be done via a custom shader in Unity, or whether there might be another way.
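For anyone unfamiliar, regular bilinear filtering blends the four nearest texels, roughly like this (just a rough HLSL sketch for context, not code from the linked thread):
// Rough sketch of standard 4-tap bilinear for comparison (illustrative only).
// T00/T10/T01/T11 are the four texels surrounding the sample point and
// fracTexels is the fractional position of the sample within that texel quad.
float4 StandardBilinear(float4 T00, float4 T10, float4 T01, float4 T11, float2 fracTexels)
{
    float4 bottom = lerp(T00, T10, fracTexels.x);
    float4 top    = lerp(T01, T11, fracTexels.x);
    return lerp(bottom, top, fracTexels.y);
}
// The N64's RDP instead blends only three of those texels (a triangle),
// which is what produces its distinctive, slightly angular smearing.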
Yes, absolutely. The code shown in that first post is HLSL shader code, the same shader language Unity's shaders are written in. Most of that code would be directly applicable in a Unity shader.
Here’s a version that uses a slightly different approach:
Shader "Custom/N64Filter"
{
Properties
{
_MainTex ("Albedo (RGB)", 2D) = "white" {}
}
SubShader
{
Tags { "RenderType"="Opaque" }
LOD 200
CGPROGRAM
#pragma surface surf Lambert
sampler2D _MainTex;
float4 _MainTex_TexelSize;
struct Input
{
float2 uv_MainTex;
};
// based on https://www.shadertoy.com/view/wdy3RW
// with proper support for mip maps and textures that aren't using point filtering
fixed4 N64Filtering(sampler2D tex, float2 uv, float4 texelSize)
{
// texel coordinates
float2 texels = uv * texelSize.zw;
// calculate mip level
float2 dx = ddx(texels);
float2 dy = ddy(texels);
float delta_max_sqr = max(dot(dx, dx), dot(dy, dy));
float mip = max(0.0, 0.5 * log2(delta_max_sqr));
// scale texel sizes and texel coordinates to handle mip levels properly
float scale = pow(2,floor(mip));
texelSize.xy *= scale;
texelSize.zw /= scale;
texels = texels / scale - 0.5;
// calculate blend for the three points of the tri-filter
float2 fracTexels = frac(texels);
float3 blend = float3(
abs(fracTexels.x+fracTexels.y-1),
min(abs(fracTexels.xx-float2(0,1)), abs(fracTexels.yy-float2(1,0)))
);
// calculate equivalents of point filtered uvs for the three points
float2 uvA = (floor(texels + fracTexels.yx) + 0.5) * texelSize.xy;
float2 uvB = (floor(texels) + float2(1.5, 0.5)) * texelSize.xy;
float2 uvC = (floor(texels) + float2(0.5, 1.5)) * texelSize.xy;
// sample points
fixed4 A = tex2Dlod (tex, float4(uvA, 0, mip));
fixed4 B = tex2Dlod (tex, float4(uvB, 0, mip));
fixed4 C = tex2Dlod (tex, float4(uvC, 0, mip));
// blend and return
return A * blend.x + B * blend.y + C * blend.z;
}
void surf (Input IN, inout SurfaceOutput o)
{
fixed4 c = N64Filtering(_MainTex, IN.uv_MainTex, _MainTex_TexelSize);
o.Albedo = c.rgb;
}
ENDCG
}
FallBack "Diffuse"
}
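If the compact blend calculation above looks cryptic: it's just the usual piecewise barycentric weighting over the two triangles that make up a texel quad, written without a branch. Purely for illustration (this isn't part of the shader, just an equivalent, more verbose way to write the same weights):
float3 N64BlendVerbose(float2 fracTexels)
{
    float fx = fracTexels.x;
    float fy = fracTexels.y;
    if (fx + fy < 1.0)
    {
        // lower-left triangle: A is the lower-left texel, B is the right texel, C is the top texel
        return float3(1.0 - fx - fy, fx, fy);
    }
    else
    {
        // upper-right triangle: A is the upper-right texel, B and C keep the same meaning
        return float3(fx + fy - 1.0, 1.0 - fy, 1.0 - fx);
    }
}
Either way the three weights sum to 1, and uvA above flips between the lower-left and upper-right corner depending on which triangle the sample falls in.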
Looks like bgolus beat me to it. But anyway, here is a direct adaptation of the code in the linked example. (For some reason I had to change the multiply to an add in the modf() calls; I'm not sure why their implementation works with a multiply, since here it reduces the blend to nothing.)
Shader "Invertex/Custom/N64Bilinear"
{
//Unity implementation of N64 3-point Bilinear Filtering example from:
//http://www.emutalk.net/threads/54215-Emulating-Nintendo-64-3-sample-Bilinear-Filtering-using-Shaders
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_Cutoff("Alpha Cutoff", Range(0,1)) = 0.5
}
SubShader
{
Tags { "RenderType"="Opaque" "Queue"="AlphaTest" }
CGPROGRAM
#pragma surface surf Lambert alphatest:_Cutoff
sampler2D _MainTex;
float4 _MainTex_TexelSize; //Unity will fill this in with the texture dimensions
struct Input
{
float2 uv_MainTex;
};
fixed4 N64Sample(sampler2D tex, float2 uv, float4 texelSize)
{
float Texture_X = texelSize.x;
float Texture_Y = texelSize.y;
float2 tex_pix_a = float2(Texture_X, 0.0);
float2 tex_pix_b = float2(0.0, Texture_Y);
float2 tex_pix_c = float2(tex_pix_a.x, tex_pix_b.y);
float2 half_tex = float2(tex_pix_a.x * 0.5, tex_pix_b.y * 0.5);
float2 UVCentered = uv - half_tex;
float4 diffuseColor = tex2D(tex, UVCentered);
float4 sample_a = tex2D(tex, UVCentered + tex_pix_a);
float4 sample_b = tex2D(tex, UVCentered + tex_pix_b);
float4 sample_c = tex2D(tex, UVCentered + tex_pix_c);
float interp_x = modf(UVCentered.x + Texture_X, Texture_X);
float interp_y = modf(UVCentered.y + Texture_Y, Texture_Y);
if (UVCentered.x < 0) { interp_x = 1 - interp_x * -1; }
if (UVCentered.y < 0) { interp_y = 1 - interp_y * -1; }
diffuseColor = (diffuseColor + interp_x * (sample_a - diffuseColor) + interp_y * (sample_b - diffuseColor)) * (1 - step(1, interp_x + interp_y));
diffuseColor += (sample_c + (1 - interp_x) * (sample_b - sample_c) + (1 - interp_y) * (sample_a - sample_c)) * step(1, interp_x + interp_y);
return diffuseColor;
}
void surf(Input IN, inout SurfaceOutput o)
{
fixed4 c = N64Sample(_MainTex, IN.uv_MainTex, _MainTex_TexelSize);
o.Albedo = c.rgb;
o.Alpha = c.a;
}
ENDCG
}
FallBack "Diffuse"
}
Note: Make sure the import settings on your textures have the “Filter Mode” set to “Point” instead of Bilinear.
edit: After further testing, I don't think this version is actually working correctly. I'm not sure why their math results in different behavior in Unity… so just use bgolus's.
Is there a way to apply this as a URP rendering pass?
Can’t be done as a rendering pass. Would have to be implemented in Shader Graph and the resulting shader used on all objects.
Could you please assist me in converting my shader to URP? I have added vertex color functionality to create more authentic-looking art. Here it is:
Shader "Cyclopian/N64_Vertex"
{
Properties
{
_Color("Color", Color) = (1,1,1,1)
_MainTex("Albedo (RGB)", 2D) = "white" {}
[Range(0,1)]
_Cutoff("Alpha Cutoff", Range(0,1)) = 0.5
[Range(0,1)]
_Glossiness("Smoothness", Range(0,1)) = 0.5
[Range(0,1)]
_Metallic("Metallic", Range(0,1)) = 0.0
}
SubShader
{
Tags { "RenderType" = "Opaque" }
LOD 200
CGPROGRAM
#pragma surface surf Standard alphatest:_Cutoff fullforwardshadows
sampler2D _MainTex;
float4 _MainTex_TexelSize;
struct Input
{
float2 uv_MainTex;
float4 vertexColor : COLOR;
};
struct v2f {
float4 pos : SV_POSITION;
fixed4 color : COLOR;
};
fixed4 N64Filtering(sampler2D tex, float2 uv, float4 scale)
{
//texel coords
float2 texel = uv * scale.zw;
//get mip map coords and scaling
float2 mipX = ddx(texel), mipY = ddy(texel);
float delta_max_sqr = max(dot(mipX, mipX), dot(mipY, mipY));
float mip = max(0.0, 0.5 * log2(delta_max_sqr));
float size = pow(2, floor(mip));
scale.xy *= size;
scale.zw /= size;
texel = texel / size - 0.5;
//sample points
float2 fracTexl = frac(texel);
float2 uv1 = (floor(texel + fracTexl.yx) + 0.5) * scale.xy;
fixed4 out1 = tex2Dlod(tex, float4(uv1, 0, mip));
float2 uv2 = (floor(texel) + float2(1.5, 0.5)) * scale.xy;
fixed4 out2 = tex2Dlod(tex, float4(uv2, 0, mip));
float2 uv3 = (floor(texel) + float2(0.5, 1.5)) * scale.xy;
fixed4 out3 = tex2Dlod(tex, float4(uv3, 0, mip));
//calculate blend and apply
float3 blend = float3(abs(fracTexl.x + fracTexl.y - 1), min(abs(fracTexl.xx - float2(0, 1)), abs(fracTexl.yy - float2(1, 0))));
float4 _outTex = out1 * blend.x + out2 * blend.y + out3 * blend.z;
// blend and return
return _outTex;
}
half _Glossiness;
half _Metallic;
fixed4 _Color;
void surf(Input IN, inout SurfaceOutputStandard o)
{
            fixed4 c = N64Filtering(_MainTex, IN.uv_MainTex, _MainTex_TexelSize) * _Color * _Color * IN.vertexColor;
            o.Albedo = c.rgb; // texture already combined with _Color and the vertex color above
            // Metallic and smoothness come from slider variables
            o.Metallic = _Metallic;
            o.Smoothness = _Glossiness;
            o.Alpha = c.a;
        }
        ENDCG
    }
    FallBack "Diffuse"
}
Any help would be appreciated, even just some pointers. I've been working on this for a few days now and I'm lost as to where to begin!
You need to use Shader Graph to make a shader that works with the URP.
Are there any good pointers for integrating this into the Shader Graph for URP? Tried messing with custom functions, but couldn’t get any sort of reasonable output.
Just make a custom function with sampler2D, float2, and float4 input parameters, named the same as in that code, plus an output parameter of type float4 named _outTex. Then copy the inner code of that N64Filtering function into the custom function, but remove the return _outTex; line and change the line above it to just _outTex = out1 * ... instead of float4 _outTex = ...
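Something along these lines (an untested sketch using the parameter names from your shader; I've swapped fixed4 for float4 since URP's HLSL doesn't define the fixed types, and added the _float suffix that Shader Graph expects on file-based custom function names):
void N64Filtering_float(sampler2D tex, float2 uv, float4 scale, out float4 _outTex)
{
    //texel coords
    float2 texel = uv * scale.zw;
    //get mip map coords and scaling
    float2 mipX = ddx(texel), mipY = ddy(texel);
    float delta_max_sqr = max(dot(mipX, mipX), dot(mipY, mipY));
    float mip = max(0.0, 0.5 * log2(delta_max_sqr));
    float size = pow(2, floor(mip));
    scale.xy *= size;
    scale.zw /= size;
    texel = texel / size - 0.5;
    //sample points
    float2 fracTexl = frac(texel);
    float2 uv1 = (floor(texel + fracTexl.yx) + 0.5) * scale.xy;
    float4 out1 = tex2Dlod(tex, float4(uv1, 0, mip));
    float2 uv2 = (floor(texel) + float2(1.5, 0.5)) * scale.xy;
    float4 out2 = tex2Dlod(tex, float4(uv2, 0, mip));
    float2 uv3 = (floor(texel) + float2(0.5, 1.5)) * scale.xy;
    float4 out3 = tex2Dlod(tex, float4(uv3, 0, mip));
    //calculate blend and write to the output parameter instead of returning
    float3 blend = float3(abs(fracTexl.x + fracTexl.y - 1), min(abs(fracTexl.xx - float2(0, 1)), abs(fracTexl.yy - float2(1, 0))));
    _outTex = out1 * blend.x + out2 * blend.y + out3 * blend.z;
}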
I did that, but I don’t think Shader Graph supports a Sampler2D with a texture? The only input parameter equating to Sampler2D is “Bare Sampler State”, which only takes in a Sampler State node and never any texture.
sampler2D is older built-in syntax that automatically defines a SamplerState bound to the Texture2D. So you just need to make a Texture2D input along with that SamplerState input, then replace your tex2Dlod functions with texture2DinputName.SampleLevel(texSampler, uv, mip).
Here’s my attempt at converting bgolus’s shader above:
//UNITY_SHADER_NO_UPGRADE
#ifndef MAJORAS_MASK_IS_BETTER_THAN_OCARAINA_OF_TIME_COME_AT_ME_BRO_INCLUDED
#define MAJORAS_MASK_IS_BETTER_THAN_OCARAINA_OF_TIME_COME_AT_ME_BRO_INCLUDED

#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Common.hlsl"
#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Texture.hlsl"

void N64Sample_float(
    in UnityTexture2D Texture,
    in float2 UV,
    out float4 Out)
{
    // texel coordinates
    float4 texelSize = Texture.texelSize;
    float2 texels = UV * texelSize.zw;

    // calculate mip level
    float2 dx = ddx(texels);
    float2 dy = ddy(texels);
    float delta_max_sqr = max(dot(dx, dx), dot(dy, dy));
    float mip = max(0.0, 0.5 * log2(delta_max_sqr));

    // scale texel sizes and texel coordinates to handle mip levels properly
    float scale = pow(2, floor(mip));
    texelSize.xy *= scale;
    texelSize.zw /= scale;
    texels = texels / scale - 0.5;

    // calculate blend for the three points of the tri-filter
    float2 fracTexels = frac(texels);
    float3 blend = float3(
        abs(fracTexels.x + fracTexels.y - 1),
        min(abs(fracTexels.xx - float2(0, 1)), abs(fracTexels.yy - float2(1, 0)))
    );

    // calculate equivalents of point filtered uvs for the three points
    float2 uvA = (floor(texels + fracTexels.yx) + 0.5) * texelSize.xy;
    float2 uvB = (floor(texels) + float2(1.5, 0.5)) * texelSize.xy;
    float2 uvC = (floor(texels) + float2(0.5, 1.5)) * texelSize.xy;

    // sample points
    float4 A = Texture.SampleLevel(Texture.samplerstate, uvA, mip);
    float4 B = Texture.SampleLevel(Texture.samplerstate, uvB, mip);
    float4 C = Texture.SampleLevel(Texture.samplerstate, uvC, mip);

    // blend and return
    Out = A * blend.x + B * blend.y + C * blend.z;
}
#endif
Sorry, I don't know enough about Shader Graph to fully graphify it; I just use custom functions for anything that's going to be more than about 4 nodes big.
PS Happy Thanksgiving to Americans!