Passing touch position to compute shader

I am working on a game where I have to mix colored liquids as shown here. I was able to achieve this effect by using a plug-in (StableFluids):
(animated GIF: https://i.imgur.com/XLZlc2e.gif)

Problem:
The shaders used in the plug-in produce the effect I want, but in the wrong position: instead of being triggered at the point where the user’s finger touches the screen, the effect appears at other points.

Implementation:
The plug-in has one compute shader that takes two parameters: a force origin and a force direction. The force direction works correctly, since the deformation follows the direction of the finger swipe. However, I am having trouble converting the touch input position into an accurate force origin value. I am passing the force origin to the shader using Unity’s RaycastHit.textureCoord.
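For reference, this is roughly what my input code does at the moment (a simplified sketch, not my exact code; _compute and _camera are placeholder field names):

using UnityEngine;

// Simplified sketch of the current approach: raycast from the touch position
// and pass the hit UV straight through as the force origin.
public class CurrentTouchInput : MonoBehaviour
{
    [SerializeField] ComputeShader _compute;   // the StableFluids compute shader
    [SerializeField] Camera _camera;

    void Update()
    {
        if (Input.touchCount == 0) return;

        var ray = _camera.ScreenPointToRay(Input.GetTouch(0).position);
        if (Physics.Raycast(ray, out var hit))
        {
            // hit.textureCoord is a UV in [0, 1] x [0, 1] on the liquid surface;
            // this is the value that ends up producing the offset effect.
            _compute.SetVector("ForceOrigin", hit.textureCoord);
        }
    }
}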

Below is the compute shader code:

// StableFluids - A GPU implementation of Jos Stam's Stable Fluids on Unity
// https://github.com/keijiro/StableFluids

#pragma kernel Advect
#pragma kernel Force
#pragma kernel PSetup
#pragma kernel PFinish
#pragma kernel Jacobi1
#pragma kernel Jacobi2

// Common parameter
float Time;
float DeltaTime;

// External force
float2 ForceOrigin;
float2 ForceVector;
float ForceExponent;

// U (velocity field)
Texture2D<float2> U_in;
SamplerState samplerU_in;
RWTexture2D<float2> U_out;

// W (velocity field; working)
Texture2D<float2> W_in;
RWTexture2D<float2> W_out;

// Div W
RWTexture2D<float> DivW_out;

// P (pressure field)
Texture2D<float> P_in;
RWTexture2D<float> P_out;

// Color map
Texture2D<half4> C_in;
SamplerState samplerC_in;
RWTexture2D<half4> C_out;

// Jacobi method arguments
float Alpha, Beta;

Texture2D<float> X1_in;
Texture2D<float> B1_in;
RWTexture2D<float> X1_out;

Texture2D<float2> X2_in;
Texture2D<float2> B2_in;
RWTexture2D<float2> X2_out;

// Advect step
[numthreads(8, 8, 1)]
void Advect(uint2 tid : SV_DispatchThreadID)
{
    uint2 dim;
    W_out.GetDimensions(dim.x, dim.y);

    float2 uv = (tid + 0.5) / dim;
    float2 duv = U_in[tid] * float2((float)dim.y / dim.x, 1) * DeltaTime;
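    // The (dim.y / dim.x, 1) factor converts the height-normalized velocity
    // into UV space, matching the centered coordinates used in the Force kernel.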

    W_out[tid] = U_in.SampleLevel(samplerU_in, uv - duv, 0);
}

// Add-force step
[numthreads(8, 8, 1)]
void Force(uint2 tid : SV_DispatchThreadID)
{
    uint2 dim;
    W_out.GetDimensions(dim.x, dim.y);

    float2 pos = (tid + 0.5 - dim * 0.5) / dim.y;
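    // pos (above) is measured from the texture center and normalized by the
    // texture height: x spans [-0.5 * aspect, 0.5 * aspect], y spans [-0.5, 0.5].
    // ForceOrigin must be supplied in this same space, not as a raw [0, 1] UV.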
    float amp = exp(-ForceExponent * distance(ForceOrigin, pos));

    W_out[tid] = W_in[tid] + ForceVector * amp;
}

// Setup for Project step (divW calculation)
[numthreads(8, 8, 1)]
void PSetup(uint2 tid : SV_DispatchThreadID)
{
    uint2 dim;
    W_in.GetDimensions(dim.x, dim.y);

    DivW_out[tid] = (W_in[tid + int2(1, 0)].x - W_in[tid - int2(1, 0)].x +
                     W_in[tid + int2(0, 1)].y - W_in[tid - int2(0, 1)].y) * dim.y / 2;

    P_out[tid] = 0;
}

// Finishing for Project step (divergence free field calculation)
[numthreads(8, 8, 1)]
void PFinish(uint2 tid : SV_DispatchThreadID)
{
    uint2 dim;
    W_in.GetDimensions(dim.x, dim.y);

    if (any(tid == 0) || any(tid == dim - 1)) return;

    float P1 = P_in[max(tid - int2(1, 0), 1)];
    float P2 = P_in[min(tid + int2(1, 0), dim - 2)];
    float P3 = P_in[max(tid - int2(0, 1), 1)];
    float P4 = P_in[min(tid + int2(0, 1), dim - 2)];

    float2 u = W_in[tid] - float2(P2 - P1, P4 - P3) * dim.y / 2;

    U_out[tid] = u;

    if (tid.x == 1) U_out[int2(0, tid.y)] = -u;
    if (tid.y == 1) U_out[int2(tid.x, 0)] = -u;
    if (tid.x == dim.x - 2) U_out[int2(dim.x - 1, tid.y)] = -u;
    if (tid.y == dim.y - 2) U_out[int2(tid.x, dim.y - 1)] = -u;
}

// Jacobi method with a scalar field
[numthreads(8, 8, 1)]
void Jacobi1(uint2 tid : SV_DispatchThreadID)
{
    X1_out[tid] = (X1_in[tid - int2(1, 0)] + X1_in[tid + int2(1, 0)] +
                   X1_in[tid - int2(0, 1)] + X1_in[tid + int2(0, 1)] + Alpha * B1_in[tid]) / Beta;
}

// Jacobi method with a vector field
[numthreads(8, 8, 1)]
void Jacobi2(uint2 tid : SV_DispatchThreadID)
{
    X2_out[tid] = (X2_in[tid - int2(1, 0)] + X2_in[tid + int2(1, 0)] +
                   X2_in[tid - int2(0, 1)] + X2_in[tid + int2(0, 1)] + Alpha * B2_in[tid]) / Beta;
}

Questions:

  • What conversion is needed to map the touch input position into the shader’s coordinate system so that the effect appears where the finger touched?
  • Are there any other open-source plug-ins for this kind of effect that have a clearer mapping between the touch input and the shader’s effect coordinates?

I would appreciate any suggestions and thoughts on this topic. Thank you.

This is what the example seems to be using for the mouse position:
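Something along these lines (reconstructed here since the attachment is missing, based on the Force kernel's coordinate convention, so treat it as a sketch rather than the repository's verbatim code):

// Map the mouse position into the same centered, height-normalized space
// that the Force kernel uses for pos.
var input = new Vector2(
    (Input.mousePosition.x - Screen.width  * 0.5f) / Screen.height,
    (Input.mousePosition.y - Screen.height * 0.5f) / Screen.height);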

And this for getting the direction on a drag (from the previous position):
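Roughly like this (again a sketch; _previousInput and _force are assumed names for the stored previous position and a strength multiplier):

// The force direction is the frame-to-frame delta of the mapped input
// position, scaled by a strength multiplier.
_compute.SetVector("ForceVector", (input - _previousInput) * _force);
_previousInput = input;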

Thank you for your reply. I converted the direction calculation part of the code to make it work with finger touch.

Regarding the calculation of the mouse position: that code only works if the texture covers the entire screen of the device. I want robust code that works with flat surfaces of any shape.
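Based on the Force kernel's mapping (pos = (tid + 0.5 - dim * 0.5) / dim.y), I think the hit UV from RaycastHit.textureCoord has to be remapped into that same centered, height-normalized space. Below is a minimal sketch of what I have in mind, assuming the liquid surface has a MeshCollider and that _simWidth/_simHeight match the simulation texture's resolution (all field names are placeholders, not the plug-in's API):

using UnityEngine;

public class LiquidTouchDriver : MonoBehaviour
{
    [SerializeField] ComputeShader _compute;   // the StableFluids compute shader
    [SerializeField] Camera _camera;
    [SerializeField] int _simWidth = 512;      // simulation texture resolution
    [SerializeField] int _simHeight = 512;

    Vector2 _previousOrigin;

    void Update()
    {
        if (Input.touchCount == 0) return;
        var touch = Input.GetTouch(0);

        // textureCoord requires a MeshCollider on the liquid surface.
        var ray = _camera.ScreenPointToRay(touch.position);
        if (!Physics.Raycast(ray, out var hit)) return;

        var uv = hit.textureCoord;                 // [0, 1] x [0, 1]
        float aspect = (float)_simWidth / _simHeight;

        // Invert the Force kernel's mapping:
        //   uv  = (tid + 0.5) / dim
        //   pos = (tid + 0.5 - dim * 0.5) / dim.y = (uv - 0.5) * dim / dim.y
        var origin = new Vector2((uv.x - 0.5f) * aspect, uv.y - 0.5f);
        _compute.SetVector("ForceOrigin", origin);

        // Direction from the previous frame's origin (scale as needed).
        if (touch.phase == TouchPhase.Moved)
            _compute.SetVector("ForceVector", origin - _previousOrigin);

        _previousOrigin = origin;
    }
}

Because both axes are normalized by the texture height, this stays consistent with the distance-based falloff in the Force kernel regardless of the texture's aspect ratio, and it should keep working even when the textured surface only covers part of the screen.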