Seamless procedural terrain (plane)

Hi!
I'm following a tutorial about procedural terrain generation using planes. Right now everything is fine, except the seams on the terrain tiles. I hope someone can lead me in the right direction. As far as I understand the code, the edges of the corresponding planes get the same heights to stitch the planes together. That leads to a really unusable result, as you can see in the attached image. I was thinking that linear interpolation and normalizing might do it better, but I have no idea how to achieve this. I'm not a programmer, just interested in learning.

NoiseMapGeneration.cs

using System.Collections;
using System.Collections.Generic;
using UnityEngine;
[System.Serializable]
// One Perlin-noise octave layer. Several Waves are summed in
// NoiseMapGeneration.GeneratNoiseMap to build the final height map.
public class Wave
{
    // Constant added to both noise coordinates so each wave samples a different region of the noise field.
    public float seed;
    // Multiplier on the sample coordinates; higher values produce finer detail.
    public float frequency;
    // Weight of this wave's contribution; also used to normalize the summed noise back into [0, 1].
    public float amplitude;
}
public class NoiseMapGeneration : MonoBehaviour
{
    /// <summary>
    /// Builds a [depth, width] grid of layered Perlin noise values in [0, 1].
    /// </summary>
    /// <param name="mapDepth">Number of rows (z axis) in the noise map.</param>
    /// <param name="mapWidth">Number of columns (x axis) in the noise map.</param>
    /// <param name="scale">Divisor applied to sample coordinates; larger values zoom the noise out. Must be non-zero.</param>
    /// <param name="offsetX">World-space x offset so adjacent tiles sample a contiguous noise field.</param>
    /// <param name="offsetY">World-space z offset so adjacent tiles sample a contiguous noise field.</param>
    /// <param name="waves">Octave layers (seed/frequency/amplitude) to sum; may be empty.</param>
    /// <returns>Noise values indexed as [zIndex, xIndex], normalized by the total amplitude.</returns>
    // NOTE: the method name keeps the original (misspelled) "GeneratNoiseMap"
    // so existing callers such as TileGeneration continue to compile.
    public float[,] GeneratNoiseMap(int mapDepth, int mapWidth, float scale, float offsetX, float offsetY, Wave[] waves)
    {
        float[,] noiseMap = new float[mapDepth, mapWidth];
        for (int zIndex = 0; zIndex < mapDepth; zIndex++)
        {
            for (int xIndex = 0; xIndex < mapWidth; xIndex++)
            {
                // Offset by the tile's world position so neighbouring tiles line up seamlessly.
                float sampleX = (xIndex + offsetX) / scale;
                float sampleZ = (zIndex + offsetY) / scale;
                float noise = 0f;
                float normalization = 0f;
                foreach (Wave wave in waves)
                {
                    noise += wave.amplitude * Mathf.PerlinNoise(sampleX * wave.frequency + wave.seed, sampleZ * wave.frequency + wave.seed);
                    normalization += wave.amplitude;
                }
                // Guard against division by zero (no waves, or all amplitudes 0),
                // which previously filled the map with NaN heights.
                noiseMap[zIndex, xIndex] = normalization > 0f ? noise / normalization : 0f;
            }
        }
        return noiseMap;
    }
}

TileGeneration.cs

using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
[System.Serializable]
// A paintable terrain band (e.g. water/sand/grass) selected by height threshold
// in TileGeneration.ChooseTerrainType.
public class TerrainType
{
    // Display name for the Inspector only; not read by the generation code shown here.
    public string name;
    // Upper height threshold: a sample belongs to this type when its height is below this value.
    public float height;
    // Texture color written for every height-map sample that falls in this band.
    public Color color;
}
public class TileGeneration : MonoBehaviour
{
    // Height bands used to paint the tile texture; expected to be ordered by ascending height
    // (ChooseTerrainType returns the first band whose threshold exceeds the sample).
    [SerializeField]
    TerrainType[] terrainTypes;

    // Shared noise generator; the same instance must be used by every tile so edges match.
    [SerializeField]
    NoiseMapGeneration noiseMapGeneration;

    [SerializeField]
    private MeshRenderer tileRenderer;

    [SerializeField]
    private MeshFilter meshFilter;

    [SerializeField]
    private MeshCollider meshCollider;

    // World units per noise unit; larger = smoother, broader features.
    [SerializeField]
    private float mapScale;

    // Vertical exaggeration applied after the height curve.
    [SerializeField]
    private float heightMultiplier;

    // Remaps raw noise [0,1] before scaling, e.g. to flatten water and sharpen peaks.
    [SerializeField]
    private AnimationCurve heightCurve;

    [SerializeField]
    private Wave[] waves;

    /// <summary>
    /// Displaces the plane's vertices vertically according to the height map,
    /// then refreshes bounds, normals and the collider mesh.
    /// </summary>
    /// <param name="heightMap">Noise values indexed as [zIndex, xIndex]; one entry per vertex.</param>
    private void UpdateMeshVertices(float[,] heightMap)
    {
        int tileDepth = heightMap.GetLength(0);
        int tileWidth = heightMap.GetLength(1);
        // Cache the instance mesh once instead of re-reading the property for every access.
        Mesh mesh = this.meshFilter.mesh;
        Vector3[] meshVertices = mesh.vertices;
        int vertexIndex = 0;
        // Assumes the plane's vertex order matches row-major [z, x] iteration — true for the
        // default Unity plane this tutorial targets; verify if you swap in a custom mesh.
        for (int zIndex = 0; zIndex < tileDepth; zIndex++)
        {
            for (int xIndex = 0; xIndex < tileWidth; xIndex++)
            {
                float height = heightMap[zIndex, xIndex];
                Vector3 vertex = meshVertices[vertexIndex];
                // Only the y component changes; x/z keep the plane's original layout.
                meshVertices[vertexIndex] = new Vector3(vertex.x, this.heightCurve.Evaluate(height) * this.heightMultiplier, vertex.z);
                vertexIndex++;
            }
        }
        mesh.vertices = meshVertices;
        mesh.RecalculateBounds();
        // NOTE: normals are recalculated per tile in isolation, so edge normals differ
        // between neighbouring tiles — this is the cause of the visible seams.
        mesh.RecalculateNormals();
        this.meshCollider.sharedMesh = mesh;
    }

    // Start is called before the first frame update
    void Start()
    {
        GenerateTile();
    }

    /// <summary>
    /// Generates this tile's height map from its world position (so neighbours are
    /// continuous), paints the texture, and displaces the mesh.
    /// </summary>
    private void GenerateTile()
    {
        Vector3[] meshVertices = this.meshFilter.mesh.vertices;
        // The default plane has a square vertex grid, so depth = width = sqrt(count).
        int tileDepth = (int)Mathf.Sqrt(meshVertices.Length);
        int tileWidth = tileDepth;
        // Negative world position feeds the noise sampler so tiles share one noise field.
        float offsetX = -this.transform.position.x;
        float offsetY = -this.transform.position.z;
        float[,] heightMap = this.noiseMapGeneration.GeneratNoiseMap(tileDepth, tileWidth, this.mapScale, offsetX, offsetY, waves);
        Texture2D tileTexture = BuildTexture(heightMap);
        this.tileRenderer.material.mainTexture = tileTexture;
        UpdateMeshVertices(heightMap);
    }

    /// <summary>
    /// Builds a texture the same size as the height map, one pixel per sample,
    /// colored by the matching terrain type.
    /// </summary>
    private Texture2D BuildTexture(float[,] heightMap)
    {
        int tileDepth = heightMap.GetLength(0);
        int tileWidth = heightMap.GetLength(1);
        Color[] colorMap = new Color[tileDepth * tileWidth];
        for (int zIndex = 0; zIndex < tileDepth; zIndex++)
        {
            for (int xIndex = 0; xIndex < tileWidth; xIndex++)
            {
                int colorIndex = zIndex * tileWidth + xIndex;
                float height = heightMap[zIndex, xIndex];
                TerrainType terrainType = ChooseTerrainType(height);
                colorMap[colorIndex] = terrainType.color;
            }
        }
        Texture2D tileTexture = new Texture2D(tileWidth, tileDepth);
        // Clamp so neighbouring tiles don't bleed wrapped texels across their shared edge.
        tileTexture.wrapMode = TextureWrapMode.Clamp;
        tileTexture.SetPixels(colorMap);
        tileTexture.Apply();
        return tileTexture;
    }

    /// <summary>
    /// Returns the first terrain band whose threshold exceeds the given height,
    /// falling back to the last band for heights above every threshold.
    /// </summary>
    TerrainType ChooseTerrainType(float height)
    {
        foreach (TerrainType terrainType in terrainTypes)
        {
            if (height < terrainType.height)
            {
                return terrainType;
            }
        }
        return terrainTypes[terrainTypes.Length - 1];
    }
}

LevelGeneration.cs

using System.Collections;
using System.Collections.Generic;
using UnityEngine;

public class LevelGeneration : MonoBehaviour
{
    // Grid dimensions, in tiles. The field name keeps the original "Despth" typo so
    // existing Inspector serialization is preserved; rename with
    // [FormerlySerializedAs] if you want to fix it safely.
    [SerializeField]
    private int mapWidthInTiles, mapDespthInTiles;

    // Prefab carrying the TileGeneration script and a MeshRenderer used for sizing.
    [SerializeField]
    private GameObject tilePrefab;

    // Start is called before the first frame update
    void Start()
    {
        GenerateMap();
    }

    /// <summary>
    /// Instantiates a grid of terrain tiles laid edge to edge. Tile spacing comes
    /// from the prefab's renderer bounds, so each tile's world position doubles as
    /// its noise offset in TileGeneration.
    /// </summary>
    void GenerateMap()
    {
        Vector3 tileSize = tilePrefab.GetComponent<MeshRenderer>().bounds.size;
        // Truncation assumes whole-unit tile sizes; fractional bounds would leave gaps.
        int tileWidth = (int)tileSize.x;
        int tileDepth = (int)tileSize.z;
        for (int xTileIndex = 0; xTileIndex < mapWidthInTiles; xTileIndex++)
        {
            for (int zTileIndex = 0; zTileIndex < mapDespthInTiles; zTileIndex++)
            {
                Vector3 tilePosition = new Vector3(
                    this.transform.position.x + xTileIndex * tileWidth,
                    this.transform.position.y,
                    this.transform.position.z + zTileIndex * tileDepth);
                // The generic Instantiate overload already returns GameObject,
                // so the previous "as GameObject" cast (and unused local) were redundant.
                Instantiate(tilePrefab, tilePosition, Quaternion.identity);
            }
        }
    }
}

Thanks in advance for your help.

The problem with your current approach is that each tile mesh is created in isolation. Although the edge points match up, they are attached to different triangle edges so their normals will be different on each mesh. When it gets rendered the lighting is then calculated differently and that’s why you end up with the visible seam on the join.

The simplest way of fixing is to just create one huge map that only contains one tile rather than a map with lots of tiles like you currently have.

The more complicated fix would be to take the vertices from the sides of each tile and find their identical points on the other neighbouring meshes. You could then generate normal values for the shared vertices that would solve the lighting issue you’re seeing.

My mistake was to assume that RecalculateNormals() would do that. I changed all normals to Vector3.back for testing and the “seams” disappeared. An unlit material also worked. I would try to create an array[,] of terrains, check the neighbours' edges (in 8 directions to cover the corners too), recalculate the normals at the edges, and the “seams” should disappear. Time to read more about normals. A single giant terrain is not an option in my case, I think.

What you’re having is an age-old problem of stitching map chunks.
A single giant terrain is not a good approach, you need chunks for a reason. Also there are limits in how many vertices you can have per mesh.

The normals work per vertex, however if you want smooth geometry that means that the vertices are shared between the faces, and so you need RecalculateNormals to get them properly oriented.

The best solution would be to do normal calculation yourself, by including the connected faces, without adding them to chunk geometry. Calculating normals is not easy. I’ve done it before. What you want is called weighted normals, and it’s a serious algorithm.

The other way, maybe easier, would be to create a mesh as usual but with the overlapping rim geometry included, and THEN find a way to remove this extraneous geometry, as a post processing technique.

All in all, there is no easy way out.
You could also make the terrain look faceted, and this will solve your normal issues, but then each face has a triplet of independent normals, and a mesh is bigger in memory and GPU.

Finally you can make a combination of the techniques above, do a regular RecalculateNormals, but as a post process pass, introduce normal correction, by considering all neighboring normals that live on different chunks.

You sample them, and average them, then assign this average back.
Literally averaging them is all you need to do; like X=(A+B+C+…+N) / n, where n is the amount of coincidental normals per single apparent vertex. (this technique is called normal smoothing.)

Click here for a potential problem with this.

This can fail spectacularly only if your face areas vary a lot. If they do, then you’d still need to calculate weighted normals, but in this context it’s really a simple formula that takes into account two things: the face area and the acuteness of the local angle. I can find it if you need it. You do however need access to face vertices, and this is something you just don’t get out of box (* see next paragraph), so you need to be able to cache certain topological relationships to access the model in a more robust way than what Unity provides with basic lists/arrays.

  • You can access the vertices, of course, but there is no clear relationship between a normal and the other two vertices. You’d have to look for the specific vertex index in triangles, and then extract the other two vertices. That’s horribly slow. For these reasons, you want to basically, reinvent how the mesh is stored, and efficiently produce Unity Mesh when it’s needed. Which is what I typically end up doing in all my adventures.

This is also the most performant way I can think of, because you need not allocate/deallocate extra memory. You just read from normals array and write new data back, and the algorithm is in logarithmic time if you are careful, because the chunk area (normal list) is exponentially related to its perimeter (the selection you care about).

Depending on how you manage frustum occlusion (normally you wouldn’t render a chunk that’s out of sight), you can also get away with the edge cases, and ignore them completely, because they are probably out of sight anyway.