Problem porting a GLSL Shadertoy shader to Unity

Max*_*rdt 7 shader glsl unity-game-engine

I am currently trying to port a shadertoy.com shader (Atmospheric Scattering Sample, an interactive demo with code) to Unity. The shader is written in GLSL, and I have to start the editor with C:\Program Files\Unity\Editor>Unity.exe -force-opengl to make it render the shader (otherwise a "This shader cannot run on this GPU" error appears), but that is not the problem right now. The problem is porting the shader to Unity.

The functions for the scattering etc. are all identical and "runnable" in my ported shader; the only thing that differs is the mainImage() function, which manages the camera, the light direction and the ray direction itself. This has to be changed so that Unity's camera position, view direction, and light source and direction are used.

The original main function looks like this:

void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    // default ray dir
    vec3 dir = ray_dir( 45.0, iResolution.xy, fragCoord.xy );

    // default ray origin
    vec3 eye = vec3( 0.0, 0.0, 2.4 );

    // rotate camera
    mat3 rot = rot3xy( vec2( 0.0, iGlobalTime * 0.5 ) );
    dir = rot * dir;
    eye = rot * eye;

    // sun light dir
    vec3 l = vec3( 0, 0, 1 );

    vec2 e = ray_vs_sphere( eye, dir, R );
    if ( e.x > e.y ) {
        discard;
    }

    vec2 f = ray_vs_sphere( eye, dir, R_INNER );
    e.y = min( e.y, f.x );

    vec3 I = in_scatter( eye, dir, e, l );

    fragColor = vec4( I, 1.0 );
}

I have read the documentation for this function and how it works at https://www.shadertoy.com/howto:

Image shaders implement the mainImage() function in order to generate procedural images by computing a color for each pixel. This function is expected to be called once per pixel, and it is the responsibility of the host application to provide the right inputs to it, get the output color from it and assign it to the screen pixel. The prototype is:

void mainImage( out vec4 fragColor, in vec2 fragCoord );

where fragCoord contains the pixel coordinates for which the shader needs to compute a color. The coordinates are in pixel units, ranging from 0.5 to resolution-0.5 over the rendering surface, where the resolution is passed to the shader through the iResolution uniform (see below).

The resulting color is gathered in fragColor as a four-component vector, the last component of which is ignored by the client. The result is gathered as an "out" variable in anticipation of the future addition of multiple render targets.
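
To make that contract concrete, here is a minimal Shadertoy-style image shader (my own sketch, not from the question) that uses only fragCoord and iResolution:

void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    // normalize the pixel coordinates to 0..1 using the iResolution uniform
    vec2 uv = fragCoord.xy / iResolution.xy;
    // output a simple gradient: red increases with x, green with y
    fragColor = vec4( uv, 0.0, 1.0 );
}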

So in this function there are references to iGlobalTime to make the camera rotate over time, and to iResolution for the resolution. I have embedded the shader in a Unity shader and tried to fix and wire up dir, eye and l so that it works with Unity, but I am completely stuck. I get some kind of picture that is at least "related" to the original shader (top is the original, below is the current Unity state):
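
For reference (my addition, based on Unity's documented built-in shader variables): inside a GLSLPROGRAM block both of those Shadertoy uniforms have direct counterparts, which can be declared manually or pulled in via UnityCG.glslinc:

uniform vec4 _Time;         // (t/20, t, 2t, 3t); _Time.y replaces iGlobalTime
uniform vec4 _ScreenParams; // x = width, y = height in pixels; .xy replaces iResolution.xy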

[Image: comparison of the original shader (top) and the current Unity state (bottom)]

I am no shader professional; I only know some basics of OpenGL, and for the most part I write game logic in C#. So all I really did was look at other shader examples to see how to get the data about the camera, the light sources and so on into this code, but as you can see, nothing really works.

I copied the skeleton code for the shader from https://en.wikibooks.org/wiki/GLSL_Programming/Unity/Specular_Highlights and some of the vectors from http://forum.unity3d.com/threads/glsl-shader.39629/.

I hope someone can point me in some direction on how to fix this shader / port it to Unity correctly. Below is the current shader code. To reproduce it, all you have to do is create a new shader in a blank project, copy the code into it, make a new material, assign the shader to that material, then add a sphere, put the material on it, and add a directional light.

Shader "Unlit/AtmoFragShader" {
    Properties{
        _MainTex("Base (RGB)", 2D) = "white" {}
    _LC("LC", Color) = (1,0,0,0) /* stuff from the testing shader, now really used */
        _LP("LP", Vector) = (1,1,1,1)
    }

        SubShader{
        Tags{ "Queue" = "Geometry" } //Is this even the right queue?

        Pass{
        //Tags{ "LightMode" = "ForwardBase" }
        GLSLPROGRAM

    /* begin port by copying in the constants */
    // math const
    const float PI = 3.14159265359;
    const float DEG_TO_RAD = PI / 180.0;
    const float MAX = 10000.0;

    // scatter const
    const float K_R = 0.166;
    const float K_M = 0.0025;
    const float E = 14.3;                       // light intensity
    const vec3  C_R = vec3(0.3, 0.7, 1.0);  // 1 / wavelength ^ 4
    const float G_M = -0.85;                    // Mie g

    const float R = 1.0; /* this is the radius of the sphere? this should be set from the geometry or something.. */
    const float R_INNER = 0.7;
    const float SCALE_H = 4.0 / (R - R_INNER);
    const float SCALE_L = 1.0 / (R - R_INNER);

    const int NUM_OUT_SCATTER = 10;
    const float FNUM_OUT_SCATTER = 10.0;

    const int NUM_IN_SCATTER = 10;
    const float FNUM_IN_SCATTER = 10.0;

    /* begin functions. These are out of the defines because they should be accessible to anyone. */

    // angle : pitch, yaw
    mat3 rot3xy(vec2 angle) {
        vec2 c = cos(angle);
        vec2 s = sin(angle);

        return mat3(
            c.y, 0.0, -s.y,
            s.y * s.x, c.x, c.y * s.x,
            s.y * c.x, -s.x, c.y * c.x
            );
    }

    // ray direction
    vec3 ray_dir(float fov, vec2 size, vec2 pos) {
        vec2 xy = pos - size * 0.5;

        float cot_half_fov = tan((90.0 - fov * 0.5) * DEG_TO_RAD);
        float z = size.y * 0.5 * cot_half_fov;

        return normalize(vec3(xy, -z));
    }

    // ray intersects sphere
    // e = -b +/- sqrt( b^2 - c )
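    // For a normalized dir, solving |p + t*dir|^2 = r^2 yields
    // t^2 + 2*b*t + c = 0 with b = dot(p, dir) and c = dot(p, p) - r*r,
    // hence t = -b +/- sqrt(b*b - c).
    // Returns (near, far); (MAX, -MAX) signals a miss since near > far.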
    vec2 ray_vs_sphere(vec3 p, vec3 dir, float r) {
        float b = dot(p, dir);
        float c = dot(p, p) - r * r;

        float d = b * b - c;
        if (d < 0.0) {
            return vec2(MAX, -MAX);
        }
        d = sqrt(d);

        return vec2(-b - d, -b + d);
    }

    // Mie
    // g : ( -0.75, -0.999 )
    //      3 * ( 1 - g^2 )               1 + c^2
    // F = ----------------- * -------------------------------
    //      2 * ( 2 + g^2 )     ( 1 + g^2 - 2 * g * c )^(3/2)
    float phase_mie(float g, float c, float cc) {
        float gg = g * g;

        float a = (1.0 - gg) * (1.0 + cc);

        float b = 1.0 + gg - 2.0 * g * c;
        b *= sqrt(b);
        b *= 2.0 + gg;

        return 1.5 * a / b;
    }

    // Rayleigh
    // g : 0
    // F = 3/4 * ( 1 + c^2 )
    float phase_reyleigh(float cc) {
        return 0.75 * (1.0 + cc);
    }

    float density(vec3 p) {
        return exp(-(length(p) - R_INNER) * SCALE_H);
    }

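    // optic() approximates the optical-depth integral along the segment
    // p -> q with a midpoint sum over NUM_OUT_SCATTER samples, scaled by
    // the step length and SCALE_L.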
    float optic(vec3 p, vec3 q) {
        vec3 step = (q - p) / FNUM_OUT_SCATTER;
        vec3 v = p + step * 0.5;

        float sum = 0.0;
        for (int i = 0; i < NUM_OUT_SCATTER; i++) {
            sum += density(v);
            v += step;
        }
        sum *= length(step) * SCALE_L;

        return sum;
    }

    vec3 in_scatter(vec3 o, vec3 dir, vec2 e, vec3 l) {
        float len = (e.y - e.x) / FNUM_IN_SCATTER;
        vec3 step = dir * len;
        vec3 p = o + dir * e.x;
        vec3 v = p + dir * (len * 0.5);

        vec3 sum = vec3(0.0);
        for (int i = 0; i < NUM_IN_SCATTER; i++) {
            vec2 f = ray_vs_sphere(v, l, R);
            vec3 u = v + l * f.y;

            float n = (optic(p, v) + optic(v, u)) * (PI * 4.0);

            sum += density(v) * exp(-n * (K_R * C_R + K_M));

            v += step;
        }
        sum *= len * SCALE_L;

        float c = dot(dir, -l);
        float cc = c * c;

        return sum * (K_R * C_R * phase_reyleigh(cc) + K_M * phase_mie(G_M, c, cc)) * E;
    }
    /* end functions */
    /* vertex shader begins here*/
#ifdef VERTEX
    const float SpecularContribution = 0.3;
    const float DiffuseContribution = 1.0 - SpecularContribution;

    uniform vec4 _LP;
    varying vec2 TextureCoordinate;
    varying float LightIntensity; 
    varying vec4 someOutput;

    /* transient stuff */
    varying vec3 eyeOutput;
    varying vec3 dirOutput;
    varying vec3 lOutput;
    varying vec2 eOutput; 

    /* lighting stuff */
    // i.e. one could #include "UnityCG.glslinc" 
    uniform vec3 _WorldSpaceCameraPos;
    // camera position in world space
    uniform mat4 _Object2World; // model matrix
    uniform mat4 _World2Object; // inverse model matrix
    uniform vec4 _WorldSpaceLightPos0;
    // direction to or position of light source
    uniform vec4 _LightColor0;
    // color of light source (from "Lighting.cginc")


    void main()
    {
        /* code from that example shader */
        gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;

        vec3 ecPosition = vec3(gl_ModelViewMatrix * gl_Vertex);
        vec3 tnorm = normalize(gl_NormalMatrix * gl_Normal);
        vec3 lightVec = normalize(_LP.xyz - ecPosition);

        vec3 reflectVec = reflect(-lightVec, tnorm);
        vec3 viewVec = normalize(-ecPosition);

        /* copied from https://en.wikibooks.org/wiki/GLSL_Programming/Unity/Specular_Highlights for testing stuff */
        //I have no idea what I'm doing, but hopefully this computes some vectors which I need
        mat4 modelMatrix = _Object2World;
        mat4 modelMatrixInverse = _World2Object; // unity_Scale.w 
                                                 // is unnecessary because we normalize vectors

        vec3 normalDirection = normalize(vec3(
            vec4(gl_Normal, 0.0) * modelMatrixInverse));
        vec3 viewDirection = normalize(vec3(
            vec4(_WorldSpaceCameraPos, 1.0)
            - modelMatrix * gl_Vertex));
        vec3 lightDirection;
        float attenuation;

        if (0.0 == _WorldSpaceLightPos0.w) // directional light?
        {
            attenuation = 1.0; // no attenuation
            lightDirection = normalize(vec3(_WorldSpaceLightPos0));
        }
        else // point or spot light
        {
            vec3 vertexToLightSource = vec3(_WorldSpaceLightPos0
                - modelMatrix * gl_Vertex);
            float distance = length(vertexToLightSource);
            attenuation = 1.0 / distance; // linear attenuation 
            lightDirection = normalize(vertexToLightSource);
        }
        /* test port */
        // default ray dir
        //That's the direction of the camera here? 
        vec3 dir = viewDirection; //normalDirection;//viewDirection;// tnorm;//lightVec;//lightDirection;//normalDirection; //lightVec;//tnorm;//ray_dir(45.0, iResolution.xy, fragCoord.xy);

        // default ray origin
        //I think they mean the position of the camera here? 
        vec3 eye = vec3(_WorldSpaceCameraPos); //vec3(_WorldSpaceLightPos0); //// vec3(0.0, 0.0, 0.0); //_WorldSpaceCameraPos;//ecPosition; //vec3(0.0, 0.0, 2.4);

        // rotate camera not needed, remove it

        // sun light dir
        //I think they mean the direction of our directional light? 
        vec3 l = lightDirection;//_LightColor0.xyz; //lightDirection; //normalDirection;//normalize(vec3(_WorldSpaceLightPos0));//lightVec;// vec3(0, 0, 1);

        /* this computes the intersection of the ray and the sphere.. is this really needed?*/
        vec2 e = ray_vs_sphere(eye, dir, R);
        /* copy stuff so that we can use it in the fragment shader; "discard" is only allowed in the fragment shader,
        so the rest has to be computed in the fragment shader */
        eOutput = e;
        eyeOutput = eye;
        dirOutput = dir;
        lOutput = l;
    }

#endif

#ifdef FRAGMENT

    uniform sampler2D _MainTex;
    varying vec2 TextureCoordinate;
    uniform vec4 _LC;
    varying float LightIntensity;

    /* transient port */
    varying vec3 eyeOutput;
    varying vec3 dirOutput;
    varying vec3 lOutput;
    varying vec2 eOutput;

    void main()
    {
        /* real fragment */

        if (eOutput.x > eOutput.y) {
            //discard;
        }

        vec2 f = ray_vs_sphere(eyeOutput, dirOutput, R_INNER);
        vec2 e = eOutput;
        e.y = min(e.y, f.x);

        vec3 I = in_scatter(eyeOutput, dirOutput, e, lOutput);
        gl_FragColor = vec4(I, 1.0);

        /*vec4 c2;
        c2.x = 1.0;
        c2.y = 1.0;
        c2.z = 0.0;
        c2.w = 1.0f;
        gl_FragColor = c2;*/
        //gl_FragColor = c;
    }

#endif

    ENDGLSL
    }
    }
}

Any help is appreciated; sorry for the long post and explanations.

EDIT: I just found out that the radius of the sphere does have an influence on this; a sphere with scale 2.0 in every direction gives a much better result. However, the picture is still completely independent of the camera's viewing angle and of any lights, which is far from the Shadertoy version:

[Image: STATUS2 — current state after scaling the sphere]

Car*_*ira 2

It looks like you are trying to render a 2D texture over a sphere. There are a few different approaches to that. For what you want to do, I would apply the shader on a plane that intersects the sphere.


For general use, check out this article on how to convert ShaderToy shaders to Unity3D.


I have included some of the steps here (a sketch that applies them to the question's mainImage() follows the list):

• Replace the iGlobalTime shader input ("shader playback time in seconds") with _Time.y
• Replace iResolution.xy ("viewport resolution in pixels") with _ScreenParams.xy
• Replace vec2 types with float2, mat2 with float2x2, etc.
• Replace the vec3(1) shortcut constructor, in which all elements get the same value, with the explicit float3(1,1,1)
• Replace Texture2D with Tex2D
• Replace atan(x,y) with atan2(y,x) <- note the parameter order!
• Replace mix() with lerp()
• Replace *= with mul()
• Remove the third (bias) parameter from Texture2D lookups
• mainImage(out vec4 fragColor, in vec2 fragCoord) is the fragment shader function, equivalent to float4 mainImage(float2 fragCoord : SV_POSITION) : SV_Target
• UV coordinates in GLSL have 0 at the top and increase downwards; in HLSL 0 is at the bottom and increases upwards, so you may need to use uv.y = 1 - uv.y at some point.
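
As a sketch of those rules in action (my addition, not from the linked article), the question's mainImage() would come out roughly like this as a Unity CGPROGRAM fragment function, assuming ray_dir, rot3xy, ray_vs_sphere, in_scatter and the constants R and R_INNER have been converted to HLSL the same way:

float4 mainImage(float2 fragCoord : SV_POSITION) : SV_Target
{
    // iResolution.xy -> _ScreenParams.xy
    float3 dir = ray_dir(45.0, _ScreenParams.xy, fragCoord.xy);

    // default ray origin
    float3 eye = float3(0.0, 0.0, 2.4);

    // iGlobalTime -> _Time.y; mat3 -> float3x3; matrix * vector -> mul()
    float3x3 rot = rot3xy(float2(0.0, _Time.y * 0.5));
    dir = mul(rot, dir);
    eye = mul(rot, eye);

    // sun light dir
    float3 l = float3(0.0, 0.0, 1.0);

    float2 e = ray_vs_sphere(eye, dir, R);
    if (e.x > e.y) {
        discard; // the ray misses the atmosphere entirely
    }

    float2 f = ray_vs_sphere(eye, dir, R_INNER);
    e.y = min(e.y, f.x);

    float3 I = in_scatter(eye, dir, e, l);
    return float4(I, 1.0);
}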

Concerning this question:

Tags{ "Queue" = "Geometry" } //Is this even the right queue?\n
Run Code Online (Sandbox Code Playgroud)\n\n

The queue refers to the order in which objects will be rendered: Geometry is among the first; if you want the shader to run on top of everything, you can use, for example, Overlay (see the ShaderLab snippet after the list below). More on this topic here:

• Background - this render queue is rendered before any others. It is used for skyboxes and the like.
• Geometry (default) - this is used for most objects. Opaque geometry uses this queue.
• AlphaTest - alpha-tested geometry uses this queue. It is a separate queue from Geometry because it is more efficient to render alpha-tested objects after all solid objects have been drawn.
• Transparent - this render queue is rendered after Geometry and AlphaTest, in back-to-front order. Anything alpha-blended (i.e. shaders that do not write to the depth buffer) should go here (glass, particle effects).
• Overlay - this render queue is meant for overlay effects. Anything rendered last should go here (e.g. lens flares).
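
For instance (my sketch, not part of the original answer), moving the question's pass to the last-rendered queue would only require changing the tag:

Tags{ "Queue" = "Overlay" } // render after all opaque and transparent geometry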