2015-09-28 66 views
1

我正在製作一個Unity視頻遊戲,並決定使用光線追蹤。我已經有了代碼,但正如你馬上會看到的,它並不是真正地逐幀實時渲染。下面是我的光線追蹤代碼,它是掛在主攝像機上的主要腳本。如何在Unity中用C#實現實時光線追蹤?

using UnityEngine; 
using System.Collections; 

public class RayTracer : MonoBehaviour
{
    // Colour intended for rays that hit nothing. NOTE(review): TraceRay never
    // actually uses it — misses keep the accumulated colour — confirm intent.
    public Color backgroundColor = Color.black;
    // Fraction of the screen resolution to render at (1 = full size).
    public float RenderResolution = 1f;
    // Maximum ray length for all physics hit tests.
    public float maxDist = 100f;
    // Maximum depth of the reflection/transmission recursion.
    public int maxRecursion = 4;

    private Light[] lights;          // every Light in the scene, cached once in Awake
    private Texture2D renderTexture; // CPU-side target the trace writes into
    private Camera cam;              // cached camera; avoids one GetComponent per pixel

    void Awake()
    {
        cam = GetComponent<Camera>();
        renderTexture = new Texture2D((int)(Screen.width * RenderResolution), (int)(Screen.height * RenderResolution));
        lights = FindObjectsOfType(typeof(Light)) as Light[];
    }

    void Start()
    {
        RayTrace();
    }

    void OnGUI()
    {
        // Stretch the traced texture over the whole screen.
        GUI.DrawTexture(new Rect(0, 0, Screen.width, Screen.height), renderTexture);
    }

    // Traces one primary ray per texel of renderTexture, then uploads the result.
    void RayTrace()
    {
        for (int x = 0; x < renderTexture.width; x++)
        {
            for (int y = 0; y < renderTexture.height; y++)
            {
                // Map the texel back to screen coordinates before building the ray.
                Ray ray = cam.ScreenPointToRay(new Vector3(x / RenderResolution, y / RenderResolution, 0));
                renderTexture.SetPixel(x, y, TraceRay(ray, Color.black, 0));
            }
        }

        renderTexture.Apply();
    }

    // Returns `color` plus everything gathered along `ray`: surface colour,
    // direct lighting, and recursive reflection/transmission contributions.
    Color TraceRay(Ray ray, Color color, int recursiveLevel)
    {
        if (recursiveLevel < maxRecursion)
        {
            RaycastHit hit;
            if (Physics.Raycast(ray, out hit, maxDist))
            {
                Vector3 viewVector = ray.direction;
                // Nudge the origin off the surface so secondary rays do not
                // immediately re-hit the object they started on.
                Vector3 pos = hit.point + hit.normal * 0.0001f;
                Vector3 normal = hit.normal;

                RayTracerObject rto = hit.collider.gameObject.GetComponent<RayTracerObject>();
                // Objects without a RayTracerObject cannot be shaded — bail out.
                if (rto == null)
                {
                    var go = hit.collider.gameObject;
                    Debug.Log("Raycast hit failure! On " + go.name + " position " + go.transform.position.ToString());
                    return color;
                }

                Material mat = hit.collider.GetComponent<Renderer>().material;
                if (mat.mainTexture)
                {
                    color += (mat.mainTexture as Texture2D).GetPixelBilinear(hit.textureCoord.x, hit.textureCoord.y);
                }
                else
                {
                    color += mat.color;
                }

                color *= TraceLight(rto, viewVector, pos, normal);

                if (rto.reflectiveCoeff > 0)
                {
                    float reflet = 2.0f * Vector3.Dot(viewVector, normal);
                    Ray newRay = new Ray(pos, viewVector - reflet * normal);
                    // BUGFIX: recurse from Color.black. The original passed the
                    // accumulated `color`, so every bounce double-counted the
                    // surface colour it had already gathered.
                    color += rto.reflectiveCoeff * TraceRay(newRay, Color.black, recursiveLevel + 1);
                }

                if (rto.transparentCoeff > 0)
                {
                    // Continue straight through the surface, offset to its far side.
                    Ray newRay = new Ray(hit.point - hit.normal * 0.0001f, viewVector);
                    // BUGFIX: same double-counting fix as the reflective branch.
                    color += rto.transparentCoeff * TraceRay(newRay, Color.black, recursiveLevel + 1);
                }
            }
        }

        return color;
    }

    // Sums ambient light plus the contribution of every enabled scene light.
    Color TraceLight(RayTracerObject rto, Vector3 viewVector, Vector3 pos, Vector3 normal)
    {
        Color c = RenderSettings.ambientLight;

        foreach (Light light in lights)
        {
            if (light.enabled)
            {
                c += LightTrace(rto, light, viewVector, pos, normal);
            }
        }
        return c;
    }

    // Direct-lighting contribution of one light at the shaded point, with a
    // shadow ray test. Returns black when the point is shadowed, back-facing,
    // out of range, or outside a spot light's cone.
    Color LightTrace(RayTracerObject rto, Light light, Vector3 viewVector, Vector3 pos, Vector3 normal)
    {
        switch (light.type)
        {
            case LightType.Directional:
            {
                Vector3 direction = -light.transform.forward;
                float dot = Vector3.Dot(direction, normal);
                if (dot <= 0)
                {
                    return Color.black;
                }
                // Shadow ray: anything toward the light within maxDist blocks it.
                if (Physics.Raycast(pos, direction, maxDist))
                {
                    return Color.black;
                }
                float contribution = SurfaceContribution(rto, viewVector, normal, direction, dot);
                return light.color * light.intensity * contribution;
            }
            case LightType.Point:
            case LightType.Spot:
            {
                Vector3 direction = (light.transform.position - pos).normalized;
                float dot = Vector3.Dot(normal, direction);
                float distance = Vector3.Distance(pos, light.transform.position);
                if (distance >= light.range || dot <= 0)
                {
                    return Color.black;
                }
                if (light.type == LightType.Spot)
                {
                    // Crude cone test, preserved from the original implementation.
                    float dot2 = Vector3.Dot(-light.transform.forward, direction);
                    if (dot2 <= (1 - light.spotAngle / 180))
                    {
                        return Color.black;
                    }
                }
                // Shadow ray only needs to reach as far as the light itself.
                if (Physics.Raycast(pos, direction, distance))
                {
                    return Color.black;
                }
                float contribution = SurfaceContribution(rto, viewVector, normal, direction, dot);
                return contribution == 0 ? Color.black : light.color * light.intensity * contribution;
            }
        }
        return Color.black;
    }

    // Scalar Lambert + Phong + Blinn-Phong contribution shared by all light
    // types (the original duplicated this block three times in the switch).
    // `lightDir` is the unit direction from the surface toward the light;
    // `dot` is Dot(normal, lightDir), already known to be positive.
    float SurfaceContribution(RayTracerObject rto, Vector3 viewVector, Vector3 normal, Vector3 lightDir, float dot)
    {
        float contribution = 0f;

        if (rto.lambertCoeff > 0)
        {
            contribution += dot * rto.lambertCoeff;
        }
        if (rto.reflectiveCoeff > 0)
        {
            if (rto.phongCoeff > 0)
            {
                float reflet = 2.0f * Vector3.Dot(viewVector, normal);
                Vector3 phongDir = viewVector - reflet * normal;
                float phongTerm = Mathf.Max(Vector3.Dot(phongDir, viewVector), 0.0f);
                contribution += rto.reflectiveCoeff * Mathf.Pow(phongTerm, rto.phongPower) * rto.phongCoeff;
            }
            if (rto.blinnPhongCoeff > 0)
            {
                // BUGFIX: build the half-vector from the actual light direction.
                // The original used -light.transform.forward for every light
                // type, which is only meaningful for directional lights.
                Vector3 blinnDir = lightDir - viewVector;
                if (blinnDir.sqrMagnitude > 0.0f)
                {
                    blinnDir.Normalize();
                    float blinnTerm = Mathf.Max(Vector3.Dot(blinnDir, normal), 0.0f);
                    contribution += rto.reflectiveCoeff * Mathf.Pow(blinnTerm, rto.blinnPhongPower) * rto.blinnPhongCoeff;
                }
            }
        }
        return contribution;
    }
}

這是附着在場景中的對象

using UnityEngine; 
using System.Collections; 

// Per-object shading parameters read by the RayTracer camera script.
public class RayTracerObject : MonoBehaviour
{
    // Diffuse (Lambert) shading weight.
    public float lambertCoeff = 1f;

    // Weight of reflected rays; 0 disables reflection entirely.
    public float reflectiveCoeff = 0f;

    // Phong specular weight and exponent.
    public float phongCoeff = 1f;
    public float phongPower = 2f;

    // Blinn-Phong specular weight and exponent.
    public float blinnPhongCoeff = 1f;
    public float blinnPhongPower = 2f;

    // Weight of rays transmitted through the surface; 0 means opaque.
    public float transparentCoeff = 0f;

    // Flat colour used when the material has no main texture.
    public Color baseColor = Color.gray;

    void Awake()
    {
        // Untextured materials get the configured base colour so the tracer's
        // mat.color lookup returns something meaningful.
        var rend = GetComponent<Renderer>();
        if (rend.material.mainTexture == null)
        {
            rend.material.color = baseColor;
        }
    }
}

我應該如何修改代碼來實現這一點?具體需要怎樣的代碼?

回答

5

儘管在主線程中進行光線追蹤是一個完全可以接受的設計,但在Unity中這可能不是您想要的,因爲它會阻塞其他所有工作。

現在你可以生成一個子線程來執行光線追蹤,並讓主線程呈現結果。但問題在於,這兩種方法都沒有使用GPU,這就違背了使用Unity的初衷。

如何在Unity中用C#實現實時光線追蹤

這一切都取決於你打算使它什麼場景組成以及如何光線追蹤。您可以在低分辨率下實時渲染一些簡單的東西,然而,使用合理的屏幕分辨率和合理的光線反彈級別進行渲染,即投射反射或透射材料的遞歸光線的數量可能會更加困難。

相反,我建議你跟隨光線追蹤領域當前的趨勢:使用被稱爲通用GPU計算(GPGPU)的技術,在GPU上進行實時光線追蹤。nVidia在這個主題上有一些演講,可以在YouTube上找到。Here is my sample Unity GPGPU galaxy simulation,它可能對理解GPGPU有幫助。

樣品GPGPU內核只是告訴你什麼是GPGPU約爲:

// File: Galaxy1Compute.compute

// Each #kernel tells which function to compile; you can have many kernels
#pragma kernel UpdateStars

#include "Galaxy.cginc"

// Threads per group along x; must match the Dispatch call on the CPU side.
#define BLOCKSIZE 128

// Star states, read and written in place every frame.
RWStructuredBuffer<Star> stars;

Texture2D HueTexture;

// refer to http://forum.unity3d.com/threads/163591-Compute-Shader-SamplerState-confusion
SamplerState samplerHueTexture;

// time elapsed since last frame, set from the CPU
float deltaTime;

// BUGFIX: a plain global `const float` is treated as a uniform in HLSL, so
// the initializer is ignored unless the CPU sets the value (it defaults to 0,
// silently disabling the softening term). `static const` makes it a genuine
// compile-time constant.
static const float Softening = 3e4f;
// BUGFIX: parenthesized so the macro expands safely inside any expression.
#define Softening2 (Softening * Softening)

static const float G = 6.67300e-11f;
static const float DefaultMass = 1000000.0f;

// Pre-calculation of G*m*m, assuming all the stars have the same mass.
static const float GMM = G * DefaultMass * DefaultMass;


[numthreads(BLOCKSIZE,1,1)]
void UpdateStars (uint3 id : SV_DispatchThreadID)
{
    uint i = id.x;
    uint numStars, stride;
    stars.GetDimensions(numStars, stride);

    // BUGFIX: bail out before touching the buffer. The original only guarded
    // the final write, so trailing threads in the last dispatched group read
    // stars[i] out of bounds.
    if (i >= numStars)
        return;

    float3 position = stars[i].position;
    float3 velocity = stars[i].velocity;

    // Accumulate gravitational acceleration from every other star (O(n^2)).
    float3 A = float3(0, 0, 0);

    [loop]
    for (uint j = 0; j < numStars; j++)
    {
        if (i != j)
        {
            float3 D = stars[j].position - stars[i].position;
            float r = length(D);
            // Softened inverse-square law avoids the singularity as r -> 0.
            float f = GMM / (r * r + Softening2);
            A += f * normalize(D);
        }
    }

    // Semi-implicit Euler integration.
    velocity += A * deltaTime;
    position += velocity * deltaTime;

    stars[i].velocity = velocity;
    stars[i].position = position;
    stars[i].accelMagnitude = length(A);
}

此外,還有一些關於這個問題精美書籍。 Real-time Volume Graphics,儘管它涵蓋了體積,但它確實涵蓋了投射射線 - 射線追蹤的本質。最難的範例轉變是爲GPGPU寫作,一旦你明白了,編寫GPGPU光線追蹤程序是GPGPU卷着色器的一個簡單步驟。

enter image description here

任何光線追蹤作者都值得擁有的一本精彩大部頭,是Matt Pharr的Physically Based Rendering一書(已有第2版,但我還沒有讀過)。

enter image description here