So why, when doing PBR, do we care whether we work in gamma space or linear space? PBR stands for Physically Based Rendering. Since the rendering is based on physics, the values we sample from a texture during shading must match the values found in the real environment. In gamma space, however, the colors stored in the texture have already been altered by the encoding gamma, so the values the shader samples no longer match the real-world values; how could that still be called physically based rendering?
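To make that distortion concrete, here is a minimal sketch of my own (not from the original article) using the common gamma = 2.2 approximation; real sRGB uses a slightly different piecewise curve. An albedo of 0.5 measured in the real environment is stored in the texture as roughly 0.73; a gamma-space pipeline feeds that 0.73 straight into the lighting math, while a linear-space pipeline first decodes it back to 0.5.

using UnityEngine;

public class GammaVsLinearSample : MonoBehaviour {
    void Start () {
        float linearAlbedo = 0.5f;                               // reflectance measured in the real environment
        float storedTexel = Mathf.Pow(linearAlbedo, 1f / 2.2f);  // encoding gamma applied when the texture was authored, ≈ 0.73
        float gammaSpaceSample = storedTexel;                    // gamma pipeline: value used as-is, too bright for lighting
        float linearSpaceSample = Mathf.Pow(storedTexel, 2.2f);  // linear pipeline: decoded back to ≈ 0.5 before lighting
        Debug.Log(string.Format("stored: {0:F3}  gamma-space sample: {1:F3}  linear-space sample: {2:F3}",
            storedTexel, gammaSpaceSample, linearSpaceSample));
    }
}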
// Assumed class fields (the original post only shows the method bodies):
//   int objCount                    – number of spheres in each group
//   int colorID                     – cached ID of the "_Color" shader property
//   MaterialPropertyBlock prop      – block reused for the second group
//   GameObject[] listObj, listProp  – the two groups of spheres
// Stopwatch below comes from System.Diagnostics, which is why the logging calls
// are written as UnityEngine.Debug.Log to avoid the Debug name clash.
void Start () {
    colorID = Shader.PropertyToID("_Color");
    prop = new MaterialPropertyBlock();
    var obj = Resources.Load("Perfabs/Sphere") as GameObject;
    listObj = new GameObject[objCount];
    listProp = new GameObject[objCount];

    // First group: spheres on the left, colored through renderer.material
    for (int i = 0; i < objCount; ++i)
    {
        int x = Random.Range(-6, -2);
        int y = Random.Range(-4, 4);
        int z = Random.Range(-4, 4);
        GameObject o = Instantiate(obj);
        o.name = i.ToString();
        o.transform.localPosition = new Vector3(x, y, z);
        listObj[i] = o;
    }

    // Second group: spheres on the right, colored through MaterialPropertyBlock
    for (int i = 0; i < objCount; ++i)
    {
        int x = Random.Range(2, 6);
        int y = Random.Range(-4, 4);
        int z = Random.Range(-4, 4);
        GameObject o = Instantiate(obj);
        o.name = (objCount + i).ToString();
        o.transform.localPosition = new Vector3(x, y, z);
        listProp[i] = o;
    }
}
Then, in the Update function, we respond to input; here I use the up and down arrow keys to trigger the two paths.
void Update () {
    // Down arrow: assign a random color to every sphere in the first group
    // through renderer.material.SetColor, and time how long the loop takes.
    if (Input.GetKeyDown(KeyCode.DownArrow))
    {
        Stopwatch sw = new Stopwatch();
        sw.Start();
        for (int i = 0; i < objCount; ++i)
        {
            float r = Random.Range(0, 1f);
            float g = Random.Range(0, 1f);
            float b = Random.Range(0, 1f);
            listObj[i].GetComponent<Renderer>().material.SetColor("_Color", new Color(r, g, b, 1));
        }
        sw.Stop();
        UnityEngine.Debug.Log(string.Format("material total: {0:F4} ms", (float)sw.ElapsedTicks * 1000 / Stopwatch.Frequency));
    }

    // Up arrow: do the same for the second group, but through a reused
    // MaterialPropertyBlock and the cached _Color property ID.
    if (Input.GetKeyDown(KeyCode.UpArrow))
    {
        Stopwatch sw = new Stopwatch();
        sw.Start();
        for (int i = 0; i < objCount; ++i)
        {
            float r = Random.Range(0, 1f);
            float g = Random.Range(0, 1f);
            float b = Random.Range(0, 1f);
            listProp[i].GetComponent<Renderer>().GetPropertyBlock(prop);
            prop.SetColor(colorID, new Color(r, g, b, 1));
            listProp[i].GetComponent<Renderer>().SetPropertyBlock(prop);
        }
        sw.Stop();
        UnityEngine.Debug.Log(string.Format("MaterialPropertyBlock total: {0:F4} ms", (float)sw.ElapsedTicks * 1000 / Stopwatch.Frequency));
    }
}
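One way to see what the timing difference is actually measuring (a small sketch of my own, not from the original post): accessing renderer.material clones the shared material into a per-renderer copy the first time it is touched, while SetPropertyBlock leaves the material asset alone. After pressing the down arrow once, the first group's material names carry the " (Instance)" suffix; the second group still references the original material. The helper name LogMaterialInstancing is hypothetical.

// Hypothetical helper; call it after both key paths have run at least once.
void LogMaterialInstancing()
{
    // First group: renderer.material has cloned the material, so sharedMaterial
    // now points at a per-object copy named "... (Instance)".
    UnityEngine.Debug.Log("material path:       " + listObj[0].GetComponent<Renderer>().sharedMaterial.name);

    // Second group: MaterialPropertyBlock never touched the material asset,
    // so all of these spheres still share the original material.
    UnityEngine.Debug.Log("property block path: " + listProp[0].GetComponent<Renderer>().sharedMaterial.name);
}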