IEnumerator CubemapToEquirectangular(ComputeBuffer cameraPixelsBuffer, uint[] cameraPixels, ComputeBuffer convertPanoramaResultBuffer, int cameraWidth, int cameraHeight, byte[] pixelValues,
int stride, int panoramaWidth, int panoramaHeight, int ssaaFactor, bool async)
{
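// Converts the captured camera textures into an equirectangular panorama.
// Four paths: stereoscopic GPU (result already computed, read it back),
// stereoscopic CPU (per-pixel reprojection below), monoscopic GPU (compute
// shader dispatched in slices), and monoscopic CPU (delegated to CubemapToEquirectangularCpu).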
if (captureStereoscopic && usingGpuTransform)
{
// The conversion was already done on the GPU earlier; just read back the result
convertPanoramaResultBuffer.GetData(resultPixels);
if (resultPixels[convertPanoramaResultBuffer.count - 1] != BufferSentinelValue)
ReportOutOfGraphicsMemory();
writeOutputPixels(pixelValues, stride, panoramaWidth, panoramaHeight * 2, panoramaHeight * 2, /*yStart*/0);
}
else if (captureStereoscopic && !usingGpuTransform)
{
// TODO: Factor out into separate method
float startTime = Time.realtimeSinceStartup;
float processingTimePerFrame = cpuMillisecondsPerFrame / 1000.0f;
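// Time-slice the loop: process pixels until this frame's CPU budget
// (cpuMillisecondsPerFrame) is spent, then yield and resume next frame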
for (int y = 0; y < panoramaHeight; y++)
for (int x = 0; x < panoramaWidth; x++)
{
float xcoord = (float)x / panoramaWidth;
float ycoord = (float)y / panoramaHeight;
float latitude = (ycoord - 0.5f) * Mathf.PI;
float sinLat = Mathf.Sin(latitude);
float cosLat = Mathf.Cos(latitude);
float longitude = (xcoord * 2.0f - 1.0f) * Mathf.PI;
float sinLong = Mathf.Sin(longitude);
float cosLong = Mathf.Cos(longitude);
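// (xcoord, ycoord) in [0, 1) maps to longitude in [-pi, pi) and latitude in [-pi/2, pi/2)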
// Scale IPD down as latitude moves toward poles to avoid discontinuities
float latitudeNormalized = latitude / (Mathf.PI / 2.0f); // Map to [-1, 1]
float ipdScale = IpdScaleFunction(latitudeNormalized);
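// IpdScaleFunction is defined elsewhere in this class; it returns 1.0f at the
// equator and falls off to 0.0f toward the poles. A minimal sketch of the
// expected shape (an assumption, not necessarily the actual implementation):
// float IpdScaleFunction(float latitudeNormalized)
// {
//     return Mathf.Clamp01(5.0f * (1.0f - Mathf.Abs(latitudeNormalized)));
// }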
float scaledEyeRadius = ipdScale * interpupillaryDistance / 2.0f;
int cameraNum;
float u, v;
float ipdScaleLerp = 1.0f - ipdScale * 5.0f; // Map ipdScale in [0, 0.2] to [1, 0]; blend weight for the pole cap color
// Top/bottom cap
Color colorCap = new Color(0.0f, 0.0f, 0.0f, 0.0f);
if (ipdScaleLerp > 0.0f)
{
Vector3 equirectRayDirection = new Vector3(cosLat * sinLong, sinLat, cosLat * cosLong);
float distance = 1.0f / equirectRayDirection.y;
u = equirectRayDirection.x * distance; v = equirectRayDirection.z * distance;
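// Dividing by y projects the ray onto the y = +/-1 plane, so u and v land in
// [-1, 1] within the cap camera's image, assuming the up/down cap cameras
// render with a 90-degree FOV (tan(halfFov) == 1); no FOV scaling is needed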
if (u * u <= 1.0f && v * v <= 1.0f)
{
if (equirectRayDirection.y > 0.0f)
{
cameraNum = 0;
}
else
{
u = -u;
cameraNum = 1;
}
u = (u + 1.0f) * 0.5f;
v = (v + 1.0f) * 0.5f;
colorCap = GetCameraPixelBilinear(cameraPixels, cameraNum, u, v);
}
}
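// Stereo reprojection: place each eye (i == 0: left, i == 1: right) on a circle
// of radius scaledEyeRadius gazing along dir, find where that gaze ray meets the
// camera circle (radius circleRadius), and blend the two nearest circle-point cameras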
for (int i = 0; i < 2; i++)
{
// The following is equivalent to:
// Quaternion eyesRotation = Quaternion.Euler(0.0f, longitude * 360.0f / (2 * Mathf.PI), 0.0f);
// Vector3 initialEyePosition = (i == 0 ? Vector3.left : Vector3.right) * scaledEyeRadius;
// Vector3 pos = eyesRotation * initialEyePosition; // eye position
// Vector3 dir = eyesRotation * Vector3.forward; // gaze direction
Vector3 dir = new Vector3(sinLong, 0.0f, cosLong);
float angle = (Mathf.PI / 2.0f - Mathf.Acos(scaledEyeRadius / circleRadius));
if (i == 0) angle = -angle;
float circlePointAngle = longitude + angle;
if (circlePointAngle < 0.0f) circlePointAngle += 2 * Mathf.PI;
if (circlePointAngle >= 2 * Mathf.PI) circlePointAngle -= 2 * Mathf.PI;
// Debug.Assert(circlePointAngle >= 0.0f && circlePointAngle < 2 * Mathf.PI);
float circlePointNumber = circlePointAngle / (2 * Mathf.PI) * numCirclePoints;
int circlePoint0 = (int)Mathf.Floor(circlePointNumber) % numCirclePoints;
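// The fractional part of circlePointNumber is the blend weight between the two
// adjacent circle points (applied in the Color.Lerp further below)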
// Get color from each adjacent circle point
Color color0 = new Color(), color1 = new Color();
for (int j = 0; j < 2; j++)
{
int circlePointIdx = (j == 0 ? circlePoint0 : (circlePoint0 + 1) % numCirclePoints);
float cameraPointAngle = 2 * Mathf.PI * circlePointIdx / numCirclePoints;
float sinCameraPointAngle = Mathf.Sin(cameraPointAngle);
float cosCameraPointAngle = Mathf.Cos(cameraPointAngle);
// Equivalent to (using fact that both dir and circlePointNorm are unit vectors):
// Quaternion circlePointRotation = Quaternion.Euler(0.0f, cameraPointAngle * 360.0f / (2 * Mathf.PI), 0.0f);
// Vector3 circlePointNormal = circlePointRotation * Vector3.forward;
// float newLongitude = Mathf.Sign(Vector3.Cross(circlePointNormal, dir).y) * Vector3.Angle(circlePointNormal, dir) * (2 * Mathf.PI) / 360.0f;
float newLongitude = Mathf.Sign(dir.x * cosCameraPointAngle - dir.z * sinCameraPointAngle) *
Mathf.Acos(dir.z * cosCameraPointAngle + dir.x * sinCameraPointAngle);
float cosNewLong = Mathf.Cos(newLongitude);
float sinNewLong = Mathf.Sin(newLongitude);
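// Camera buffer layout implied by the indexing below (CamerasPerCirclePoint == 4):
// cameras 0-1 are the up/down caps, followed by two side cameras per circle point
// (yawed by +/-hFovAdjust, chosen by the sign of newLongitude), then two vertically
// tilted cameras per circle point used as the fallback further down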
// Select which of the two cameras for this point to use and adjust ray to make camera plane perpendicular to axes
cameraNum = 2 + circlePointIdx * (CamerasPerCirclePoint / 2) + (newLongitude >= 0.0f ? 1 : 0);
float longitudeAdjust = (newLongitude >= 0.0f ? -hFovAdjust : hFovAdjust);
float longSum = newLongitude + longitudeAdjust;
// Equivalent to:
// Vector3 textureRayDir = Quaternion.Euler(-latitude * 360.0f / (2 * Mathf.PI), newLongitude * 360.0f / (2 * Mathf.PI), 0.0f) * Vector3.forward;
// Vector3 textureRayDirAdjusted = Quaternion.Euler(0.0f, longitudeAdjust * 360.0f / (2 * Mathf.PI), 0.0f) * textureRayDir;
Vector3 textureRayDirAdjusted = new Vector3(cosLat * Mathf.Sin(longSum), sinLat, cosLat * Mathf.Cos(longSum));
u = textureRayDirAdjusted.x / textureRayDirAdjusted.z / tanHalfHFov;
v = -textureRayDirAdjusted.y / textureRayDirAdjusted.z / tanHalfVFov;
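// Perspective-divide onto the camera's normalized image plane; u and v in [-1, 1]
// (with positive z) mean the ray falls inside the camera frustum. tanHalfHFov and
// tanHalfVFov are precomputed from the capture cameras' field of view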
// The side cameras overlap vertically a great deal, so reject v near the vertical edges of the left/right cameras to avoid artifact pixels; fall back to the up/down cameras instead
if (! (textureRayDirAdjusted.z > 0.0f && u * u <= 1.0f && v * v <= 1.0f - 0.1f) )
{
cameraNum = 2 + numCirclePoints * (CamerasPerCirclePoint / 2) + circlePointIdx * (CamerasPerCirclePoint / 2) + (latitude >= 0.0f ? 1 : 0);
float latitudeAdjust = (latitude >= 0.0f ? vFovAdjust : -vFovAdjust);
float cosLatAdjust = Mathf.Cos(latitudeAdjust);
float sinLatAdjust = Mathf.Sin(latitudeAdjust);
// Equivalent to:
// textureRayDirAdjusted = Quaternion.Euler(latitudeAdjust * 360.0f / (2 * Mathf.PI), 0.0f, 0.0f) * textureRayDir;
textureRayDirAdjusted = new Vector3(cosLat * sinNewLong,
cosLatAdjust * sinLat - cosLat * cosNewLong * sinLatAdjust,
sinLatAdjust * sinLat + cosLat * cosNewLong * cosLatAdjust);
u = textureRayDirAdjusted.x / textureRayDirAdjusted.z / tanHalfHFov;
v = -textureRayDirAdjusted.y / textureRayDirAdjusted.z / tanHalfVFov;
// Debug.Assert(ipdScaleLerp >= 1.0 || (textureRayDirAdjusted.z > 0.0f && u * u <= 1.0f && v * v <= 1.0f));
}
u = (u + 1.0f) * 0.5f;
v = (v + 1.0f) * 0.5f;
Color col = GetCameraPixelBilinear(cameraPixels, cameraNum, u, v);
if (j == 0) color0 = col; else color1 = col;
}
Color32 c = Color.Lerp(color0, color1, circlePointNumber - Mathf.Floor(circlePointNumber));
if (colorCap.a > 0.0f && ipdScaleLerp > 0.0f)
c = Color.Lerp(c, colorCap, ipdScaleLerp);
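// Write BGRA32 output; the left eye (i == 0) fills rows [0, panoramaHeight) and
// the right eye the rows below it in the over/under layout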
int outputIdx = stride * (y + panoramaHeight * i) + x * 4;
pixelValues[outputIdx + 0] = c.b;
pixelValues[outputIdx + 1] = c.g;
pixelValues[outputIdx + 2] = c.r;
pixelValues[outputIdx + 3] = 255;
}
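// Check the frame budget only once every 256 columns to keep the check cheap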
if ((x & 0xFF) == 0 && Time.realtimeSinceStartup - startTime > processingTimePerFrame)
{
yield return null; // Wait until next frame
startTime = Time.realtimeSinceStartup;
}
}
}
else if (!captureStereoscopic && usingGpuTransform)
{
int sliceHeight = (panoramaHeight + ResultBufferSlices - 1) / ResultBufferSlices;
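// Convert in ResultBufferSlices horizontal slices so the result buffer and each
// GetData() readback stay small enough to fit in graphics memory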
Log("Invoking GPU shader for equirectangular reprojection");
int endYNegative = (int)Mathf.Floor(panoramaHeight * 0.25f);
int startYPositive = (int)Mathf.Ceil(panoramaHeight * 0.75f);
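// Slices entirely within the bottom quarter of the panorama (latitude below -45 degrees)
// or the top quarter (above +45 degrees) go to specialized kernels, presumably because
// those bands sample only the downward or upward cameras; the general kernel covers the rest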
for (int sliceNum = 0; sliceNum < ResultBufferSlices; sliceNum++)
{
int startSlice = sliceNum * sliceHeight;
int endSlice = Math.Min(startSlice + sliceHeight, panoramaHeight);
convertPanoramaShader.SetInt("startY", startSlice);
convertPanoramaShader.SetInt("sliceHeight", endSlice - startSlice);
if (endSlice <= endYNegative)
convertPanoramaShader.Dispatch(convertPanoramaYNegativeKernelIdx, (panoramaWidth + threadsX - 1) / threadsX, (sliceHeight + threadsY - 1) / threadsY, 1);
else if (startSlice >= startYPositive)
convertPanoramaShader.Dispatch(convertPanoramaYPositiveKernelIdx, (panoramaWidth + threadsX - 1) / threadsX, (sliceHeight + threadsY - 1) / threadsY, 1);
else
convertPanoramaShader.Dispatch(convertPanoramaKernelIdx, (panoramaWidth + threadsX - 1) / threadsX, (sliceHeight + threadsY - 1) / threadsY, 1);
convertPanoramaResultBuffer.GetData(resultPixels);
if (resultPixels[convertPanoramaResultBuffer.count - 1] != BufferSentinelValue)
ReportOutOfGraphicsMemory();
writeOutputPixels(pixelValues, stride, panoramaWidth, sliceHeight, panoramaHeight, startSlice);
}
}
else // if (!captureStereoscopic && !usingGpuTransform)
{
if (async)
yield return StartCoroutine(CubemapToEquirectangularCpu(cameraPixels, cameraWidth, cameraHeight, pixelValues,
stride, panoramaWidth, panoramaHeight, ssaaFactor, async));
else
{
var enumerator = CubemapToEquirectangularCpu(cameraPixels, cameraWidth, cameraHeight, pixelValues,
stride, panoramaWidth, panoramaHeight, ssaaFactor, async);
while (enumerator.MoveNext()) { }
}
}
}