/// <summary>
/// Coroutine that captures a full panoramic (equirectangular) screenshot, optionally
/// stereoscopic, by rendering the scene cameras in multiple directions, converting the
/// resulting cubemap-style textures to an equirectangular projection (GPU compute shader
/// or CPU fallback), saving the image via System.Drawing, and optionally uploading it.
/// </summary>
/// <param name="filenameBase">Base file name (no extension); the image format extension is appended.</param>
/// <param name="async">
/// True to spread work across frames (yielding between steps and fading the screen);
/// false to run each sub-step synchronously by draining its enumerator in-place.
/// </param>
public IEnumerator CaptureScreenshotAsyncHelper(string filenameBase, bool async)
{
    if (async)
        while (Capturing)
            yield return null; // If CaptureScreenshot() was called programmatically multiple times, serialize the coroutines
    Capturing = true;

    // Give subclasses/users a veto; abort with the failure sound if capture is refused.
    if (!OnCaptureStart())
    {
        audioSource.PlayOneShot(failSound);
        Capturing = false;
        yield break;
    }

    // Have to refresh cameras each frame during video in case cameras or image effects change - consider an option for this.
    Camera[] cameras = GetCaptureCameras();
    // Sort by depth so lower-depth cameras render first, matching Unity's normal compositing order.
    Array.Sort(cameras, (x, y) => x.depth.CompareTo(y.depth));
    if (cameras.Length == 0)
    {
        Debug.LogWarning("No cameras found to capture");
        audioSource.PlayOneShot(failSound);
        Capturing = false;
        yield break;
    }

    // Need to do this first in case we need to reinitialize
    // (MSAA is incompatible with deferred rendering, so fall back to no AA and rebuild render targets).
    if (antiAliasing != AntiAliasing._1)
    {
        foreach (Camera c in cameras)
        {
            if (c.actualRenderingPath == RenderingPath.DeferredLighting ||
                c.actualRenderingPath == RenderingPath.DeferredShading)
            {
                Debug.LogWarning("CapturePanorama: Setting Anti Aliasing=1 because at least one camera in deferred mode. Use SSAA setting or Antialiasing image effect if needed.");
                antiAliasing = AntiAliasing._1;
                Reinitialize();
                break;
            }
        }
    }

    Log("Starting panorama capture");
    if (!captureEveryFrame && startSound != null && Camera.main != null)
    {
        audioSource.PlayOneShot(startSound);
    }

    // Attach a screen-fade component to every capture camera so we can fade to black
    // while rendering (hides the visible camera swinging during capture).
    List<ScreenFadeControl> fadeControls = new List<ScreenFadeControl>();
    foreach (Camera c in cameras)
    {
        var fadeControl = c.gameObject.AddComponent<ScreenFadeControl>();
        fadeControl.fadeMaterial = fadeMaterial;
        fadeControls.Add(fadeControl);
    }
    SetFadersEnabled(fadeControls, false);

    if (fadeDuringCapture && async)
        yield return StartCoroutine(FadeOut(fadeControls));

    // Make sure black is shown before we start - sometimes two frames are needed
    for (int i = 0; i < 2; i++)
        yield return new WaitForEndOfFrame();

    // Initialize compute buffers - do here instead of in Reinitialize() to work around error on Destroy()
    ComputeBuffer convertPanoramaResultBuffer = null;
    ComputeBuffer forceWaitResultConvertPanoramaStereoBuffer = null;
    if (usingGpuTransform)
    {
        if (captureStereoscopic)
        {
            // Stereo output is two stacked panoramas (left/right), hence * 2.
            convertPanoramaResultBuffer =
                new ComputeBuffer(/*count*/panoramaWidth * panoramaHeight * 2 + 1, /*stride*/4); // + 1 for sentinel
            convertPanoramaStereoShader.SetBuffer(renderStereoIdx, "result", convertPanoramaResultBuffer);

            // One-element buffer read back after dispatch to force the GPU to finish
            // (GetData blocks until the shader that wrote it completes).
            forceWaitResultConvertPanoramaStereoBuffer = new ComputeBuffer(/*count*/1, /*stride*/4);
            convertPanoramaStereoShader.SetBuffer(renderStereoIdx, "forceWaitResultBuffer", forceWaitResultConvertPanoramaStereoBuffer);
        }
        else
        {
            // Mono path converts the panorama in horizontal slices to bound GPU memory usage.
            int sliceHeight = (panoramaHeight + ResultBufferSlices - 1) / ResultBufferSlices;
            convertPanoramaResultBuffer =
                new ComputeBuffer(/*count*/panoramaWidth * sliceHeight + 1, /*stride*/4); // + 1 for sentinel
            foreach (int kernelIdx in convertPanoramaKernelIdxs)
                convertPanoramaShader.SetBuffer(kernelIdx, "result", convertPanoramaResultBuffer);
        }
    }

    // How many camera textures are resident in the GPU pixel buffer at once. For the
    // stereoscopic GPU path we keep only a circular window of circle points (plus the
    // top/bottom caps) to reduce VRAM; otherwise all camera renders are kept.
    int cameraPixelsBufferNumTextures = numCameras;
    overlapTextures = 0;
    int circlePointCircularBufferSize = 0;
    if (captureStereoscopic && usingGpuTransform)
    {
        overlapTextures = ssaaFactor == 1 ? 1 : 2; // Overlap of 1 supports blending between circle points, overlap of 2 supports it even with SSAA at boundaries
        circlePointCircularBufferSize = 1 + overlapTextures;
        // 2 + for top/bottom, and divide by 2 because we're doing left/right and up/down separately
        cameraPixelsBufferNumTextures = Math.Min(numCameras, 2 + (CamerasPerCirclePoint / 2) * circlePointCircularBufferSize);
    }

    ComputeBuffer cameraPixelsBuffer = new ComputeBuffer(/*count*/cameraPixelsBufferNumTextures * cameraWidth * cameraHeight + 1, /*stride*/4);
    textureToBufferShader.SetBuffer(textureToBufferIdx, "result", cameraPixelsBuffer);

    // Set up sentinels to detect out of graphics memory
    // (the shader writes a known value at the last index; if it's missing after readback,
    // the allocation silently failed or was evicted).
    textureToBufferShader.SetInt("sentinelIdx", cameraPixelsBuffer.count - 1);
    if (usingGpuTransform && !captureStereoscopic)
    {
        convertPanoramaShader.SetInt("cameraPixelsSentinelIdx", cameraPixelsBuffer.count - 1);
        convertPanoramaShader.SetInt("sentinelIdx", convertPanoramaResultBuffer.count - 1);
        foreach (int kernelIdx in convertPanoramaKernelIdxs)
            convertPanoramaShader.SetBuffer(kernelIdx, "cameraPixels", cameraPixelsBuffer);
    }
    if (usingGpuTransform && captureStereoscopic)
    {
        convertPanoramaStereoShader.SetInt("cameraPixelsSentinelIdx", cameraPixelsBuffer.count - 1);
        convertPanoramaStereoShader.SetBuffer(renderStereoIdx, "cameraPixels", cameraPixelsBuffer);
    }

    // GPU-sync buffer for the texture-to-buffer shader (same forceWait readback trick as above).
    ComputeBuffer forceWaitResultTextureToBufferBuffer = new ComputeBuffer(/*count*/1, /*stride*/4);
    textureToBufferShader.SetBuffer(textureToBufferIdx, "forceWaitResultBuffer", forceWaitResultTextureToBufferBuffer);

    float startTime = Time.realtimeSinceStartup;

    // Capture the current HMD orientation so the panorama can be rendered in a fixed
    // world frame (head rotation is cancelled out below, unless useDefaultOrientation).
    Quaternion headOrientation = Quaternion.identity;
#if OVR_SUPPORT
    if (OVRManager.display != null)
    {
        headOrientation = OVRManager.display.GetHeadPose(0.0).orientation;
    }
#endif
#if UNITY_5_1
    if (VRSettings.enabled && VRSettings.loadedDevice != VRDeviceType.None)
    {
        headOrientation = InputTracking.GetLocalRotation(0);
    }
#endif

    Log("Rendering camera views");
    foreach (Camera c in cameras)
        Log("Camera name: " + c.gameObject.name);

    // Pre-collect each camera's OnRenderImage-style image effect methods so the copy
    // camera can replay them when rendering on the cameras' behalf.
    var methodMap = new Dictionary<Camera, List<ImageEffectCopyCamera.InstanceMethodPair>>();
    foreach (Camera c in cameras)
        methodMap[c] = ImageEffectCopyCamera.GenerateMethodList(c);

    // Need to extract each cubemap into a Texture2D so we can read the pixels, but Unity bug
    // prevents this with antiAliasing: http://issuetracker.unity3d.com/issues/texture2d-dot-readpixels-fails-if-rendertexture-has-anti-aliasing-set
    // We copy the cubemap textures using a shader as a workaround.

    string suffix = "." + FormatToExtension(imageFormat);
    string filePath = "";
    // Save in separate thread to avoid hiccups
    string imagePath = saveImagePath;
    if (imagePath == null || imagePath == "")
    {
        // Default to the project root (one level above Assets).
        imagePath = Application.dataPath + "/..";
    }

    convertPanoramaStereoShader.SetInt("circlePointCircularBufferSize", circlePointCircularBufferSize);

    // Loop bookkeeping for the interleaved render/convert pipeline:
    //   writeIdx                          - slot in cameraPixelsBuffer receiving the next render
    //   nextCirclePointStart              - first circle point of the next conversion batch
    //   nextCirclePointCircularBufferStart- where that batch starts in the circular buffer
    int nextCirclePointCircularBufferStart = 0, nextCirclePointStart = 0, writeIdx = 0;
    // In the GPU stereo path, extra "overlap" renders are appended so adjacent circle
    // points can be blended, hence ilimit may exceed numCameras.
    int ilimit = usingGpuTransform ? numCameras + overlapTextures * CamerasPerCirclePoint : numCameras;
    // Indices [2, leftRightPhaseEnd) are the left/right-looking renders; the rest are up/down.
    int leftRightPhaseEnd = (ilimit - 2) / 2 + 2;
    int circlePointsRendered = 0;
    int saveCubemapImageNum = 0;

    Log("Changing quality level");
    int saveQualityLevel = QualitySettings.GetQualityLevel();
    bool qualitySettingWasChanged = false;
    string[] qualitySettingNames = QualitySettings.names;
    if (qualitySetting != qualitySettingNames[saveQualityLevel]) // Don't change if already set to it
    {
        for (int i = 0; i < qualitySettingNames.Length; i++)
        {
            string name = qualitySettingNames[i];
            if (name == qualitySetting)
            {
                QualitySettings.SetQualityLevel(i, /*applyExpensiveChanges*/false); // applyExpensiveChanges causes trouble
                qualitySettingWasChanged = true;
            }
        }
        if (qualitySetting != "" && !qualitySettingWasChanged)
        {
            Debug.LogError("Quality setting specified for CapturePanorama is invalid, ignoring.", this);
        }
    }

    BeforeRenderPanorama();

    RenderTexture.active = null;
    for (int i = 0; i < ilimit; i++)
    {
        // Don't use RenderToCubemap - it causes problems with compositing multiple cameras, and requires
        // more temporary VRAM. Just render cube map manually.
        if (captureStereoscopic)
        {
            if (i < 2)
            {
                // 0, 1 are top/bottom caps
                camGos[1].transform.localPosition = Vector3.zero;
                camGos[1].transform.localRotation = Quaternion.Euler((i == 0) ? 90.0f : -90.0f, 0.0f, 0.0f);
            }
            else
            {
                // Do all left/right textures first then all up/down textures
                int iAdjusted, numInGroupBias;
                if (i < leftRightPhaseEnd)
                {
                    iAdjusted = i - 2;
                    numInGroupBias = 0;
                }
                else
                {
                    iAdjusted = i - leftRightPhaseEnd;
                    numInGroupBias = 2;
                }

                // Which point on the stereo circle this render belongs to, and which of the
                // per-point sub-renders (left/right eye or up/down) it is.
                int circlePointNum = (iAdjusted / (CamerasPerCirclePoint / 2)) % numCirclePoints;
                int numInGroup = iAdjusted % (CamerasPerCirclePoint / 2) + numInGroupBias;

                float circleAngle = 360.0f * circlePointNum / numCirclePoints;
                // Offset the camera onto the stereo circle at this point's angle.
                camGos[1].transform.localPosition = Quaternion.Euler(0.0f, circleAngle, 0.0f) * Vector3.forward * circleRadius;

                if (numInGroup < 2)
                    // Left/right pair: yaw adjusted by +/- hFovAdjustDegrees around the circle angle.
                    camGos[1].transform.localRotation = Quaternion.Euler(0.0f, circleAngle + (numInGroup == 0 ? -hFovAdjustDegrees : hFovAdjustDegrees), 0.0f);
                else
                    // Up/down pair: pitch adjusted by +/- vFovAdjustDegrees.
                    camGos[1].transform.localRotation = Quaternion.Euler((numInGroup == 2 ? -vFovAdjustDegrees : vFovAdjustDegrees), circleAngle, 0.0f);
                // A circle point counts as rendered once its second sub-render of the phase completes.
                if (numInGroup == 1 || numInGroup == 3) circlePointsRendered++;
            }
        }
        else
        {
            // Mono: plain 6-face cubemap orientations.
            switch ((CubemapFace)i)
            {
                case CubemapFace.PositiveX: camGos[1].transform.localRotation = Quaternion.Euler( 0.0f, 90.0f, 0.0f); break;
                case CubemapFace.NegativeX: camGos[1].transform.localRotation = Quaternion.Euler( 0.0f, -90.0f, 0.0f); break;
                case CubemapFace.PositiveY: camGos[1].transform.localRotation = Quaternion.Euler( 90.0f, 0.0f, 0.0f); break;
                case CubemapFace.NegativeY: camGos[1].transform.localRotation = Quaternion.Euler(-90.0f, 0.0f, 0.0f); break;
                case CubemapFace.PositiveZ: camGos[1].transform.localRotation = Quaternion.Euler( 0.0f, 0.0f, 0.0f); break;
                case CubemapFace.NegativeZ: camGos[1].transform.localRotation = Quaternion.Euler( 0.0f, 180.0f, 0.0f); break;
            }
        }

        foreach (Camera c in cameras)
        {
            // To get the camera in the right eye position, migrate the camera transform to camGos[0]
            camGos[2].transform.parent = null;

            cam.CopyFrom(c);

            // TODO: Determine if we should reset matrices of the camera in case it's using custom transform matrices

            camGos[0].transform.localPosition = cam.transform.localPosition;
            camGos[0].transform.localRotation = cam.transform.localRotation;
            camGos[2].transform.parent = camGos[1].transform;
            cam.transform.localPosition = Vector3.zero;
            cam.transform.localRotation = Quaternion.identity;

            // Only enable the copy-camera script if this source camera has image effects to replay.
            copyCameraScript.enabled = methodMap[c].Count > 0;
            copyCameraScript.onRenderImageMethods = methodMap[c];
            cam.fieldOfView = vFov; // hFov inferred from aspect ratio of target

            // Question: Should we adjust near clip in stereoscopic mode based on circleRadius?
            // (avoids clipping that doesn't occur in normal camera view but might lead to unexpected bad effects)

            // Cancel out the HMD's head rotation so the panorama uses a fixed world frame.
            camGos[0].transform.rotation *= Quaternion.Inverse(headOrientation);
            if (useDefaultOrientation)
                camGos[0].transform.rotation = Quaternion.identity;

            cam.targetTexture = cubemapRenderTexture;
            // Aspect ratio must be determined by size of render target. This is critical when Unity native VR is enabled.
            cam.ResetAspect();

            // Temporarily set original camera to same position/rotation/field of view as
            // rendering camera during render. If any image effects examine camera
            // orientation/FOV this will ensure they behave correctly.
            Vector3 savePosition = c.transform.position;
            Quaternion saveRotation = c.transform.rotation;
            float saveFieldOfView = c.fieldOfView;
            RenderTexture saveRenderTexture = c.targetTexture;

            c.transform.position = cam.transform.position;
            c.transform.rotation = cam.transform.rotation;
            c.fieldOfView = cam.fieldOfView;

            cam.Render();

            // Restore the original camera's state exactly.
            c.transform.position = savePosition;
            c.transform.rotation = saveRotation;
            c.fieldOfView = saveFieldOfView;
            c.targetTexture = saveRenderTexture;
        }

        // Read one pixel from texture to force render to complete before continuing
        RenderTexture.active = cubemapRenderTexture;
        forceWaitTexture.ReadPixels(new Rect(cameraWidth - 1, cameraHeight - 1, 1, 1), 0, 0);

        // Copy the rendered texture into slot writeIdx of the GPU pixel buffer, then
        // read back the forceWait value to block until the copy has completed.
        int forceWaitValue = 1000000 + i;
        textureToBufferShader.SetInt("forceWaitValue", forceWaitValue);
        textureToBufferShader.SetTexture(textureToBufferIdx, "source", cubemapRenderTexture);
        textureToBufferShader.SetInt("startIdx", writeIdx * cameraWidth * cameraHeight);
        textureToBufferShader.Dispatch(textureToBufferIdx, (cameraWidth + threadsX - 1) / threadsX, (cameraHeight + threadsY - 1) / threadsY, 1);

        uint[] forceWaitResult = new uint[1];
        forceWaitResultTextureToBufferBuffer.GetData(forceWaitResult);
        if (forceWaitResult[0] != forceWaitValue)
            Debug.LogError("TextureToBufferShader: Unexpected forceWaitResult value " + forceWaitResult[0] + ", should be " + forceWaitValue);

        // Optionally dump individual cubemap faces/renders for debugging. The index
        // conditions skip the duplicate overlap renders in the stereo GPU path.
        if (saveCubemap &&
            ((i < 2) ||
             (i >= 2 && i < 2 + numCirclePoints * 2) ||
             (i >= leftRightPhaseEnd && i < leftRightPhaseEnd + numCirclePoints * 2)))
        {
            // This is really slow - retrieving all cameraPixels data for just a portion of it. But this is mainly useful for debugging anyway.
            cameraPixelsBuffer.GetData(cameraPixels);
            if (cameraPixels[cameraPixelsBuffer.count - 1] != BufferSentinelValue)
                ReportOutOfGraphicsMemory();

            SaveCubemapImage(cameraPixels, filenameBase, suffix, imagePath, saveCubemapImageNum, writeIdx);
            saveCubemapImageNum++;
        }

        writeIdx++;
        if (writeIdx >= cameraPixelsBufferNumTextures) writeIdx = 2; // Leave top/bottom in indexes 0/1

        // For stereoscopic GPU transform, interleave capture and rendering to decrease VRAM consumption
        // Trigger a conversion batch when a circle point just finished AND either the
        // circular buffer is full or we're at the end of a phase (left/right or up/down).
        if (captureStereoscopic && usingGpuTransform &&
            ((i - 2) + 1) % (CamerasPerCirclePoint / 2) == 0 &&
            (circlePointsRendered - nextCirclePointStart >= circlePointCircularBufferSize || i + 1 == 2 + (ilimit - 2) / 2 || i + 1 == ilimit))
        {
            forceWaitValue = 2000000 + i;
            convertPanoramaStereoShader.SetInt("forceWaitValue", forceWaitValue);
            convertPanoramaStereoShader.SetInt("leftRightPass", i < leftRightPhaseEnd ? 1 : 0);
            convertPanoramaStereoShader.SetInt("circlePointStart", nextCirclePointStart);
            convertPanoramaStereoShader.SetInt("circlePointEnd", cameraPixelsBufferNumTextures < numCameras ? circlePointsRendered : circlePointsRendered + 1);
            convertPanoramaStereoShader.SetInt("circlePointCircularBufferStart", nextCirclePointCircularBufferStart);
            convertPanoramaStereoShader.Dispatch(renderStereoIdx, (panoramaWidth + threadsX - 1) / threadsX, (panoramaHeight + threadsY - 1) / threadsY, 2);

            // Block until the conversion batch finishes (same forceWait readback trick).
            forceWaitResultConvertPanoramaStereoBuffer.GetData(forceWaitResult);
            if (forceWaitResult[0] != forceWaitValue)
                Debug.LogError("ConvertPanoramaStereoShader: Unexpected forceWaitResult value " + forceWaitResult[0] + ", should be " + forceWaitValue);

            if (i + 1 == leftRightPhaseEnd)
            {
                // Left/right phase done - reset counters for the up/down phase.
                // NOTE(review): adding circlePointCircularBufferSize then taking % of the same
                // value is a no-op (x stays x) - presumably intended to reset/wrap the buffer
                // start for the new phase; confirm against the compute shader's indexing.
                nextCirclePointCircularBufferStart = (nextCirclePointCircularBufferStart + circlePointCircularBufferSize) % circlePointCircularBufferSize;
                nextCirclePointStart = 0;
                circlePointsRendered = 0;
            }
            else
            {
                // Keep overlapTextures worth of circle points resident so the next batch
                // can blend across the boundary.
                nextCirclePointStart = circlePointsRendered - overlapTextures;
                nextCirclePointCircularBufferStart = (nextCirclePointCircularBufferStart + circlePointCircularBufferSize - overlapTextures) % circlePointCircularBufferSize;
            }
        }

        RenderTexture.active = null;
    }

    AfterRenderPanorama();

    Log("Resetting quality level");
    if (qualitySettingWasChanged)
        QualitySettings.SetQualityLevel(saveQualityLevel, /*applyExpensiveChanges*/false);

    // If we need to access the cubemap pixels on the CPU, retrieve them now
    if (saveCubemap || !usingGpuTransform)
    {
        cameraPixelsBuffer.GetData(cameraPixels);
        if (cameraPixels[cameraPixelsBuffer.count - 1] != BufferSentinelValue)
            ReportOutOfGraphicsMemory();
    }

    RenderTexture.active = null;

    if (saveCubemap &&
        !(captureStereoscopic && usingGpuTransform)) // In this mode images are saved during capture
    {
        // Save cubemap while still faded, as fast as possible - should be pretty quick
        for (int i = 0; i < numCameras; i++)
        {
            int bufferIdx = i;
            SaveCubemapImage(cameraPixels, filenameBase, suffix, imagePath, i, bufferIdx);
        }
    }

    // If this is not here, the fade-in will drop frames.
    for (int i = 0; i < 2; i++)
        yield return new WaitForEndOfFrame();

    if (async && !usingGpuTransform && fadeDuringCapture)
        yield return StartCoroutine(FadeIn(fadeControls));

    filePath = imagePath + "/" + filenameBase + suffix;

    bool producedImageSuccess = false;

    {
        // Write pixels directly to .NET Bitmap for saving out
        // Based on https://msdn.microsoft.com/en-us/library/5ey6h79d%28v=vs.110%29.aspx
        // (stereoscopic output stacks the two eye panoramas vertically, hence height * 2).
        Bitmap bitmap = new Bitmap(panoramaWidth, panoramaHeight * (captureStereoscopic ? 2 : 1), PixelFormat.Format32bppArgb);
        var bmpData = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height), ImageLockMode.WriteOnly, bitmap.PixelFormat);
        IntPtr ptr = bmpData.Scan0;
        byte[] pixelValues = new byte[Math.Abs(bmpData.Stride) * bitmap.Height];

        // Convert to equirectangular projection - use compute shader for better performance if supported by platform
        if (async)
            yield return StartCoroutine(CubemapToEquirectangular(cameraPixelsBuffer, cameraPixels, convertPanoramaResultBuffer, cameraWidth, cameraHeight, pixelValues, bmpData.Stride, panoramaWidth, panoramaHeight, ssaaFactor, async));
        else
        {
            // Synchronous mode: drain the coroutine's enumerator in place.
            var enumerator = CubemapToEquirectangular(cameraPixelsBuffer, cameraPixels, convertPanoramaResultBuffer, cameraWidth, cameraHeight, pixelValues, bmpData.Stride, panoramaWidth, panoramaHeight, ssaaFactor, async);
            while (enumerator.MoveNext()) { }
        }

        // Success is signalled via the first pixel's alpha byte being fully opaque
        // (presumably written by CubemapToEquirectangular on success - confirm there).
        producedImageSuccess = (pixelValues[3] == 255);

        yield return null;
        System.Runtime.InteropServices.Marshal.Copy(pixelValues, 0, ptr, pixelValues.Length);
        bitmap.UnlockBits(bmpData);
        yield return null;

        Log("Time to take panorama screenshot: " + (Time.realtimeSinceStartup - startTime) + " sec");

        if (producedImageSuccess)
        {
            // Save on a background thread so disk/encode time doesn't hitch the frame.
            var thread = new Thread(() =>
            {
                Log("Saving equirectangular image");
                // TODO: Use better image processing library to get decent JPEG quality out.
                bitmap.Save(filePath, FormatToDrawingFormat(imageFormat));
            });
            thread.Start();
            while (thread.ThreadState == ThreadState.Running)
                if (async)
                    yield return null;
                else
                    Thread.Sleep(0);
        }
        bitmap.Dispose();
    }

    // Release ComputeBuffers - all done with these
    foreach (var buffer in new ComputeBuffer[] {
        convertPanoramaResultBuffer,
        cameraPixelsBuffer,
        forceWaitResultConvertPanoramaStereoBuffer,
        forceWaitResultTextureToBufferBuffer })
        if (buffer != null)
            buffer.Release();
    convertPanoramaResultBuffer = cameraPixelsBuffer = null;

    if (async && usingGpuTransform && fadeDuringCapture)
        yield return StartCoroutine(FadeIn(fadeControls));

    // Remove the temporary fade components added at the start of capture.
    foreach (ScreenFadeControl fadeControl in fadeControls)
    {
        Destroy(fadeControl);
    }
    fadeControls.Clear();

    if (producedImageSuccess && uploadImages && !captureEveryFrame)
    {
        Log("Uploading image");
        imageFileBytes = File.ReadAllBytes(filePath);
        string mimeType = FormatMimeType(imageFormat);
        if (async)
            yield return StartCoroutine(UploadImage(imageFileBytes, filenameBase + suffix, mimeType, async));
        else
        {
            var enumerator = UploadImage(imageFileBytes, filenameBase + suffix, mimeType, async);
            while (enumerator.MoveNext()) { }
        }
    }
    else
    {
        if (!producedImageSuccess)
        {
            if (failSound != null && Camera.main != null)
                audioSource.PlayOneShot(failSound);
        }
        else if (!captureEveryFrame && doneSound != null && Camera.main != null)
        {
            audioSource.PlayOneShot(doneSound);
        }
        // NOTE(review): Capturing is only cleared on this non-upload branch; presumably
        // UploadImage resets it when the upload path completes - confirm, otherwise a
        // successful upload would leave Capturing == true and block future captures.
        Capturing = false;
    }
}