I'm trying to export the depth texture created during the rendering pass of an ARSCNView. To do that, I wrote code that renders the SCNScene in the background with a custom MTLRenderPassDescriptor
. However, when I trace the resources bound to the GPU using the GPU Capture tool, I find that the custom MTLRenderPassDescriptor
is ignored in the SCNRenderer.render method.
I used this code to render the SCNScene offscreen:
// Render Pass - render sceneView
renderer.scene = sceneView.scene
renderer.pointOfView = sceneView.pointOfView
renderer.render(atTime: 0, viewport: viewport, commandBuffer: commandBuffer, passDescriptor: renderPassDescriptor)
When I check the resources in GPU Capture, the renderer produces its own frame texture and depth texture rather than the ones described in renderPassDescriptor
. According to the documentation, this should not happen. I also tested this without an ARKit session, and it works as I expected (the renderer uses the texture resources described in the renderPassDescriptor
).
How can I fix this? Is this a SceneKit bug?
Image 1. I found in GPU capture that the depth texture is not linked to blit pass.
Image 2. The color attachment texture address is 0x144a4f310
Image 3. The depth attachment texture address is 0x144a50050
Image 4. Bound textures to render method have different addresses
Here is a Minimal Working Example:
import UIKit
import SceneKit
import ARKit
/// Minimal example that renders an `ARSCNView`'s scene offscreen through a
/// secondary `SCNRenderer` with a custom `MTLRenderPassDescriptor`, then blits
/// the depth attachment into a CPU-visible `MTLBuffer`.
///
/// NOTE(review): this reproduces the reported issue — with an active
/// `ARSession`, the render pass appears to use SCNRenderer's own targets
/// instead of the ones supplied in the pass descriptor.
class ViewController: UIViewController, ARSCNViewDelegate, ARSessionDelegate {

    @IBOutlet var sceneView: ARSCNView!
    var ship: SCNNode!
    var device: MTLDevice!
    var renderer: SCNRenderer!
    var commandQueue: MTLCommandQueue!

    // Offscreen render-target dimensions, in pixels.
    let textureSizeX = 2732
    let textureSizeY = 2048

    // Viewport covering the full offscreen target.
    lazy var viewport = CGRect(x: 0, y: 0, width: CGFloat(textureSizeX), height: CGFloat(textureSizeY))

    override func viewDidLoad() {
        super.viewDidLoad()

        // Set the view's delegates and show diagnostics.
        sceneView.delegate = self
        sceneView.session.delegate = self
        sceneView.showsStatistics = true
        sceneView.scene = SCNScene()

        // Fail loudly with a descriptive message instead of a bare
        // force-unwrap chain if the asset or the named node is missing.
        guard let shipNode = SCNScene(named: "art.scnassets/ship.scn")?
            .rootNode.childNode(withName: "shipMesh", recursively: true) else {
            fatalError("Missing art.scnassets/ship.scn or its 'shipMesh' node.")
        }
        ship = shipNode
        sceneView.scene.rootNode.addChildNode(ship)

        // Background renderer used for the offscreen pass.
        device = MTLCreateSystemDefaultDevice()!
        renderer = SCNRenderer(device: device, options: nil)
        commandQueue = device.makeCommandQueue()!
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        guard let referenceImages = ARReferenceImage.referenceImages(inGroupNamed: "AR Resources", bundle: nil) else {
            fatalError("Missing expected asset catalog resources.")
        }
        // Create and run a world-tracking session with image detection.
        let configuration = ARWorldTrackingConfiguration()
        configuration.detectionImages = referenceImages
        sceneView.session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // Pause the view's session
        sceneView.session.pause()
    }

    // MARK: - ARSCNViewDelegate

    /// Triggers the offscreen render once per on-screen frame.
    func renderer(_ renderer: SCNSceneRenderer, willRenderScene scene: SCNScene, atTime time: TimeInterval) {
        doRender()
    }

    /// Renders the AR scene into custom attachments, then copies the depth
    /// attachment into a shared-storage buffer for CPU access.
    func doRender() {
        let renderPassDescriptor = makeRenderPassDescriptor()
        let commandBuffer = commandQueue.makeCommandBuffer()!

        // Render pass — draw the AR scene into the offscreen attachments.
        renderer.scene = sceneView.scene
        renderer.pointOfView = sceneView.pointOfView
        renderer.render(atTime: 0, viewport: viewport, commandBuffer: commandBuffer, passDescriptor: renderPassDescriptor)

        // Blit pass — copy the depth texture into a CPU-readable buffer.
        // `textureSizeX`/`textureSizeY` are already Int; no conversion needed.
        let bytesPerPixel = 4 // .depth32Float: one 32-bit float per pixel
        let pixelCount = textureSizeX * textureSizeY
        let depthImageBuffer = device.makeBuffer(length: bytesPerPixel * pixelCount, options: .storageModeShared)!
        let blitEncoder = commandBuffer.makeBlitCommandEncoder()!
        blitEncoder.copy(from: renderPassDescriptor.depthAttachment.texture!,
                         sourceSlice: 0,
                         sourceLevel: 0,
                         sourceOrigin: MTLOriginMake(0, 0, 0),
                         sourceSize: MTLSizeMake(textureSizeX, textureSizeY, 1),
                         to: depthImageBuffer,
                         destinationOffset: 0,
                         destinationBytesPerRow: bytesPerPixel * textureSizeX,
                         destinationBytesPerImage: bytesPerPixel * pixelCount,
                         options: .depthFromDepthStencil)
        blitEncoder.endEncoding()
        commandBuffer.commit()

        // Wait until the depth-buffer copy is done.
        // NOTE(review): blocking inside willRenderScene stalls SceneKit's
        // render thread every frame; prefer addCompletedHandler in production.
        commandBuffer.waitUntilCompleted()
    }

    /// Builds a pass descriptor with freshly allocated color (`rgba8Unorm_srgb`)
    /// and depth (`depth32Float`) attachments, both stored after the pass.
    /// NOTE(review): allocating new textures every frame is wasteful; cache
    /// them once the descriptor is actually honored.
    func makeRenderPassDescriptor() -> MTLRenderPassDescriptor {
        let renderPassDescriptor = MTLRenderPassDescriptor()

        let frameBufferDescriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: .rgba8Unorm_srgb, width: textureSizeX, height: textureSizeY, mipmapped: false)
        frameBufferDescriptor.usage = [.renderTarget, .shaderRead]
        renderPassDescriptor.colorAttachments[0].texture = device.makeTexture(descriptor: frameBufferDescriptor)!
        renderPassDescriptor.colorAttachments[0].loadAction = .clear
        renderPassDescriptor.colorAttachments[0].storeAction = .store
        renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColorMake(1, 1, 1, 1.0)

        // `.store` keeps the depth contents so the blit pass can read them.
        let depthBufferDescriptor: MTLTextureDescriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: .depth32Float, width: textureSizeX, height: textureSizeY, mipmapped: false)
        depthBufferDescriptor.usage = .renderTarget
        renderPassDescriptor.depthAttachment.texture = device.makeTexture(descriptor: depthBufferDescriptor)
        renderPassDescriptor.depthAttachment.loadAction = .clear
        renderPassDescriptor.depthAttachment.storeAction = .store
        return renderPassDescriptor
    }

    // MARK: - ARSessionDelegate

    func session(_ session: ARSession, didFailWithError error: Error) {
        // Present an error message to the user
    }

    func session(_ session: ARSession, didUpdate frame: ARFrame) {
    }

    func sessionWasInterrupted(_ session: ARSession) {
        // Inform the user that the session has been interrupted, for example, by presenting an overlay
    }

    func sessionInterruptionEnded(_ session: ARSession) {
        // Reset tracking and/or remove existing anchors if consistent tracking is required
    }
}