如何解决ARKit – 如何实现 SCNRenderer().audioListener?
我正在尝试在我的应用程序中实现这个新的位置音频。
使用 tapGesture 我在我的场景中插入一个 Drone,然后我将 sound.mp3 附加到它
/// Places a clone of `node` at the tapped location on an already-detected
/// horizontal plane and attaches a looping positional audio source to it.
/// - Parameters:
///   - node: Template node created at app start-up; it is cloned, not moved.
///   - recognizer: The tap gesture that triggered the placement.
///   - view: The AR view whose session performs the raycast.
func tapToPlace(node: SCNNode, recognizer: UITapGestureRecognizer, view: ARSCNView) {
    debugPrint("tap")
    guard gameState == .placeObject else { return }

    // UIKit hit-testing and SceneKit scene-graph mutation must happen on the
    // main thread — the original dispatched this to a global background queue.
    DispatchQueue.main.async {
        let tappedScreen = recognizer.location(in: view)
        guard let query = view.raycastQuery(from: tappedScreen,
                                            allowing: .existingPlaneGeometry,
                                            alignment: .horizontal),
              let worldTransform = view.session.raycast(query).first?.worldTransform
        else { return }

        // Duplicate the template node and drop it at the raycast hit point.
        let newNode = node.clone()
        newNode.position = SCNVector3(worldTransform.columns.3.x,
                                      worldTransform.columns.3.y,
                                      worldTransform.columns.3.z)

        // Set up positional audio. Guard instead of force-unwrapping so a
        // missing resource degrades gracefully instead of crashing.
        guard let audio = SCNAudioSource(fileNamed: "sound.mp3") else {
            debugPrint("sound.mp3 not found in bundle")
            return
        }
        audio.loops = true
        audio.volume = 0.3
        audio.rate = 0.1
        audio.isPositional = true // NOTE(review): spatialization needs a mono file — confirm asset
        audio.shouldStream = false
        audio.load()
        newNode.addAudioPlayer(SCNAudioPlayer(source: audio))

        view.scene.rootNode.addChildNode(newNode)
    }
}
阅读苹果文档，似乎需要设置这个 audioListener: SCNNode 属性。
我该怎么做？
我尝试了以下方法:
我得到了相机的当前位置。
/// Returns the camera's current world transform for the AR session.
/// Falls back to the identity matrix when no frame is available yet
/// (e.g. before tracking starts) instead of crashing — the original
/// returned an implicitly-unwrapped optional that was nil in that case.
func trackCameraLocation(arView: ARSCNView) -> simd_float4x4 {
    return arView.session.currentFrame?.camera.transform ?? matrix_identity_float4x4
}
我在 session(_:didUpdate:) 回调中调用了这个方法，这样每一帧都能获得准确的用户位置。
/// ARSession delegate callback — fires once per rendered frame.
/// Caches the latest camera pose so other callbacks can read it.
func session(_ session: ARSession, didUpdate frame: ARFrame) {
    self.cameraLocation = trackCameraLocation(arView: self.sceneView)
}
获得相机位置后，我在 didAdd 节点回调方法中尝试设置 audioListener：
/// Registers the renderer's own camera node as the spatial-audio listener.
/// `pointOfView` tracks the device pose automatically every frame, so there
/// is no need to rebuild a detached node from a cached camera transform —
/// the original created a node that was never added to the scene and whose
/// position was never updated, so positional audio stayed silent/static.
func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
    if renderer.audioListener == nil {
        renderer.audioListener = renderer.pointOfView
    }
}
但是..没有任何效果..听不到任何声音..我只看到我家地板上的 Drone 静音了。
寻求帮助或解释，如何实现 ARKit 的这个新特性。
预先感谢您的帮助。
在这里放置我的音频文件:
解决方法
试试这个解决方案。它适用于 VR 应用和 AR 应用。
import SceneKit
// MARK: - SCNSceneRendererDelegate
extension ViewController: SCNSceneRendererDelegate {

    /// Per-frame render callback: positions the listener node and keeps it
    /// registered as the renderer's audio listener.
    func renderer(_ renderer: SCNSceneRenderer, updateAtTime time: TimeInterval) {
        let ears = self.listener
        ears.position.z = -20 // change listener's position here
        renderer.audioListener = ears
    }
}
...
/// Minimal SceneKit view controller demonstrating positional audio:
/// a sphere carries an audio player, and a separate listener node
/// (moved per frame by the renderer delegate) hears it spatially.
class ViewController: UIViewController {
    let scene = SCNScene()
    let audioNode = SCNNode()
    let listener = SCNNode()

    override func viewDidLoad() {
        super.viewDidLoad()

        // Guard instead of the original `as!` force-cast.
        guard let sceneView = self.view as? SCNView else { return }
        sceneView.scene = self.scene
        sceneView.backgroundColor = .black
        sceneView.delegate = self

        // Visible marker the sound is attached to.
        let node = SCNNode()
        node.geometry = SCNSphere(radius: 0.05)
        // BUG FIX: the original `SCNVector3(0,-2)` does not compile —
        // SCNVector3 takes three components; place the sphere 2 m ahead.
        node.position = SCNVector3(0, 0, -2)
        self.scene.rootNode.addChildNode(node)

        // MONO AUDIO is required for spatialization. Guard instead of
        // the original `path!` / `SCNAudioSource(url:)!` force-unwraps.
        guard let url = Bundle.main.url(forResource: "art.scnassets/audio", withExtension: "mp3"),
              let source = SCNAudioSource(url: url)
        else {
            assertionFailure("art.scnassets/audio.mp3 missing from bundle")
            return
        }
        source.isPositional = true
        source.shouldStream = false
        source.load()
        let player = SCNAudioPlayer(source: source)

        node.addChildNode(audioNode)
        // THE LOCATION OF THIS LINE IS IMPORTANT: attach the player only
        // after audioNode is part of the scene graph.
        audioNode.addAudioPlayer(player)
        audioNode.addChildNode(self.listener)
    }
}
版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 dio@foxmail.com 举报,一经查实,本站将立刻删除。