Making things follow your face with ARFaceTracking and GKAgent
Introduction
Last time I wrote an article about trying Agents, Goals, and Behaviors in 3D, and before that I had played with FaceTracking in an article about stretching your nose with AR FaceTracking, so I stuck the two together.
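The core of the combination is small: one GKAgent3D stands in for the face, every sphere gets its own agent with a seek goal toward it, and a GKComponentSystem steps the simulation each frame. Roughly, it looks like this (a minimal sketch with illustrative names, not the actual project code):

import SceneKit
import GameplayKit

// One agent mirrors the face, another one seeks it (names are illustrative).
let faceAgent = GKAgent3D()
let sphereAgent = GKAgent3D()
sphereAgent.maxSpeed = 1
sphereAgent.maxAcceleration = 2
sphereAgent.behavior = GKBehavior(goals: [GKGoal(toSeekAgent: faceAgent)])

let system = GKComponentSystem(componentClass: GKAgent3D.self)
system.addComponent(sphereAgent)

// Per frame: feed in the face position, step the simulation, read the result back out.
faceAgent.position = SIMD3<Float>(0, 0, -0.5)          // in the real code this comes from the face anchor's node
system.update(deltaTime: 1.0 / 60.0)
let spherePosition = SCNVector3(sphereAgent.position)   // assign this to the sphere's SCNNode

The full view controller below does exactly this, just with 500 spheres and with the face position supplied by ARFaceTrackingConfiguration.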
Code
import UIKit
import ARKit
import SceneKit
import GameplayKit
class ViewController: UIViewController {
    @IBOutlet weak var sceneView: ARSCNView! {
        didSet {
            sceneView.delegate = self
        }
    }

    var prevTime: TimeInterval = 0
    let serialQueue = DispatchQueue(label: "queue")

    /// Agent that stands in for the face; every sphere agent seeks it.
    let faceAgent = GKAgent3D()
    let agentSystem = GKComponentSystem(componentClass: GKAgent3D.self)

    /// Node that ARKit attaches to the detected face anchor.
    var faceNode = SCNNode()

    /// Invisible face geometry used for occlusion only (writes depth but no color).
    lazy var virtualFaceNode: SCNNode = {
        let node = SCNNode()
        let device = sceneView.device!
        let glassesGeometry = ARSCNFaceGeometry(device: device)!
        glassesGeometry.firstMaterial!.colorBufferWriteMask = []
        node.geometry = glassesGeometry
        return node
    }()

    var objects: [SCNNode] = []
    var objectAgents: [GKAgent] = []
    // MARK: - LifeCycle

    override func viewDidLoad() {
        super.viewDidLoad()
        createObject(count: 500)
        let configuration = ARFaceTrackingConfiguration()
        configuration.isLightEstimationEnabled = true
        sceneView.session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
    }
    // MARK: - PrivateMethod

    private func createObject(count: Int) {
        for _ in 0..<count {
            let agent: GKAgent3D = {
                // Randomly sized, randomly colored sphere scattered around the scene.
                let object: SCNNode = {
                    let node = SCNNode()
                    node.geometry = SCNSphere(radius: CGFloat.random(in: 0.001...0.01))
                    let material = SCNMaterial()
                    material.lightingModel = .physicallyBased
                    material.diffuse.contents = UIColor(red: CGFloat.random(in: 0.0...1.0),
                                                        green: CGFloat.random(in: 0.0...1.0),
                                                        blue: CGFloat.random(in: 0.0...1.0),
                                                        alpha: CGFloat.random(in: 0.5...1.0))
                    node.geometry?.materials = [material]
                    node.position = SCNVector3(CGFloat.random(in: -3...3),
                                               CGFloat.random(in: -3...3),
                                               CGFloat.random(in: -3...3))
                    return node
                }()
                sceneView.scene.rootNode.addChildNode(object)

                // Each sphere gets its own agent with a seek goal toward the face agent.
                let agent = GKAgent3D()
                agent.position = SIMD3<Float>(object.position.x,
                                              object.position.y,
                                              object.position.z)
                agent.maxAcceleration = Float.random(in: 1...3)
                agent.maxSpeed = Float.random(in: 0.1...2)
                agent.delegate = self
                agent.behavior = GKBehavior(goals: [GKGoal(toSeekAgent: faceAgent)])
                objects.append(object)
                return agent
            }()
            objectAgents.append(agent)
            agentSystem.addComponent(agent)
        }
    }
}
extension ViewController: GKAgentDelegate {
    // After each simulation step, copy the agent's position back onto its SCNNode.
    func agentDidUpdate(_ agent: GKAgent) {
        if let agent = agent as? GKAgent3D,
           let index = objectAgents.firstIndex(where: { $0 == agent }) {
            let object = objects[index]
            object.position = SCNVector3(agent.position)
        }
    }
}
extension ViewController: ARSCNViewDelegate {
    func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
        faceNode = node
        serialQueue.async {
            self.faceNode.addChildNode(self.virtualFaceNode)
        }
    }

    func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
        guard let faceAnchor = anchor as? ARFaceAnchor else { return }
        let geometry = virtualFaceNode.geometry as! ARSCNFaceGeometry
        geometry.update(from: faceAnchor.geometry)
    }

    func renderer(_ renderer: SCNSceneRenderer, updateAtTime time: TimeInterval) {
        let delta = prevTime == 0 ? 0 : time - prevTime
        prevTime = time
        agentSystem.update(deltaTime: delta)
        // Move the agent's position to the face's coordinates.
        faceAgent.position = SIMD3<Float>(x: faceNode.position.x,
                                          y: faceNode.position.y,
                                          z: faceNode.position.z)
    }
}
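To recap the flow: updateAtTime moves faceAgent to wherever ARKit last placed the face node, agentSystem.update(deltaTime:) lets every sphere agent steer toward it, and agentDidUpdate(_:) copies the resulting agent positions back onto the sphere nodes, so the spheres end up swarming around your face.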