ARKit + SceneKit: not able to move node anchored to face - position

EDIT: link to project
I have a simple box anchored to the user's face, and I'd like to change its XYZ position with 3 different UISlider controls. I have tried several methods, but nothing happens. The object stays static.
I think the code can speak better:
import Foundation
import UIKit
import ARKit
import SceneKit
/// Demo view controller: anchors a box to the user's face with ARKit face
/// tracking and lets UISliders move/scale it.
///
/// Fixes applied:
/// * `viewWillDisappear` called `super.viewWillAppear` (copy/paste bug).
/// * The box node was handed to ARKit as the face-anchor node itself, so
///   ARKit overwrote its transform on every tracked frame and the sliders
///   appeared dead. The box is now a *child* of an empty container node
///   (`rootNode`), so slider-set local positions survive anchor updates.
/// * Restored `@IBOutlet`/`@IBAction` (garbled to `#IBOutlet`/`#IBAction`
///   in the scraped text).
class SceneKitViewController: UIViewController {

    @IBOutlet private var sceneView: ARSCNView!

    /// Node given to ARKit for the face anchor; its transform is owned by ARKit.
    var rootNode: SCNNode!
    /// The visible box, moved relative to `rootNode` by the sliders.
    private var boxNode: SCNNode!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Face tracking requires a TrueDepth camera; bail out silently otherwise.
        guard ARFaceTrackingConfiguration.isSupported else { return }
        sceneView.delegate = self
        sceneView.session.delegate = self
        sceneView.automaticallyUpdatesLighting = true
        sceneView.showsStatistics = true
        boxNode = box
        rootNode = SCNNode()
        rootNode.addChildNode(boxNode)
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        let configuration = ARFaceTrackingConfiguration()
        configuration.maximumNumberOfTrackedFaces = 1
        configuration.isLightEstimationEnabled = true
        sceneView.session.run(configuration, options: [])
    }

    override func viewWillDisappear(_ animated: Bool) {
        // BUGFIX: was `super.viewWillAppear(animated)`.
        super.viewWillDisappear(animated)
        sceneView.session.pause()
    }

    /// Builds a fresh 10 cm box node at the local origin.
    var box: SCNNode {
        let box = SCNBox(width: 0.1, height: 0.1, length: 0.1, chamferRadius: 0)
        let node = SCNNode(geometry: box)
        node.position = SCNVector3(0, 0, 0)
        return node
    }

    @IBAction func setXAxis(sender: UISlider) {
        setNewPosition(SCNVector3(sender.value, boxNode.position.y, boxNode.position.z))
    }

    @IBAction func setYAxis(sender: UISlider) {
        setNewPosition(SCNVector3(boxNode.position.x, sender.value, boxNode.position.z))
    }

    @IBAction func setZAxis(sender: UISlider) {
        setNewPosition(SCNVector3(boxNode.position.x, boxNode.position.y, sender.value))
    }

    @IBAction func setScale(sender: UISlider) {
        boxNode.scale = SCNVector3(sender.value, sender.value, sender.value)
    }

    /// Moves the box within the face-anchored container.
    private func setNewPosition(_ vector: SCNVector3) {
        print("moving to \(vector)")
        updatePositionOf(boxNode, withPosition: vector)
    }

    /// Replaces `node`'s transform with a pure translation to `position`.
    func updatePositionOf(_ node: SCNNode, withPosition position: SCNVector3) {
        var translationMatrix = matrix_identity_float4x4
        translationMatrix.columns.3.x = position.x
        translationMatrix.columns.3.y = position.y
        translationMatrix.columns.3.z = position.z
        node.transform = SCNMatrix4(translationMatrix)
    }
}
// MARK: - ARSCNViewDelegate
extension SceneKitViewController: ARSCNViewDelegate {
// Supplies the node ARKit should attach to a newly detected face anchor.
// NOTE(review): ARKit drives the returned node's transform on every frame
// while the face is tracked, so any position written to this exact node
// (e.g. from the slider actions) is overwritten — the usual fix is to
// return a container node and move a child of it instead.
func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {
if let _ = anchor as? ARFaceAnchor {
return rootNode
}
return nil
}
// Intentionally empty: no per-frame work is needed for this demo.
func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
}
}
What am I not understanding here? Thanks!

Related

Core data not triggering immediate refresh with SwiftUI

I'm at a loss with this problem I've been troubleshooting the past few days.
I am drawing a custom shape, and the user can move one of the points around via a drag gesture. I want the shape to redraw, live, when moving the point around.
Here is some sample code that is working using CGPoint.
import SwiftUI
/// A polyline/polygon `Shape` built from `points`.
///
/// Fixed: the properties were declared `@State` (garbled to `#State` in the
/// scraped text). `@State` belongs in a `View`, not a `Shape` — a Shape is a
/// plain value recreated on each render — so ordinary stored properties are
/// correct here. The call site `ShapeTest(points:)` is unchanged.
struct ShapeTest: Shape {
    var points: [CGPoint]
    var closed: Bool = true

    /// Connects the points in order; closes the subpath when `closed` is true.
    func path(in rect: CGRect) -> Path {
        var path = Path()
        if (points.count > 0) {
            path.move(to: points.first!)
            path.addLines(points)
            if closed { path.closeSubpath() }
        }
        return path
    }
}
/// Working demo: dragging moves the shape's last point live; double-tap adds
/// a new point at the origin. (Restored `@State` — garbled to `#State` in
/// the scraped text.)
struct RedrawEdgeTestCGPoint: View {
    @State var points = [CGPoint]()
    /// Start position of the point being dragged; nil when no drag is active.
    @State var originalPosition: CGPoint? = nil

    private var movePointDragGesture: some Gesture {
        DragGesture(minimumDistance: 0, coordinateSpace: .local)
            .onChanged { value in
                if points.isEmpty {
                    return
                }
                // Capture the start position once, on the first change event.
                if originalPosition == nil {
                    originalPosition = CGPoint(x: points.last!.x, y: points.last!.y)
                }
                let lastIndex = points.count - 1
                points[lastIndex].x = originalPosition!.x + value.translation.width
                points[lastIndex].y = originalPosition!.y + value.translation.height
            }
            .onEnded { value in
                originalPosition = nil
            }
    }

    private var addNewPointGesture: some Gesture {
        TapGesture(count: 2)
            .onEnded {
                points.append(CGPoint())
            }
    }

    var body: some View {
        GeometryReader { geometry in
            Group {
                ShapeTest(points: points)
                    .stroke()
            }
            .contentShape(Rectangle())
            .gesture(movePointDragGesture.simultaneously(with: addNewPointGesture))
        }
    }
}
// Xcode canvas preview for the CGPoint-based demo.
struct RedrawEdgeTestCGPoint_Previews: PreviewProvider {
static var previews: some View {
RedrawEdgeTestCGPoint()
}
}
Now, here is another code sample. This example is generally the same thing, but this is using a Point entity I've defined in Core Data instead of CGPoint.
import SwiftUI
/// Core Data variant of the drag demo — this is the *broken* version from
/// the question. (Restored `@Environment`/`@State` — garbled to `#...` in
/// the scraped text.)
///
/// NOTE(review): `Point` is an NSManagedObject (reference type), so mutating
/// `points[lastIndex].x` never changes the `@State` array value itself;
/// SwiftUI sees no state change and does not redraw until the array is
/// mutated (e.g. by the double-tap `append`).
struct RedrawEdgeTestCoreData: View {
    @Environment(\.managedObjectContext) private var viewContext
    @State var points: [Point] = []
    /// Start position of the point being dragged; nil when no drag is active.
    @State var originalPosition: CGPoint? = nil

    private var movePointDragGesture: some Gesture {
        DragGesture(minimumDistance: 0, coordinateSpace: .local)
            .onChanged { value in
                if points.isEmpty {
                    return
                }
                if originalPosition == nil {
                    originalPosition = CGPoint(x: points.last!.x, y: points.last!.y)
                }
                let lastIndex = points.count - 1
                points[lastIndex].x = originalPosition!.x + value.translation.width
                points[lastIndex].y = originalPosition!.y + value.translation.height
            }
            .onEnded { value in
                originalPosition = nil
            }
    }

    private var addNewPointGesture: some Gesture {
        TapGesture(count: 2)
            .onEnded {
                points.append(Point(context: viewContext))
            }
    }

    var body: some View {
        GeometryReader { geometry in
            Group {
                ShapeTest(points: points.map { CGPoint(x: $0.x, y: $0.y) })
                    .stroke()
            }
            .contentShape(Rectangle())
            .gesture(movePointDragGesture.simultaneously(with: addNewPointGesture))
        }
    }
}
// Canvas preview wired to the in-memory preview persistence container.
struct RedrawEdgeTestCoreData_Previews: PreviewProvider {
static var previews: some View {
RedrawEdgeTestCoreData()
.environment(\.managedObjectContext, PersistenceController.preview.container.viewContext)
}
}
The version utilizing Core Data does not update while dragging. It will redraw when I double tap to add another point.
Any suggestions as to why this is happening?
I used the approach suggested by jnpdx and it works.
import SwiftUI
/// The working fix (per jnpdx): route point mutations through an
/// ObservableObject so the view is explicitly notified.
/// (Restored `@Published` — garbled to `#Published` in the scraped text.)
class PointsViewModel: ObservableObject {
    @Published var points = [Point]()

    /// Moves the point at `index` and manually publishes the change — needed
    /// because mutating a reference-type `Point` in place does not trigger
    /// `@Published` on the array.
    func movePoint(index: Int, x: CGFloat, y: CGFloat) {
        points[index].x = x
        points[index].y = y
        objectWillChange.send()
    }
}
/// Fixed Core Data variant: point moves go through `PointsViewModel`, which
/// publishes each change so the shape redraws live.
/// (Restored `@Environment`/`@ObservedObject`/`@State` — garbled to `#...`
/// in the scraped text.)
struct RedrawEdgeTestCoreData: View {
    @Environment(\.managedObjectContext) private var viewContext
    @ObservedObject var viewModel: PointsViewModel
    /// Start position of the point being dragged; nil when no drag is active.
    @State var originalPosition: CGPoint? = nil

    private var movePointDragGesture: some Gesture {
        DragGesture(minimumDistance: 0, coordinateSpace: .local)
            .onChanged { value in
                if viewModel.points.isEmpty {
                    return
                }
                if originalPosition == nil {
                    originalPosition = CGPoint(x: viewModel.points.last!.x, y: viewModel.points.last!.y)
                }
                let lastIndex = viewModel.points.count - 1
                viewModel.movePoint(index: lastIndex, x: originalPosition!.x + value.translation.width, y: originalPosition!.y + value.translation.height)
            }
            .onEnded { value in
                originalPosition = nil
            }
    }

    private var addNewPointGesture: some Gesture {
        TapGesture(count: 2)
            .onEnded {
                viewModel.points.append(Point(context: viewContext))
            }
    }

    var body: some View {
        GeometryReader { geometry in
            ZStack {
                ShapeTest(points: viewModel.points.map { CGPoint(x: $0.x, y: $0.y) })
                    .stroke()
            }
            .contentShape(Rectangle())
            .gesture(movePointDragGesture.simultaneously(with: addNewPointGesture))
        }
    }
}
// Canvas preview: fresh view model + in-memory preview persistence container.
struct RedrawEdgeTestCoreData_Previews: PreviewProvider {
static var previews: some View {
RedrawEdgeTestCoreData(viewModel: PointsViewModel())
.environment(\.managedObjectContext, PersistenceController.preview.container.viewContext)
}
}

SwiftUI: How to get a manual list to refresh when updating CoreData records?

I have a CoreData table that has several repeated records that don't need to be stored but do need to be displayed in my user interface. I have manually created my arrays based on the CoreData table. I have made them Observable Objects so they should automatically update and I have made them Hashable and Equatable.
My problem is that the list does not update when the database records are updated. This works fine when records are being added.
Here is my much simplified code in full that demonstrates the problem:
import SwiftUI
let persistentContainerQueue = OperationQueue()
let firstNames = ["Michael", "Damon", "Jacques", "Mika", "Fernando", "Kimi", "Lewis", "Jenson", "Sebastion", "Nico"]
let lastNames = ["Schumacher", "Hill", "Villeneuve", "Hakkinen", "Alonso", "Raikkonen", "Hamilton", "Button", "Vettel", "Rosberg"]
// A (person, index) display pair so each stored Person can appear several
// times in the list.
// (Restored `@Published` — garbled to `#Published` in the scraped text.)
class RepeatedPerson: ObservableObject, Hashable
{
    @Published var person: Person
    @Published var index: Int
    @Published var tested: Bool

    init (person: Person, index: Int, tested: Bool)
    {
        self.person = person
        self.index = index
        self.tested = tested
    }

    // Hashing/equality are based on name + index, so two wrappers for the
    // same underlying Person and index compare equal.
    func hash(into hasher: inout Hasher)
    {
        hasher.combine(person.firstName)
        hasher.combine(person.lastName)
        hasher.combine(index)
    }

    static func == (lhs: RepeatedPerson, rhs: RepeatedPerson) -> Bool
    {
        return lhs.person.firstName == rhs.person.firstName &&
            lhs.person.lastName == rhs.person.lastName &&
            lhs.index == rhs.index
    }
}
// Observable container for the list's backing array.
// (Restored `@Published` — garbled to `#Published` in the scraped text.)
class RepeatedPeople: ObservableObject
{
    @Published var people: [RepeatedPerson] = []
}
// Builds the display array: each stored Person appears three times
// (index 1...3). Returns an empty array when the table is empty.
func getRepeatedPeople() -> [RepeatedPerson]
{
var repeatedPeople:[RepeatedPerson] = []
let records = allRecords(Person.self)
for person in records
{
for index in 1...3
{
repeatedPeople.append(RepeatedPerson(person: person, index: index, tested: true))
}
}
return repeatedPeople
}
// Demonstration view from the question (the list fails to refresh on
// "Change Record"). (Restored `@Environment`/`@ObservedObject` — garbled
// to `#...` in the scraped text.)
//
// NOTE(review): `@ObservedObject ... = RepeatedPeople()` recreates the model
// whenever the View value is re-initialized; `@StateObject` is the usual
// choice. "Change Record" also force-indexes `q[0]` and will crash when the
// table is empty. Left as-is: this block is the question's reproduction case.
struct ContentView: View
{
    @Environment(\.managedObjectContext) private var viewContext
    @ObservedObject var repeatedPeople = RepeatedPeople()

    init()
    {
        repeatedPeople.people = getRepeatedPeople()
    }

    var body: some View
    {
        VStack
        {
            List()
            {
                ForEach(repeatedPeople.people, id: \.self)
                { repeatedPerson in
                    Text("\(repeatedPerson.index)) \(repeatedPerson.person.firstName!) \(repeatedPerson.person.lastName!)")
                }
            }
            HStack
            {
                Button("Add Record", action:
                {
                    addItem()
                    repeatedPeople.people = getRepeatedPeople()
                })
                Button("Change Record", action:
                {
                    let q = allRecords(Person.self)
                    let oldLastName = q[0].lastName
                    q[0].lastName = lastNames.randomElement()!
                    print ("changed \(q[0].firstName!) \(oldLastName!) -> \(q[0].firstName!) \(q[0].lastName!)")
                    saveDatabase()
                })
                Button("Reset Database", action:
                {
                    deleteAllRecords(Person.self)
                })
            }
        }
    }

    // Inserts one random Person and saves.
    private func addItem()
    {
        withAnimation
        {
            let newItem = Person(context: viewContext)
            newItem.timestamp = Date()
            newItem.firstName = firstNames.randomElement()!
            newItem.lastName = lastNames.randomElement()!
            print ("added \(newItem.firstName!) \(newItem.lastName!)")
            saveDatabase()
        }
    }
}
// Fetches every record of the given NSManagedObject subclass from the shared
// view context, optionally sorted. Returns [] (and logs) on fetch failure.
func allRecords<T: NSManagedObject>(_ type : T.Type, sort: NSSortDescriptor? = nil) -> [T]
{
let context = PersistenceController.shared.container.viewContext
let request = T.fetchRequest()
if let sortDescriptor = sort
{
request.sortDescriptors = [sortDescriptor]
}
do
{
let results = try context.fetch(request)
// Force-cast is safe here: the fetch request was built from T.
return results as! [T]
}
catch
{
print("Error with request: \(error)")
return []
}
}
// Deletes every stored record of the given entity type, then persists.
func deleteAllRecords<T: NSManagedObject>(_ type : T.Type)
{
    let context = PersistenceController.shared.container.viewContext
    // Fetch everything of this type and hand each object to the context
    // for deletion; the save below commits the removals.
    allRecords(T.self).forEach { context.delete($0) }
    saveDatabase()
}
// Saves the shared view context.
// NOTE(review): this enqueues the save on a background OperationQueue but
// then calls performAndWait on the *view* context, and any save error is
// silently discarded (try?) — confirm this is intentional; saving the view
// context directly on the main queue is the conventional pattern.
func saveDatabase()
{
persistentContainerQueue.addOperation()
{
let context = PersistenceController.shared.container.viewContext
context.performAndWait
{
try? context.save()
}
}
}
To reproduce the problem, add a few records. These will be shown in the list. Then click the 'Change Record' button. The CoreData record will be updated (you can see this the next time you run the app) but the changes will not be shown.
How do I get the new changes to show?
If you add another record the changes will then be shown. A side effect is that the list introduces wild spaces between the records. I have seen this is in other places. Is this a SwiftUI bug?
OK, it turned out to be really quite simple. All I actually had to do was remove some of the @Published wrappers and provide a UUID for the RepeatedPerson record (and use it for == and hash).
import SwiftUI
import CoreData
let persistentContainerQueue = OperationQueue()
let firstNames = ["Michael", "Damon", "Jacques", "Mika", "Fernando", "Kimi", "Lewis", "Jenson", "Sebastion", "Nico"]
let lastNames = ["Schumacher", "Hill", "Villeneuve", "Hakkinen", "Alonso", "Raikkonen", "Hamilton", "Button", "Vettel", "Rosberg"]
// Fixed wrapper from the answer: a stable UUID identity means SwiftUI's list
// diffing is not confused when the underlying Person's attributes change.
class RepeatedPerson: ObservableObject, Hashable
{
// Identity assigned once per wrapper instance.
var id: UUID = UUID()
var index: Int
var person: Person?
init (person: Person, index: Int)
{
self.person = person
self.index = index
}
// Hash and equality are purely identity-based (the UUID), not value-based.
func hash(into hasher: inout Hasher)
{
hasher.combine(id)
}
static func == (lhs: RepeatedPerson, rhs: RepeatedPerson) -> Bool
{
return lhs.id == rhs.id
}
}
// Observable container for the list's backing array.
// (Restored `@Published` — garbled to `#Published` in the scraped text.)
class RepeatedPeople: ObservableObject
{
    @Published var people: [RepeatedPerson] = []
}
// Rebuilds the display array from Core Data: each stored Person appears
// three times (index 1...3).
func getRepeatedPeople() -> [RepeatedPerson]
{
var repeatedPeople:[RepeatedPerson] = []
let records = allRecords(Person.self)
for person in records
{
for index in 1...3
{
repeatedPeople.append(RepeatedPerson(person: person, index: index))
}
}
return repeatedPeople
}
// Working version from the answer: after every mutation the `people` array
// is rebuilt, which fires the @Published change and refreshes the list.
// (Restored `@Environment`/`@ObservedObject` — garbled to `#...` in the
// scraped text.)
//
// NOTE(review): `randomElement()!` in "Change Record" still crashes on an
// empty table; kept as in the original answer.
struct ContentView: View
{
    @Environment(\.managedObjectContext) private var viewContext
    @ObservedObject var repeatedPeople = RepeatedPeople()

    init()
    {
        repeatedPeople.people = getRepeatedPeople()
    }

    var body: some View
    {
        VStack
        {
            List()
            {
                ForEach(repeatedPeople.people, id: \.self)
                { repeatedPerson in
                    Text("\(repeatedPerson.index)) \(repeatedPerson.person!.firstName!) \(repeatedPerson.person!.lastName!)")
                }
            }
            HStack
            {
                Button("Add Record", action:
                {
                    addItem()
                    repeatedPeople.people = getRepeatedPeople()
                })
                Button("Change Record", action:
                {
                    let q = allRecords(Person.self)
                    let r = q.randomElement()!
                    let oldLastName = r.lastName
                    r.lastName = lastNames.randomElement()!
                    print ("changed \(r.firstName!) \(oldLastName!) -> \(r.firstName!) \(r.lastName!)")
                    saveDatabase()
                    repeatedPeople.people = getRepeatedPeople()
                })
                Button("Reset Database", action:
                {
                    print ("Reset database")
                    deleteAllRecords(Person.self)
                    repeatedPeople.people = getRepeatedPeople()
                })
            }
        }
    }

    // Inserts one random Person and saves.
    private func addItem()
    {
        withAnimation
        {
            let newItem = Person(context: viewContext)
            newItem.timestamp = Date()
            newItem.firstName = firstNames.randomElement()!
            newItem.lastName = lastNames.randomElement()!
            print ("added \(newItem.firstName!) \(newItem.lastName!)")
            saveDatabase()
        }
    }
}
// Generic Core Data fetch against the shared view context, with optional
// predicate and sort. Returns [] (and logs) on failure.
func query<T: NSManagedObject>(_ type : T.Type, predicate: NSPredicate? = nil, sort: NSSortDescriptor? = nil) -> [T]
{
let context = PersistenceController.shared.container.viewContext
let request = T.fetchRequest()
if let sortDescriptor = sort
{
request.sortDescriptors = [sortDescriptor]
}
if let predicate = predicate
{
request.predicate = predicate
}
do
{
let results = try context.fetch(request)
// Force-cast is safe here: the fetch request was built from T.
return results as! [T]
}
catch
{
print("Error with request: \(error)")
return []
}
}
// Convenience wrapper: fetch all records of T with an optional sort.
func allRecords<T: NSManagedObject>(_ type : T.Type, sort: NSSortDescriptor? = nil) -> [T]
{
return query(T.self, sort: sort)
}
// Removes every stored record of the given entity type and persists the change.
func deleteAllRecords<T: NSManagedObject>(_ type : T.Type)
{
    let context = PersistenceController.shared.container.viewContext
    // Delete each fetched object; the upcast in the original
    // (`record as NSManagedObject`) was a no-op and is omitted.
    for record in allRecords(T.self)
    {
        context.delete(record)
    }
    saveDatabase()
}
// Saves the shared view context.
// NOTE(review): queues the save onto a background OperationQueue yet uses
// performAndWait on the *view* context and swallows errors with try? —
// confirm intent; saving on the main queue is the conventional pattern.
func saveDatabase()
{
persistentContainerQueue.addOperation()
{
let context = PersistenceController.shared.container.viewContext
context.performAndWait
{
try? context.save()
}
}
}

Convert address to coordinates using MKLocalSearchCompleter and CoreLocation

I have tried to make an app with a textfield to let user input a location, using MKLocalSearchCompleter to complete the searching. After that i would like to get the coordinate and display on the MapKit. However, I failed to get the coordinate using the Geocoder.
/// Publishes MKLocalSearchCompleter results for a SwiftUI search field.
/// (Restored `@Published` — garbled to `#Published` in the scraped text.)
class LocationSearchService: NSObject, ObservableObject, MKLocalSearchCompleterDelegate {
    @Published var searchQuery = ""
    var completer: MKLocalSearchCompleter
    @Published var completions: [MKLocalSearchCompletion] = []
    var cancellable: AnyCancellable?

    override init() {
        completer = MKLocalSearchCompleter()
        super.init()
        // Pipe every query change straight into the completer.
        cancellable = $searchQuery.assign(to: \.queryFragment, on: self.completer)
        completer.delegate = self
    }

    /// Delegate callback: expose the fresh completions to the UI.
    func completerDidUpdateResults(_ completer: MKLocalSearchCompleter) {
        self.completions = completer.results
    }
}
The location manager as follows:
/// Wraps CLLocationManager + CLGeocoder for SwiftUI.
/// (Restored `@Published` — garbled to `#Published` in the scraped text.)
///
/// NOTE(review): the manual `objectWillChange.send()` in each `willSet` is
/// redundant alongside `@Published`, which already publishes changes —
/// harmless, but one mechanism would do.
class LocationManager: NSObject, ObservableObject {
    private let locationManager = CLLocationManager()
    private let geocoder = CLGeocoder()
    let objectWillChange = PassthroughSubject<Void, Never>()

    @Published var status: CLAuthorizationStatus? {
        willSet { objectWillChange.send() }
    }

    @Published var location: CLLocation? {
        willSet { objectWillChange.send() }
    }

    override init() {
        super.init()
        self.locationManager.delegate = self
        self.locationManager.desiredAccuracy = kCLLocationAccuracyBest
        self.locationManager.requestWhenInUseAuthorization()
        self.locationManager.startUpdatingLocation()
    }

    @Published var placemark: CLPlacemark? {
        willSet { objectWillChange.send() }
    }

    /// Reverse-geocodes the current `location` into `placemark`.
    private func lookupLocation() {
        guard let location = self.location else { return }
        geocoder.reverseGeocodeLocation(location, completionHandler: { (places, error) in
            if error == nil {
                self.placemark = places?[0]
            } else {
                self.placemark = nil
            }
        })
    }

    /// Forward-geocodes `address` and stores the first match in
    /// `placemark`/`location`. Asynchronous: results arrive via the published
    /// properties, so this cannot be used inline inside a Text(...) — which
    /// is exactly the compile error noted in the question's ContentView.
    func getCoordinate(address: String) {
        geocoder.geocodeAddressString(address, completionHandler: { (places, error) in
            if error == nil {
                self.placemark = places?[0]
                self.location = self.placemark?.location
            } else {
                self.placemark = nil
                self.location = nil
            }
        })
    }
}
// MARK: - CLLocationManagerDelegate
extension LocationManager: CLLocationManagerDelegate {
// Mirror authorization changes into the published `status`.
func locationManager(_ manager: CLLocationManager, didChangeAuthorization status: CLAuthorizationStatus) {
self.status = status
}
// Store the newest fix and reverse-geocode it. `.last` is the most recent
// sample in the delivered batch, which is the right choice here.
func locationManager(_ manager: CLLocationManager, didUpdateLocations locations: [CLLocation]) {
guard let location = locations.last else { return } //.first or .last?
self.location = location
self.lookupLocation()
}
}
Content View like this:
struct ContentView: View {
#State private var location: String = ""
#ObservedObject var lm = LocationManager()
private let completer = MKLocalSearchCompleter()
#ObservedObject var locationSearchService = LocationSearchService()
var body: some View {
NavigationView {
VStack {
AddressSearchBar(text: $locationSearchService.searchQuery)
List(locationSearchService.completions, id: \.self) { completion in
VStack(alignment: .leading) {
Text(completion.title)
// Error here, I cannot translate the address to location
//Text(lm.getCoordinate(address: completion.title))
}
}.navigationTitle("Search Location")
}
}
A few issues here:
I would like to convert the user selected item (which I failed to implement here) to the address (completion.title) -- i.e., need to get user selection on the suggested item.
I would like to convert the address found in the suggestion to a coordinate, so that I can mark on MapView.

CollectionView Dynamic cell height swift

I'm trying to create a collection view with cells displaying strings of variable length.
I'm using this function to set the cell layout:
// Pre-Swift-3 flow-layout delegate: every cell is full collection width × 86 pt.
// NOTE(review): `cellSize` is never mutated — `let` would suffice.
func collectionView(collectionView : UICollectionView,layout collectionViewLayout:UICollectionViewLayout,sizeForItemAtIndexPath indexPath:NSIndexPath) -> CGSize
{
var cellSize:CGSize = CGSizeMake(self.whyCollectionView.frame.width, 86)
return cellSize
}
what I would like to do is manipulate cellSize.height based on my cell.labelString.utf16Count length.
The basic logic would be to say that
if((cell.labelString.text) > 70){
cellSize.height = x
}
else{
cellSize.height = y
}
However, I can't manage to retrieve my cell label string length, which always returns nil (I think it's not loaded yet).
for better understanding, here is the full code:
// WhyCell section
// Backing data for the "why" collection view, loaded from Parse ("PlacesWhy").
// (Restored `@IBOutlet` — garbled to `#IBOutlet` in the scraped text.)
var whyData:NSMutableArray! = NSMutableArray()
var textLength:Int!
@IBOutlet weak var whyCollectionView: UICollectionView!
//Loading data
// Fetches all "PlacesWhy" rows matching `placeName` from Parse (async),
// reverses them so the newest entry is first, then reloads the collection view.
// (Restored `@IBAction` — garbled to `#IBAction` in the scraped text.
// Swift 1.x / Parse-SDK era code, kept verbatim otherwise.)
@IBAction func loadData() {
    whyData.removeAllObjects()
    var findWhyData:PFQuery = PFQuery(className: "PlacesWhy")
    findWhyData.whereKey("placeName", equalTo: placeName)
    findWhyData.findObjectsInBackgroundWithBlock({
        (objects:[AnyObject]!,error:NSError!)->Void in
        if (error == nil) {
            for object in objects {
                self.whyData.addObject(object)
            }
            // Reverse so the most recent entry is shown first.
            let array:NSArray = self.whyData.reverseObjectEnumerator().allObjects
            self.whyData = array.mutableCopy() as NSMutableArray
            self.whyCollectionView.reloadData()
            println("loadData completed. datacount is \(self.whyData.count)")
        }
    })
}
// Kick off the initial Parse fetch as soon as the view loads.
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
self.loadData()
}
// Single-section collection view.
func numberOfSectionsInCollectionView(collectionView: UICollectionView) -> Int {
return 1
}
// One cell per loaded Parse object.
func collectionView(collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
return whyData.count
}
// Dequeues a cell, fills in the "why" text, then asynchronously looks up the
// author's username with a second Parse query.
// NOTE(review): the async user query writes into a reused cell — if the cell
// is recycled before the callback fires, the username can land on the wrong
// row; capture the index path and re-check before assigning.
func collectionView(collectionView: UICollectionView, cellForItemAtIndexPath indexPath: NSIndexPath) -> UICollectionViewCell {
let cell:whyCollectionViewCell = whyCollectionView.dequeueReusableCellWithReuseIdentifier("whyCell", forIndexPath: indexPath) as whyCollectionViewCell
// Loading content from NSMutableArray to cell
let therew:PFObject = self.whyData.objectAtIndex(indexPath.row) as PFObject
cell.userWhy.text = therew.objectForKey("why") as String!
// Side note: this stores only the *last bound* cell's text length, which is
// why reading it from sizeForItemAtIndexPath is unreliable.
textLength = (therew.objectForKey("why") as String!).utf16Count
self.whyCollectionView.layoutSubviews()
// Displaying user information
var whatUser:PFQuery = PFUser.query()
whatUser.whereKey("objectId", equalTo: therew.objectForKey("reasonGivenBy").objectId)
whatUser.findObjectsInBackgroundWithBlock({
(objects: [AnyObject]!, error: NSError!)->Void in
if !(error != nil) {
if let user:PFUser = (objects as NSArray).lastObject as? PFUser {
cell.userName.text = user.username
// TODO Display avatar
}
}
})
return cell
}
// Fixed size for every cell (full width × 86 pt); the answers below show a
// text-measurement-based alternative for variable heights.
func collectionView(collectionView : UICollectionView,layout collectionViewLayout:UICollectionViewLayout,sizeForItemAtIndexPath indexPath:NSIndexPath) -> CGSize
{
var cellSize:CGSize = CGSizeMake(self.whyCollectionView.frame.width, 86)
return cellSize
}
While the answer above may solve your problem, it establishes a pretty crude way of assigning each cells height. You are being forced to hard code each cell height based on some estimation. A better way of handling this issue is by setting the height of each cell in the collectionview's sizeForItemAtIndexPath delegate method.
I will walk you through the steps on how to do this below.
Step 1: Make your class extend UICollectionViewDelegateFlowLayout
Step 2: Create a function to estimate the size of your text: This method will return a height value that will fit your string!
// Measures the rect needed to draw `text` at a fixed width.
// NOTE: <arbitrarilyLargeValue> and yourDesiredWidth are placeholders the
// reader must substitute — this snippet does not compile as written.
private func estimateFrameForText(text: String) -> CGRect {
//we make the height arbitrarily large so we don't undershoot height in calculation
let height: CGFloat = <arbitrarilyLargeValue>
let size = CGSize(width: yourDesiredWidth, height: height)
let options = NSStringDrawingOptions.UsesFontLeading.union(.UsesLineFragmentOrigin)
// Must match the font actually used by the cell's label, or the estimate drifts.
let attributes = [NSFontAttributeName: UIFont.systemFontOfSize(18, weight: UIFontWeightLight)]
return NSString(string: text).boundingRectWithSize(size, options: options, attributes: attributes, context: nil)
}
Step 3: Use or override delegate method below:
// Flow-layout delegate that sizes each cell to its text via
// estimateFrameForText. <someArbitraryValue> etc. are placeholders.
func collectionView(collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, sizeForItemAtIndexPath indexPath: NSIndexPath) -> CGSize {
var height: CGFloat = <someArbitraryValue>
//we are just measuring height so we add a padding constant to give the label some room to breathe!
var padding: CGFloat = <someArbitraryPaddingValue>
//estimate each cell's height
if let text = array?[indexPath.item].text {
height = estimateFrameForText(text).height + padding
}
return CGSize(width: yourDesiredWidth, height: height)
}
You can dynamically set the frame of the cell in the cellForItemAtIndexPath function, so you can customize the height based on a label if you disregard the sizeForItemAtIndexPath function. With customizing the size, you'll probably have to look into collection view layout flow, but hopefully this points you in the right direction. It may look something like this:
// Demo controller with hard-coded per-row heights.
// NOTE(review): setting cell.frame inside cellForItemAtIndexPath fights the
// flow layout, which also calls sizeForItemAtIndexPath (returning 64×64
// here) — the layout normally wins; confirm on-device behavior before
// relying on this approach.
class CollectionViewController: UICollectionViewController, UICollectionViewDelegate, UICollectionViewDataSource, UICollectionViewDelegateFlowLayout {
var array = ["a","as","asd","asdf","asdfg","asdfgh","asdfghjk","asdfghjklas","asdfghjkl","asdghjklkjhgfdsa"]
var heights = [10.0,20.0,30.0,40.0,50.0,60.0,70.0,80.0,90.0,100.0,110.0] as [CGFloat]
override func viewDidLoad() {
super.viewDidLoad()
}
// Single section.
override func numberOfSectionsInCollectionView(collectionView: UICollectionView) -> Int {
return 1
}
// One cell per demo string.
override func collectionView(collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
return array.count
}
override func collectionView(collectionView: UICollectionView,
cellForItemAtIndexPath indexPath: NSIndexPath) -> UICollectionViewCell {
let cell = collectionView.dequeueReusableCellWithReuseIdentifier("CellID", forIndexPath: indexPath) as Cell
cell.textLabel.text = array[indexPath.row]
cell.textLabel.sizeToFit()
// Customize cell height
cell.frame = CGRectMake(cell.frame.origin.x, cell.frame.origin.y, cell.frame.size.width, heights[indexPath.row])
return cell
}
// Baseline size reported to the flow layout.
func collectionView(collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, sizeForItemAtIndexPath indexPath: NSIndexPath) -> CGSize {
return CGSizeMake(64, 64)
}
}
which gives dynamic heights like so
In Swift 3, use the below method:
// Recomputes the flow layout's itemSize for a 2-column × 4-row grid, with a
// device-dependent margin. `isIPad` and `menuCollectionView` are assumed to
// be properties of the enclosing type (not shown here) — verify.
private func updateCollectionViewLayout(with size: CGSize) {
var margin : CGFloat = 0;
if isIPad {
margin = 10
}
else{
margin = 6
/* if UIDevice.current.type == .iPhone6plus || UIDevice.current.type == .iPhone6Splus || UIDevice.current.type == .simulator{
margin = 10
}
*/
}
if let layout = menuCollectionView.collectionViewLayout as? UICollectionViewFlowLayout {
// 64 presumably accounts for the navigation bar — TODO confirm.
layout.itemSize = CGSize(width:(self.view.frame.width/2)-margin, height:((self.view.frame.height-64)/4)-3)
layout.invalidateLayout()
}
}

Recording audio in Swift

Does anyone know where I can find info on how to record audio in a Swift application? I've been looking at some of the audio playback examples but I can't seem to be able to find anything on implementing the audio recording. Thanks
In Swift 3
Add framework AVFoundation
In Info.plist, add the key
"Privacy - Microphone Usage Description" with a value such as "For using the
microphone"
(the app will crash if you don't provide the value — a description of why you are asking for the permission).
Import AVFoundation & AVAudioRecorderDelegate, AVAudioPlayerDelegate
import AVFoundation
class RecordVC: UIViewController , AVAudioRecorderDelegate, AVAudioPlayerDelegate
Create button for record audio & play audio , and label for display recording timing & give outlets and action as start_recording , play_recording & declare some variables which we will use later
// Recording/playback UI state for the recorder screen.
// (Restored `@IBOutlet` — garbled to `#IBOutlet` in the scraped text.)
@IBOutlet var recordingTimeLabel: UILabel!
@IBOutlet var record_btn_ref: UIButton!
@IBOutlet var play_btn_ref: UIButton!
var audioRecorder: AVAudioRecorder!
var audioPlayer : AVAudioPlayer!
// Fires every 0.1 s while recording to refresh the time label.
var meterTimer:Timer!
var isAudioRecordingGranted: Bool!
var isRecording = false
var isPlaying = false
In viewDidLoad check record permission
// Resolve (or request) microphone permission up front.
override func viewDidLoad() {
super.viewDidLoad()
check_record_permission()
}
// Caches the current microphone permission in `isAudioRecordingGranted`,
// prompting the user when it is undetermined.
// NOTE(review): the request callback is asynchronous, so the flag may still
// be nil immediately after this returns; recordPermission() as a method is
// the pre-iOS-12 API.
func check_record_permission()
{
switch AVAudioSession.sharedInstance().recordPermission() {
case AVAudioSessionRecordPermission.granted:
isAudioRecordingGranted = true
break
case AVAudioSessionRecordPermission.denied:
isAudioRecordingGranted = false
break
case AVAudioSessionRecordPermission.undetermined:
AVAudioSession.sharedInstance().requestRecordPermission({ (allowed) in
if allowed {
self.isAudioRecordingGranted = true
} else {
self.isAudioRecordingGranted = false
}
})
break
default:
break
}
}
generate path where you want to save that recording as myRecording.m4a
// Returns the app's Documents directory URL.
func getDocumentsDirectory() -> URL
{
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let documentsDirectory = paths[0]
return documentsDirectory
}
// Destination URL for the recording: Documents/myRecording.m4a.
func getFileUrl() -> URL
{
let filename = "myRecording.m4a"
let filePath = getDocumentsDirectory().appendingPathComponent(filename)
return filePath
}
Setup the recorder
// Configures the shared audio session for play-and-record and prepares an
// AAC recorder (44.1 kHz, stereo, high quality) writing to getFileUrl().
// Shows an alert when permission is missing or session/recorder setup throws.
func setup_recorder()
{
if isAudioRecordingGranted
{
let session = AVAudioSession.sharedInstance()
do
{
try session.setCategory(AVAudioSessionCategoryPlayAndRecord, with: .defaultToSpeaker)
try session.setActive(true)
let settings = [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 44100,
AVNumberOfChannelsKey: 2,
AVEncoderAudioQualityKey:AVAudioQuality.high.rawValue
]
audioRecorder = try AVAudioRecorder(url: getFileUrl(), settings: settings)
audioRecorder.delegate = self
audioRecorder.isMeteringEnabled = true
audioRecorder.prepareToRecord()
}
catch let error {
display_alert(msg_title: "Error", msg_desc: error.localizedDescription, action_title: "OK")
}
}
else
{
display_alert(msg_title: "Error", msg_desc: "Don't have access to use your microphone.", action_title: "OK")
}
}
Start recording when button start_recording press & display seconds using updateAudioMeter, & if recording is start then finish the recording
// Toggles recording: first tap configures the recorder and starts a 0.1 s
// UI timer; second tap finalizes the file and re-enables playback.
// (Restored `@IBAction` — garbled to `#IBAction` in the scraped text.)
@IBAction func start_recording(_ sender: UIButton)
{
    if(isRecording)
    {
        finishAudioRecording(success: true)
        record_btn_ref.setTitle("Record", for: .normal)
        play_btn_ref.isEnabled = true
        isRecording = false
    }
    else
    {
        setup_recorder()
        audioRecorder.record()
        meterTimer = Timer.scheduledTimer(timeInterval: 0.1, target:self, selector:#selector(self.updateAudioMeter(timer:)), userInfo:nil, repeats:true)
        record_btn_ref.setTitle("Stop", for: .normal)
        play_btn_ref.isEnabled = false
        isRecording = true
    }
}
// Timer callback: refreshes the HH:MM:SS label while recording and keeps
// the recorder's meters updated.
// Fixed: minutes were computed as *total* minutes (Int(t / 60)), so the
// label was wrong past the one-hour mark — now wrapped with % 60.
func updateAudioMeter(timer: Timer)
{
    if audioRecorder.isRecording
    {
        let hr = Int((audioRecorder.currentTime / 60) / 60)
        let min = Int(audioRecorder.currentTime / 60) % 60
        let sec = Int(audioRecorder.currentTime.truncatingRemainder(dividingBy: 60))
        let totalTimeString = String(format: "%02d:%02d:%02d", hr, min, sec)
        recordingTimeLabel.text = totalTimeString
        audioRecorder.updateMeters()
    }
}
// Stops the recorder, releases it, and cancels the meter timer; on failure
// shows an alert instead.
func finishAudioRecording(success: Bool)
{
if success
{
audioRecorder.stop()
audioRecorder = nil
meterTimer.invalidate()
print("recorded successfully.")
}
else
{
display_alert(msg_title: "Error", msg_desc: "Recording failed.", action_title: "OK")
}
}
Play the recording
// Builds an AVAudioPlayer for the recorded file.
// NOTE(review): the catch prints a bare "Error" and leaves `audioPlayer`
// nil — the caller's subsequent play() would then crash on the implicitly
// unwrapped optional.
func prepare_play()
{
do
{
audioPlayer = try AVAudioPlayer(contentsOf: getFileUrl())
audioPlayer.delegate = self
audioPlayer.prepareToPlay()
}
catch{
print("Error")
}
}
// Toggles playback of the recorded file; recording is disabled while playing.
// (Restored `@IBAction` — garbled to `#IBAction` in the scraped text.)
@IBAction func play_recording(_ sender: Any)
{
    if(isPlaying)
    {
        audioPlayer.stop()
        record_btn_ref.isEnabled = true
        play_btn_ref.setTitle("Play", for: .normal)
        isPlaying = false
    }
    else
    {
        // Only attempt playback when the recording actually exists on disk.
        if FileManager.default.fileExists(atPath: getFileUrl().path)
        {
            record_btn_ref.isEnabled = false
            play_btn_ref.setTitle("pause", for: .normal)
            prepare_play()
            audioPlayer.play()
            isPlaying = true
        }
        else
        {
            display_alert(msg_title: "Error", msg_desc: "Audio file is missing.", action_title: "OK")
        }
    }
}
When recording is finish enable the play button & when play is finish enable the record button
// Recorder delegate: re-enable Play when recording ends; treat a
// system-initiated stop (flag == false) as a failure.
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool)
{
if !flag
{
finishAudioRecording(success: false)
}
play_btn_ref.isEnabled = true
}
// Player delegate: playback finished, allow a new recording.
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool)
{
record_btn_ref.isEnabled = true
}
Generalize function for display alert
// Shared alert helper; the action also pops the current view controller.
func display_alert(msg_title : String , msg_desc : String ,action_title : String)
{
let ac = UIAlertController(title: msg_title, message: msg_desc, preferredStyle: .alert)
ac.addAction(UIAlertAction(title: action_title, style: .default)
{
(result : UIAlertAction) -> Void in
_ = self.navigationController?.popViewController(animated: true)
})
present(ac, animated: true)
}
Here is the code. You can record easily. Write this code in an IBAction. It will save the recording in Documents under the name recordTest.caf.
//declare instance variable
// Must be an instance property: AVAudioRecorder has to stay retained for the
// whole recording, so a local would be deallocated immediately.
var audioRecorder:AVAudioRecorder!
// Records to Documents/recordTest.caf in Apple IMA4 format.
// NOTE: Swift 1.x / iOS 8-era API (NSError out-parameters, println,
// stringByAppendingPathComponent) — kept verbatim for historical context;
// this will not compile under modern Swift.
func record(){
var audioSession:AVAudioSession = AVAudioSession.sharedInstance()
audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord, error: nil)
audioSession.setActive(true, error: nil)
var documents: AnyObject = NSSearchPathForDirectoriesInDomains( NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.UserDomainMask, true)[0]
var str = documents.stringByAppendingPathComponent("recordTest.caf")
var url = NSURL.fileURLWithPath(str as String)
var recordSettings = [AVFormatIDKey:kAudioFormatAppleIMA4,
AVSampleRateKey:44100.0,
AVNumberOfChannelsKey:2,AVEncoderBitRateKey:12800,
AVLinearPCMBitDepthKey:16,
AVEncoderAudioQualityKey:AVAudioQuality.Max.rawValue]
println("url : \(url)")
var error: NSError?
// Initializer reports failure through the NSError out-parameter.
audioRecorder = AVAudioRecorder(URL:url, settings: recordSettings, error: &error)
if let e = error {
println(e.localizedDescription)
} else {
audioRecorder.record()
}
}
Swift 2 version of @codester's answer.
// Asks for microphone permission, configures the shared session, and
// creates a recorder writing to Documents/voiceRecording.caf.
// Modernized: requestRecordPermission is called directly (the stringly
// respondsToSelector check was a Swift 2 idiom), and the force-try `try!`
// calls — which crash on any audio-session failure — now use do/catch.
func record() {
    let audioSession = AVAudioSession.sharedInstance()
    //ask for permission
    audioSession.requestRecordPermission { (granted: Bool) -> Void in
        guard granted else {
            print("not granted")
            return
        }
        print("granted")
        do {
            //set category and activate recorder session
            try audioSession.setCategory(.playAndRecord, mode: .default)
            try audioSession.setActive(true)

            //destination: Documents/voiceRecording.caf
            let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
            let url = documentsDirectory.appendingPathComponent("voiceRecording.caf")

            // Same settings as the original answer.
            let settings: [String: Any] = [
                AVFormatIDKey: Int(kAudioFormatAppleIMA4),
                AVSampleRateKey: 44100.0,
                AVNumberOfChannelsKey: 2,
                AVEncoderBitRateKey: 12800,
                AVLinearPCMBitDepthKey: 16,
                AVEncoderAudioQualityKey: AVAudioQuality.max.rawValue
            ]
            //record
            self.audioRecorder = try AVAudioRecorder(url: url, settings: settings)
        } catch {
            print("recorder setup failed: \(error.localizedDescription)")
        }
    }
}
In addition to the previous answers, I tried to make it work on Xcode 7.2 and I couldn't hear any sound afterwards, not even when I sent the file via email. There was no warning or exception.
So I changed settings to the following and stored as an .m4a file.
// AAC in an .m4a container — unlike the IMA4/.caf settings in the answers
// above, these settings produced audible output for the author on Xcode 7.2.
let recordSettings = [AVSampleRateKey : NSNumber(float: Float(44100.0)),
// kAudioFormatMPEG4AAC = AAC codec; pair it with an .m4a file extension.
AVFormatIDKey : NSNumber(int: Int32(kAudioFormatMPEG4AAC)),
// Mono — enough for voice and halves the file size.
AVNumberOfChannelsKey : NSNumber(int: 1),
AVEncoderAudioQualityKey : NSNumber(int: Int32(AVAudioQuality.Medium.rawValue))]
After that I could listen to sound.
For saving the file, I added this on viewDidLoad to initialise the recorder:
// Configure the shared session for record+playback and build the recorder
// up front, so prepareToRecord() primes the hardware before first use.
// (Swift 2-era API: NSURL-based initializer, pre-renamed constants.)
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
try audioRecorder = AVAudioRecorder(URL: self.directoryURL()!,
settings: recordSettings)
audioRecorder.prepareToRecord()
} catch {
// NOTE(review): errors are silently swallowed here — a failed session or
// recorder setup is invisible; consider at least logging the error.
}
And for creating the directory:
// Returns Documents/sound.m4a as the recording destination.
// Note: despite the name, this does not create a directory — it only
// builds a URL inside the (already existing) Documents directory.
func directoryURL() -> NSURL? {
let fileManager = NSFileManager.defaultManager()
let urls = fileManager.URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
// Documents is the first (and only) URL returned for this search.
let documentDirectory = urls[0] as NSURL
let soundURL = documentDirectory.URLByAppendingPathComponent("sound.m4a")
return soundURL
}
I also add the actions used to start recording, stop, and play after
// Starts recording if one is not already running; the session must be
// activated first. (Restored `@IBAction` from the mangled `#IBAction`.)
@IBAction func doRecordAction(sender: AnyObject) {
    if !audioRecorder.recording {
        let audioSession = AVAudioSession.sharedInstance()
        do {
            try audioSession.setActive(true)
            audioRecorder.record()
        } catch {
            // Bug fix: the original empty catch hid activation failures.
            print("could not activate session for recording: \(error)")
        }
    }
}
// Stops recording and deactivates the session to release the microphone.
// (Restored `@IBAction` from the mangled `#IBAction`.)
@IBAction func doStopRecordingAction(sender: AnyObject) {
    audioRecorder.stop()
    let audioSession = AVAudioSession.sharedInstance()
    do {
        try audioSession.setActive(false)
    } catch {
        // Bug fix: surface deactivation failures instead of ignoring them.
        print("could not deactivate session: \(error)")
    }
}
// Plays back the last recording, but never while a recording is running.
// (Restored `@IBAction` from the mangled `#IBAction`.)
@IBAction func doPlayAction(sender: AnyObject) {
    if (!audioRecorder.recording){
        do {
            try audioPlayer = AVAudioPlayer(contentsOfURL: audioRecorder.url)
            audioPlayer.play()
        } catch {
            // Bug fix: report playback setup failures instead of swallowing them.
            print("could not start playback: \(error)")
        }
    }
}
Here is an audio recorder with a simple interface, written in Swift 4.2.
// Audio recorder with a simple record/play interface (Swift 4.2+).
// Recording defaults to Documents/note.m4a when no URL is supplied.
final class AudioRecorderImpl: NSObject {
    private let session = AVAudioSession.sharedInstance()
    private var player: AVAudioPlayer?
    private var recorder: AVAudioRecorder?
    private lazy var permissionGranted = false
    private lazy var isRecording = false
    private lazy var isPlaying = false
    // Default destination; calls may override via their url parameter.
    private var fileURL: URL?
    // NOTE(review): duplicated by the local `settings` in setupRecorder(url:);
    // kept so the stored-property interface is unchanged.
    private let settings = [
        AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
        AVSampleRateKey: 44100,
        AVNumberOfChannelsKey: 2,
        AVEncoderAudioQualityKey:AVAudioQuality.high.rawValue
    ]

    override init() {
        fileURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first?.appendingPathComponent("note.m4a")
    }

    /// Starts recording to `url` (or the default file). No-op without permission.
    func record(to url: URL?) {
        guard permissionGranted,
            let url = url ?? fileURL else { return }
        // Bug fix: stop any in-flight recording BEFORE configuring the new
        // recorder. The original called setupRecorder first and then
        // stopRecording(), which stopped the freshly created recorder and
        // deactivated the session that setupRecorder had just activated.
        if isRecording {
            stopRecording()
        }
        setupRecorder(url: url)
        isRecording = true
        recorder?.record()
    }

    /// Stops recording and releases the audio session.
    func stopRecording() {
        isRecording = false
        recorder?.stop()
        try? session.setActive(false)
    }

    /// Plays the file at `url` (or the default file), stopping any
    /// recording or playback already in progress first.
    func play(from url: URL?) {
        guard let url = url ?? fileURL else { return }
        if isRecording {
            stopRecording()
        }
        if isPlaying {
            stopPlaying()
        }
        if FileManager.default.fileExists(atPath: url.path) {
            isPlaying = true
            // Bug fix: the original called setupPlayer(url:) twice per call;
            // once is sufficient.
            setupPlayer(url: url)
            player?.play()
        }
    }

    /// Stops playback.
    func stopPlaying() {
        player?.stop()
        // Bug fix: clear the flag, otherwise the class keeps reporting
        // isPlaying == true after playback was stopped.
        isPlaying = false
    }

    /// Pauses playback; resume() continues from the same position.
    func pause() {
        player?.pause()
    }

    /// Resumes playback if the player is currently paused.
    func resume() {
        if player?.isPlaying == false {
            player?.play()
        }
    }

    /// Resolves record permission (prompting if undetermined) and reports
    /// the result via `completion`, also caching it in `permissionGranted`.
    func checkPermission(completion: ((Bool) -> Void)?) {
        func assignAndInvokeCallback(_ granted: Bool) {
            self.permissionGranted = granted
            completion?(granted)
        }
        switch session.recordPermission {
        case .granted:
            assignAndInvokeCallback(true)
        case .denied:
            assignAndInvokeCallback(false)
        case .undetermined:
            session.requestRecordPermission(assignAndInvokeCallback)
        @unknown default:
            // Future-proofing: treat unknown permission states as denied.
            assignAndInvokeCallback(false)
        }
    }
}
// MARK: - Delegate conformances (no callbacks implemented yet; the recorder
// and player still require a delegate object to be assigned).
extension AudioRecorderImpl: AVAudioRecorderDelegate, AVAudioPlayerDelegate {
}
// MARK: - Private setup helpers
private extension AudioRecorderImpl {
    /// Creates and primes the recorder for the given destination URL.
    func setupRecorder(url: URL) {
        guard
            permissionGranted else { return }
        // Bug fix: the .playback category cannot capture input, so recording
        // silently fails. Use .playAndRecord, as the Swift 5 setup elsewhere
        // in this file does.
        try? session.setCategory(.playAndRecord, mode: .default)
        try? session.setActive(true)
        // AAC, 44.1 kHz, stereo, high quality.
        let settings = [
            AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
            AVSampleRateKey: 44100,
            AVNumberOfChannelsKey: 2,
            AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
        ]
        recorder = try? AVAudioRecorder(url: url, settings: settings)
        recorder?.delegate = self
        recorder?.isMeteringEnabled = true
        recorder?.prepareToRecord()
    }

    /// Creates and primes the player for the given file URL.
    func setupPlayer(url: URL) {
        player = try? AVAudioPlayer(contentsOf: url)
        player?.delegate = self
        player?.prepareToPlay()
    }
}
For Swift 5,
// Creates and primes an AAC recorder writing to getFileUrl(), assuming the
// microphone permission flag (isAudioRecordingGranted) was resolved earlier.
// Bug fix: the original snippet was missing the function's closing brace.
func setup_recorder()
{
    if isAudioRecordingGranted
    {
        let session = AVAudioSession.sharedInstance()
        do
        {
            try session.setCategory(.playAndRecord, mode: .default)
            try session.setActive(true)
            // AAC, 44.1 kHz, stereo, high quality.
            let settings = [
                AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
                AVSampleRateKey: 44100,
                AVNumberOfChannelsKey: 2,
                AVEncoderAudioQualityKey:AVAudioQuality.high.rawValue
            ]
            audioRecorder = try AVAudioRecorder(url: getFileUrl(), settings: settings)
            audioRecorder.delegate = self
            audioRecorder.isMeteringEnabled = true
            audioRecorder.prepareToRecord()
        }
        catch let error {
            display_alert(msg_title: "Error", msg_desc: error.localizedDescription, action_title: "OK")
        }
    }
    else
    {
        display_alert(msg_title: "Error", msg_desc: "Don't have access to use your microphone.", action_title: "OK")
    }
}
Code in a class file, using Swift 4. The class is AGAudioRecorder. The code is:
// Demo view controller driving an AGAudioRecorder with two buttons.
// (Restored `@IBOutlet`/`@IBAction` from the mangled `#` attributes.)
class AudioRecordViewController: UIViewController {
    #IBOutlet weak var recodeBtn: UIButton!
    #IBOutlet weak var playBtn: UIButton!

    // Last state reported by the recorder delegate.
    var state: AGAudioRecorderState = .Ready
    var recorder: AGAudioRecorder = AGAudioRecorder(withFileName: "TempFile")

    override func viewDidLoad() {
        super.viewDidLoad()
        // Bug fix: "Recode" -> "Record" in the user-facing button title.
        recodeBtn.setTitle("Record", for: .normal)
        playBtn.setTitle("Play", for: .normal)
        recorder.delegate = self
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    // Toggles recording.
    @IBAction func recode(_ sender: UIButton) {
        recorder.doRecord()
    }

    // Toggles playback.
    @IBAction func play(_ sender: UIButton) {
        recorder.doPlay()
    }
}
// MARK: - AGAudioRecorderDelegate
extension AudioRecordViewController: AGAudioRecorderDelegate {
    // Mirrors the recorder's state changes onto the button titles.
    func agAudioRecorder(_ recorder: AGAudioRecorder, withStates state: AGAudioRecorderState) {
        switch state {
        case .error(let e): debugPrint(e)
        case .Failed(let s): debugPrint(s)
        case .Finish:
            // Bug fix: the original titles were misspelled ("Recode") and the
            // Recording case showed "Recoding Finished" while still recording.
            recodeBtn.setTitle("Record", for: .normal)
        case .Recording:
            recodeBtn.setTitle("Recording…", for: .normal)
        case .Pause:
            playBtn.setTitle("Pause", for: .normal)
        case .Play:
            playBtn.setTitle("Play", for: .normal)
        case .Ready:
            recodeBtn.setTitle("Record", for: .normal)
            playBtn.setTitle("Play", for: .normal)
            // NOTE(review): refreshBtn is not declared in the class above —
            // add the outlet there or remove this line.
            refreshBtn.setTitle("Refresh", for: .normal)
        }
        debugPrint(state)
    }

    // Elapsed-time callback; the string arrives already formatted.
    func agAudioRecorder(_ recorder: AGAudioRecorder, currentTime timeInterval: TimeInterval, formattedString: String) {
        debugPrint(formattedString)
    }
}
Swift 3 Code Version: Complete Solution for Audio Recording!
import UIKit
import AVFoundation
// Swift 3: complete audio-recording screen — permission check, record,
// stop, and a timer-driven elapsed-time label.
// (Restored `@IBOutlet`/`@IBAction` from the mangled `#` attributes.)
class ViewController: UIViewController, AVAudioRecorderDelegate {
    //Outlets
    @IBOutlet weak var recordingTimeLabel: UILabel!

    //Variables
    var audioRecorder: AVAudioRecorder!
    var meterTimer:Timer!
    var isAudioRecordingGranted: Bool!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Resolve microphone permission up front; prompt only if undetermined.
        switch AVAudioSession.sharedInstance().recordPermission() {
        case AVAudioSessionRecordPermission.granted:
            isAudioRecordingGranted = true
            break
        case AVAudioSessionRecordPermission.denied:
            isAudioRecordingGranted = false
            break
        case AVAudioSessionRecordPermission.undetermined:
            AVAudioSession.sharedInstance().requestRecordPermission() { [unowned self] allowed in
                // Hop back to the main queue before mutating state used by UI.
                DispatchQueue.main.async {
                    self.isAudioRecordingGranted = allowed
                }
            }
            break
        default:
            break
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        audioRecorder = nil
    }

    //MARK:- Audio recorder buttons action.
    @IBAction func audioRecorderAction(_ sender: UIButton) {
        if isAudioRecordingGranted {
            //Create the session.
            let session = AVAudioSession.sharedInstance()
            do {
                //Configure the session for recording and playback.
                try session.setCategory(AVAudioSessionCategoryPlayAndRecord, with: .defaultToSpeaker)
                try session.setActive(true)
                //Set up a high-quality recording session.
                let settings = [
                    AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
                    AVSampleRateKey: 44100,
                    AVNumberOfChannelsKey: 2,
                    AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
                ]
                //Create audio file name URL
                let audioFilename = getDocumentsDirectory().appendingPathComponent("audioRecording.m4a")
                //Create the audio recording, and assign ourselves as the delegate
                audioRecorder = try AVAudioRecorder(url: audioFilename, settings: settings)
                audioRecorder.delegate = self
                audioRecorder.isMeteringEnabled = true
                audioRecorder.record()
                // Drive the elapsed-time label ten times per second.
                meterTimer = Timer.scheduledTimer(timeInterval: 0.1, target:self, selector:#selector(self.updateAudioMeter(timer:)), userInfo:nil, repeats:true)
            }
            catch let error {
                print("Error for start audio recording: \(error.localizedDescription)")
            }
        }
    }

    @IBAction func stopAudioRecordingAction(_ sender: UIButton) {
        finishAudioRecording(success: true)
    }

    // Stops the recorder and the UI timer; logs the outcome.
    func finishAudioRecording(success: Bool) {
        audioRecorder.stop()
        audioRecorder = nil
        meterTimer.invalidate()
        if success {
            print("Recording finished successfully.")
        } else {
            print("Recording failed :(")
        }
    }

    // Timer callback: renders elapsed recording time as HH:MM:SS.
    func updateAudioMeter(timer: Timer) {
        if audioRecorder.isRecording {
            let hr = Int((audioRecorder.currentTime / 60) / 60)
            // Bug fix: minutes must wrap at 60 — the original displayed
            // e.g. 61 minutes as "01:61:00" instead of "01:01:00".
            let min = Int(audioRecorder.currentTime / 60) % 60
            let sec = Int(audioRecorder.currentTime.truncatingRemainder(dividingBy: 60))
            let totalTimeString = String(format: "%02d:%02d:%02d", hr, min, sec)
            recordingTimeLabel.text = totalTimeString
            audioRecorder.updateMeters()
        }
    }

    // App Documents directory — destination folder for the .m4a recording.
    func getDocumentsDirectory() -> URL {
        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
        let documentsDirectory = paths[0]
        return documentsDirectory
    }

    //MARK:- Audio recoder delegate methods
    func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
        if !flag {
            finishAudioRecording(success: false)
        }
    }
}
Simple 2022 syntax, record audio on iPhone.
Older answers don't work.
Needed ...
// Recorder instance; nil when idle, non-nil while a recording is running.
var mic: AVAudioRecorder?
// Scratch file in the temporary directory; each recording overwrites it.
var workingFile: URL {
return FileManager.default.temporaryDirectory.appendingPathComponent("temp.m4a")
}
And then
// Entry point for the talk button: checks microphone permission, then
// starts/stops recording via _talkSend().
// (Restored `@IBAction` and `@unknown` from the mangled `#` attributes.)
@IBAction public func tapTalkSend() {
    switch AVCaptureDevice.authorizationStatus(for: .audio) {
    case .authorized: _talkSend()
    case .notDetermined:
        // Bug fix: the original requested access for .video — recording
        // needs the microphone, so ask for .audio.
        AVCaptureDevice.requestAccess(for: .audio) { [weak self] granted in
            if granted { self?._talkSend() }
        }
    case .denied: return
    case .restricted: return
    @unknown default:
        return
    }
}
And then
// Toggles recording: a second call while recording stops it; otherwise a
// new recording into workingFile is started.
public func _talkSend() {
    // Already recording? Treat this call as "stop".
    if let activeMic = mic, activeMic.isRecording {
        activeMic.stop()
        mic = nil
        return
    }
    do {
        try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .default)
        try AVAudioSession.sharedInstance().setActive(true)
        // Empty settings dictionary = AVFoundation's defaults.
        let recorder = try AVAudioRecorder(url: workingFile, settings: [:])
        // Improvement: bind locally instead of force-unwrapping `mic!`,
        // and set the delegate before record() so no callback is missed.
        recorder.delegate = self
        mic = recorder
        recorder.record()
    }
    catch let error {
        return print("mic drama \(error)")
    }
}
Add AVAudioRecorderDelegate on your vc. And:
// Recording finished (user stopped it or the system ended it).
// The `...` lines are the answer's placeholders — wire in whatever should
// happen next (playback test, upload, etc.); this is not compilable as-is.
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
_testPlay() ...
_sendFileSomewhere() ...
}
// Encoder failure mid-recording.
func audioRecorderEncodeErrorDidOccur(_ recorder: AVAudioRecorder, error: Error?) {
    // Improvement: include the underlying error instead of discarding it.
    print("audioRecorderEncodeErrorDidOccur \(String(describing: error))")
}
To test by playing it back:
// The player must be retained in a property — a local would be deallocated
// as soon as the function returns, cutting playback off.
var myPlayer: AVAudioPlayer!
// Plays back workingFile to verify the recording.
func _testPlay() {
do {
myPlayer = try AVAudioPlayer(contentsOf: workingFile)
myPlayer.prepareToPlay()
myPlayer.play()
}
catch let error {
return print("play drama \(error)")
}
}
In your plist:
<key>NSCameraUsageDescription</key>
<string>For spoken messages.</string>
<key>NSMicrophoneUsageDescription</key>
<string>For spoken messages.</string>

Resources