From 87843ec692de46a1248f360abe45208109b8483b Mon Sep 17 00:00:00 2001
From: Selim Mustafaev
Date: Tue, 28 Jul 2020 00:28:02 +0300
Subject: [PATCH] Changes in CoreLocation wrapper

---
 AutoCat.xcodeproj/project.pbxproj             |   4 +
 .../xcdebugger/Breakpoints_v2.xcbkptlist      |  48 ++++++
 AutoCat/AppDelegate.swift                     |   2 +-
 AutoCat/Controllers/RecordsController.swift   | 100 ++++++------
 AutoCat/Models/AudioRecord.swift              |   4 +-
 AutoCat/Models/VehicleEvent.swift             |  21 +++
 AutoCat/Utils/Location.swift                  |  58 ++++---
 AutoCat/Utils/Recorder.swift                  | 144 ++++++++++--------
 8 files changed, 238 insertions(+), 143 deletions(-)
 create mode 100644 AutoCat/Models/VehicleEvent.swift

diff --git a/AutoCat.xcodeproj/project.pbxproj b/AutoCat.xcodeproj/project.pbxproj
index cc153ee..686fa1a 100644
--- a/AutoCat.xcodeproj/project.pbxproj
+++ b/AutoCat.xcodeproj/project.pbxproj
@@ -80,6 +80,7 @@
 		7A96AE2F246B2BCD00297C33 /* WebKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 7A96AE2E246B2BCD00297C33 /* WebKit.framework */; };
 		7A96AE31246B2FE400297C33 /* Constants.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7A96AE30246B2FE400297C33 /* Constants.swift */; };
 		7A96AE33246C095700297C33 /* Base64FS.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7A96AE32246C095700297C33 /* Base64FS.swift */; };
+		7AAE6AD324CDDF950023860B /* VehicleEvent.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7AAE6AD224CDDF950023860B /* VehicleEvent.swift */; };
 		7AB562BA249C9E9B00473D53 /* Region.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7AB562B9249C9E9B00473D53 /* Region.swift */; };
 		7AB67E8C2435C38700258F61 /* CustomTextField.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7AB67E8B2435C38700258F61 /* CustomTextField.swift */; };
 		7AB67E8E2435D1A000258F61 /* CustomButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7AB67E8D2435D1A000258F61 /* CustomButton.swift */; };
@@ -157,6 +158,7 @@
 		7A96AE2E246B2BCD00297C33 /* WebKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = WebKit.framework; path = Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.15.sdk/System/Library/Frameworks/WebKit.framework; sourceTree = DEVELOPER_DIR; };
 		7A96AE30246B2FE400297C33 /* Constants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Constants.swift; sourceTree = "<group>"; };
 		7A96AE32246C095700297C33 /* Base64FS.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Base64FS.swift; sourceTree = "<group>"; };
+		7AAE6AD224CDDF950023860B /* VehicleEvent.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VehicleEvent.swift; sourceTree = "<group>"; };
 		7AB562B9249C9E9B00473D53 /* Region.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Region.swift; sourceTree = "<group>"; };
 		7AB67E8B2435C38700258F61 /* CustomTextField.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomTextField.swift; sourceTree = "<group>"; };
 		7AB67E8D2435D1A000258F61 /* CustomButton.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomButton.swift; sourceTree = "<group>"; };
@@ -285,6 +287,7 @@
 				7A333813249A532400D878F1 /* Filter.swift */,
 				7AB562B9249C9E9B00473D53 /* Region.swift */,
 				7A659B5824A2B1BA0043A0F2 /* AudioRecord.swift */,
+				7AAE6AD224CDDF950023860B /* VehicleEvent.swift */,
 			);
 			path = Models;
 			sourceTree = "<group>";
@@ -526,6 +529,7 @@
 				7A43F9F8246C8A6200BA5B49 /* JWT.swift in Sources */,
 				7A6DD903242BF4A5009DE740 /* PlateView.swift in Sources */,
 				7A488C3F24A74B990054D0B2 /* RealmBindObserver.swift in Sources */,
+				7AAE6AD324CDDF950023860B /* VehicleEvent.swift in Sources */,
 				7A11470323FDE7E500B424AF /* SceneDelegate.swift in Sources */,
 				7A530B7E24017FEE00CBFE6E /* VehicleCell.swift in Sources */,
 				7A11474423FF06CA00B424AF /* Api.swift in Sources */,
diff --git a/AutoCat.xcodeproj/xcuserdata/selim.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist b/AutoCat.xcodeproj/xcuserdata/selim.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist
index 83626b9..da8c074 100644
--- a/AutoCat.xcodeproj/xcuserdata/selim.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist
+++ b/AutoCat.xcodeproj/xcuserdata/selim.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist
@@ -47,5 +47,53 @@
diff --git a/AutoCat/AppDelegate.swift b/AutoCat/AppDelegate.swift
index d859c51..57a34f0 100644
--- a/AutoCat/AppDelegate.swift
+++ b/AutoCat/AppDelegate.swift
@@ -23,7 +23,7 @@ class AppDelegate: UIResponder, UIApplicationDelegate {
     func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
         
         let config = Realm.Configuration(
-            schemaVersion: 9,
+            schemaVersion: 10,
             migrationBlock: { migration, oldSchemaVersion in
                 if oldSchemaVersion <= 3 {
                     var numbers: [String] = []
diff --git a/AutoCat/Controllers/RecordsController.swift b/AutoCat/Controllers/RecordsController.swift
index 8445235..575311e 100644
--- a/AutoCat/Controllers/RecordsController.swift
+++ b/AutoCat/Controllers/RecordsController.swift
@@ -15,6 +15,7 @@ class RecordsController: UIViewController, UITableViewDelegate {
     var recorder: Recorder?
     var addButton: UIBarButtonItem!
     let bag = DisposeBag()
+    var recordDisposable: Disposable?
     
     let validLetters = ["А", "В", "Е", "К", "М", "Н", "О", "Р", "С", "Т", "У", "Х"]
     
@@ -26,7 +27,7 @@
         self.addButton = UIBarButtonItem(barButtonSystemItem: .add, target: self, action: #selector(onAddVoiceRecord(_:)))
         self.navigationItem.rightBarButtonItem = self.addButton
         
-        self.recorder = try? Recorder()
+        self.recorder = Recorder()
         
         let ds = RxTableViewSectionedAnimatedDataSource>(configureCell: { dataSource, tableView, indexPath, item in
             if let cell = tableView.dequeueReusableCell(withIdentifier: "AudioRecordCell", for: indexPath) as? AudioRecordCell {
@@ -59,8 +60,6 @@ class RecordsController: UIViewController, UITableViewDelegate {
         }
         
         self.tableView.rx.setDelegate(self).disposed(by: self.bag)
-        
-        LocationManager.requestCurrentLocation().subscribe().disposed(by: self.bag)
     }
     
     override func viewDidAppear(_ animated: Bool) {
@@ -113,39 +112,42 @@
             return
         }
         
-        recorder.requestPermissions { error in
-            DispatchQueue.main.async {
-                if let error = error {
-                    self.show(error: error)
-                } else {
-                    do {
-                        let alert = UIAlertController(title: "Recording...", message: nil, preferredStyle: .alert)
-                        alert.addAction(UIAlertAction(title: "Cancel", style: .cancel, handler: { _ in self.recorder?.cancelRecording() }))
-                        alert.addAction(UIAlertAction(title: "Done", style: .default, handler: { _ in self.recorder?.stopRecording() }))
-                        self.present(alert, animated: true)
-                        
-                        let date = Date()
-                        let fileName = "recording-\(date.timeIntervalSince1970).m4a"
-                        let url = try FileManager.default.url(for: fileName, in: "recordings")
-                        try self.makeStartSoundIfNeeded {
-                            try recorder.startRecording(to: url) { result in
-                                let asset = AVURLAsset(url: url)
-                                let duration = TimeInterval(CMTimeGetSeconds(asset.duration))
-                                let record = AudioRecord(path: url.lastPathComponent, number: self.getPlateNumber(from: result), raw: result, duration: duration)
-                                let realm = try? Realm()
-                                try? realm?.write {
-                                    realm?.add(record)
-                                }
-                                alert.dismiss(animated: true)
-                                print("New record saved to: \(url.path)")
-                            }
-                        }
-                        self.donateUserActivity()
-                    } catch {
-                        IHProgressHUD.showError(withStatus: error.localizedDescription)
-                    }
-                }
-            }
+        var alert: UIAlertController?
+        var url: URL!
+        
+        let locationObservable = LocationManager.requestCurrentLocation()
+            .map(Optional.init)
+            .catchErrorJustReturn(nil)
+        
+        let recordObservable: Single<String> = recorder.requestPermissions()
+            .observeOn(MainScheduler.instance)
+            .flatMap(self.makeStartSoundIfNeeded)
+            .flatMap {
+                alert = UIAlertController(title: "Recording...", message: nil, preferredStyle: .alert)
+                alert!.addAction(UIAlertAction(title: "Cancel", style: .cancel, handler: { _ in self.recordDisposable?.dispose() }))
+                alert!.addAction(UIAlertAction(title: "Done", style: .default, handler: { _ in self.recorder?.stopRecording() }))
+                self.present(alert!, animated: true)
+                
+                let date = Date()
+                let fileName = "recording-\(date.timeIntervalSince1970).m4a"
+                url = try FileManager.default.url(for: fileName, in: "recordings")
+                
+                return recorder.startRecording(to: url)
+            }
+        
+        self.recordDisposable = Single.zip(locationObservable, recordObservable) { event, text -> AudioRecord in
+            let asset = AVURLAsset(url: url)
+            let duration = TimeInterval(CMTimeGetSeconds(asset.duration))
+            return AudioRecord(path: url.lastPathComponent, number: self.getPlateNumber(from: text), raw: text, duration: duration, event: event)
+        }
+        .subscribe(onSuccess: { record in
+            let realm = try? Realm()
+            try? realm?.write {
+                realm?.add(record)
+            }
+            alert?.dismiss(animated: true)
+        }) { error in
+            IHProgressHUD.showError(withStatus: error.localizedDescription)
         }
     }
@@ -195,28 +197,18 @@
             && region! < 1000
     }
     
-    func makeStartSoundIfNeeded(completion: @escaping () throws -> Void) throws {
+    func makeStartSoundIfNeeded() -> Single<Void> {
         if !Settings.shared.recordBeep {
-            try completion()
+            return .just(())
         } else {
-            //let session = AVAudioSession.sharedInstance()
-            //try session.setCategory(.playback, mode: .default, options: [.defaultToSpeaker])
-            //try session.setActive(true)
-            var err: Error?
-            var soundId = SystemSoundID()
-            let url = URL(fileURLWithPath: "/System/Library/Audio/UISounds/short_double_high.caf")
-            AudioServicesCreateSystemSoundID(url as CFURL, &soundId)
-            AudioServicesPlaySystemSoundWithCompletion(soundId) {
-                do {
-                    //try session.setActive(false)
-                    try completion()
-                } catch {
-                    err = error
+            return Single.create { observer in
+                var soundId = SystemSoundID()
+                let url = URL(fileURLWithPath: "/System/Library/Audio/UISounds/short_double_high.caf")
+                AudioServicesCreateSystemSoundID(url as CFURL, &soundId)
+                AudioServicesPlaySystemSoundWithCompletion(soundId) {
+                    observer(.success(()))
                 }
-            }
-            
-            if let error = err {
-                throw error
+                return Disposables.create()
             }
         }
     }
diff --git a/AutoCat/Models/AudioRecord.swift b/AutoCat/Models/AudioRecord.swift
index 77ba73f..48546f1 100644
--- a/AutoCat/Models/AudioRecord.swift
+++ b/AutoCat/Models/AudioRecord.swift
@@ -9,6 +9,7 @@ class AudioRecord: Object, IdentifiableType {
     @objc dynamic var rawText: String = ""
     @objc dynamic var addedDate: TimeInterval = Date().timeIntervalSince1970
     @objc dynamic var duration: TimeInterval = 0
+    @objc dynamic var event: VehicleEvent?
     
     var identifier: TimeInterval = 0
     var identity: TimeInterval {
@@ -18,11 +19,12 @@
         return self.identifier
     }
     
-    init(path: String, number: String?, raw: String, duration: TimeInterval) {
+    init(path: String, number: String?, raw: String, duration: TimeInterval, event: VehicleEvent?) {
         self.path = path
         self.number = number
         self.duration = duration
         self.rawText = raw
+        self.event = event
     }
     
     required init() {
diff --git a/AutoCat/Models/VehicleEvent.swift b/AutoCat/Models/VehicleEvent.swift
new file mode 100644
index 0000000..8bca39a
--- /dev/null
+++ b/AutoCat/Models/VehicleEvent.swift
@@ -0,0 +1,21 @@
+import Foundation
+import RealmSwift
+
+class VehicleEvent: Object {
+    @objc dynamic var date: Date = Date()
+    @objc dynamic var latitude: Double = 0
+    @objc dynamic var longitude: Double = 0
+    @objc dynamic var speed: Double = 0
+    @objc dynamic var direction: Double = 0
+
+    init(lat: Double, lon: Double, speed: Double, dir: Double) {
+        self.latitude = lat
+        self.longitude = lon
+        self.speed = speed
+        self.direction = dir
+    }
+
+    required init() {
+        super.init()
+    }
+}
diff --git a/AutoCat/Utils/Location.swift b/AutoCat/Utils/Location.swift
index b71352f..05e4953 100644
--- a/AutoCat/Utils/Location.swift
+++ b/AutoCat/Utils/Location.swift
@@ -3,14 +3,6 @@
 import RxSwift
 import RxCocoa
 import CoreLocation
 
-struct VehicleEvent {
-    var date: Date
-    var latitude: Double
-    var longitude: Double
-    var speed: Double
-    var direction: Double
-}
-
 class RxLocationManagerDelegateProxy: DelegateProxy<CLLocationManager, CLLocationManagerDelegate>, DelegateProxyType, CLLocationManagerDelegate {
     
     init(locationManager: ParentObject) {
@@ -41,20 +33,30 @@ extension Reactive where Base: CLLocationManager {
         let sel = #selector((CLLocationManagerDelegate.locationManager(_:didChangeAuthorization:)! as (CLLocationManagerDelegate) -> (CLLocationManager, CLAuthorizationStatus) -> Void))
         let source: Observable<CLAuthorizationStatus> = delegate.methodInvoked(sel)
             .map { arg in
-                let status = arg[1] as! CLAuthorizationStatus
-                return status
+                let status = CLAuthorizationStatus(rawValue: arg[1] as! Int32)
+                return status!
             }
         
         return ControlEvent(events: source)
     }
+    
+    var didUpdateLocations: Observable<VehicleEvent> {
+        let sel = #selector((CLLocationManagerDelegate.locationManager(_:didUpdateLocations:)! as (CLLocationManagerDelegate) -> (CLLocationManager, [CLLocation]) -> Void))
+        return delegate.methodInvoked(sel)
+            .map { args in
+                if let locations = args[1] as? [CLLocation], let location = locations.first {
+                    return VehicleEvent(lat: location.coordinate.latitude, lon: location.coordinate.longitude, speed: location.speed, dir: location.course)
+                } else {
+                    throw NSError(domain: "", code: 0, userInfo: [NSLocalizedDescriptionKey: "Update location error"])
+                }
+            }
+    }
 }
 
-class LocationManager: {
-    static let shared = LocationManager()
+class LocationManager {
+    private static let manager = CLLocationManager()
+    private static let bag = DisposeBag()
     
-    private let manager = CLLocationManager()
-    private let bag = DisposeBag()
-    
-    private func checkPermissions() -> Single<Void> {
+    private static func checkPermissions() -> Single<Void> {
         return Single.create { observer in
             switch CLLocationManager.authorizationStatus() {
             case .authorizedWhenInUse:
@@ -62,21 +64,31 @@ class LocationManager: {
                 break
             case .notDetermined:
                 self.manager.requestWhenInUseAuthorization()
-                _ = self.manager.rx.didChangeAuthorization.first().subscribe(onSuccess: { status in
-                    
+                _ = self.manager.rx.didChangeAuthorization.skip(1).first().subscribe(onSuccess: { result in
+                    if let status = result, status == .authorizedWhenInUse {
+                        observer(.success(()))
+                    } else {
+                        observer(.error(NSError(domain: "", code: 0, userInfo: [NSLocalizedDescriptionKey: "Location permission error"])))
+                    }
                 }, onError: { observer(.error($0)) })
             default:
                 observer(.error(NSError(domain: "", code: 0, userInfo: [NSLocalizedDescriptionKey: "Location permission error"])))
                 break
             }
             
-            return Disposables.create { }
+            return Disposables.create()
         }
     }
     
-    func requestCurrentLocation() -> Single<VehicleEvent> {
-        return self.checkPermissions().map {
-            return VehicleEvent()
-        }
+    private static func requestLocation() -> Single<VehicleEvent> {
+        return self.manager.rx.didUpdateLocations.take(1).asSingle().do(onSubscribed: {
+            DispatchQueue.main.async {
+                self.manager.requestLocation()
+            }
+        })
+    }
+    
+    static func requestCurrentLocation() -> Single<VehicleEvent> {
+        return self.checkPermissions().flatMap(self.requestLocation)
     }
 }
diff --git a/AutoCat/Utils/Recorder.swift b/AutoCat/Utils/Recorder.swift
index 7ede563..7a6e26a 100644
--- a/AutoCat/Utils/Recorder.swift
+++ b/AutoCat/Utils/Recorder.swift
@@ -2,6 +2,7 @@ import Foundation
 import Speech
 import AVFoundation
 import AudioToolbox
+import RxSwift
 
 class Recorder {
 
@@ -25,79 +26,94 @@
     init() {
     }
     
-    func requestPermissions(completion: @escaping (NSError?) -> Void) {
-        AVAudioSession.sharedInstance().requestRecordPermission { allowed in
-            if allowed {
-                SFSpeechRecognizer.requestAuthorization { status in
-                    switch status {
-                    case .authorized:
-                        completion(nil)
-                        break
-                    case .denied:
-                        let error = CocoaError.error("Access denied", suggestion: "Please give permission to use speech recognition in system settings")
-                        completion(error)
-                        break
-                    case .restricted:
-                        let error = CocoaError.error("Access restricted", suggestion: "Speech recognition is restricted on this device")
-                        completion(error)
-                        break
-                    case .notDetermined:
-                        let error = CocoaError.error("Access error", suggestion: "Speech recognition status is not yet determined")
-                        completion(error)
-                        break
-                    @unknown default:
-                        let error = CocoaError.error("Access error", suggestion: "Unknown error accessing speech recognizer")
-                        completion(error)
-                        break
-                    }
-                }
-            } else {
-                let error = CocoaError.error("Access denied", suggestion: "Please give permission to use microphone in system settings")
-                completion(error)
-            }
-        }
-    }
+    func requestPermissions() -> Single<Void> {
+        return Single.create { observer in
+            AVAudioSession.sharedInstance().requestRecordPermission { allowed in
+                if allowed {
+                    SFSpeechRecognizer.requestAuthorization { status in
+                        switch status {
+                        case .authorized:
+                            observer(.success(()))
+                            break
+                        case .denied:
+                            let error = CocoaError.error("Access denied", suggestion: "Please give permission to use speech recognition in system settings")
+                            observer(.error(error))
+                            break
+                        case .restricted:
+                            let error = CocoaError.error("Access restricted", suggestion: "Speech recognition is restricted on this device")
+                            observer(.error(error))
+                            break
+                        case .notDetermined:
+                            let error = CocoaError.error("Access error", suggestion: "Speech recognition status is not yet determined")
+                            observer(.error(error))
+                            break
+                        @unknown default:
+                            let error = CocoaError.error("Access error", suggestion: "Unknown error accessing speech recognizer")
+                            observer(.error(error))
+                            break
+                        }
+                    }
+                } else {
+                    let error = CocoaError.error("Access denied", suggestion: "Please give permission to use microphone in system settings")
+                    observer(.error(error))
+                }
+            }
+            
+            return Disposables.create()
+        }
+    }
     
-    func startRecording(to file: URL, completion: @escaping (String) -> Void) throws {
-        guard let aac = AVAudioFormat(settings: self.recordingSettings) else {
-            throw CocoaError.error("Recording error", suggestion: "Format not supported")
-        }
-        
-        ExtAudioFileCreateWithURL(file as CFURL, kAudioFileM4AType, aac.streamDescription, nil, AudioFileFlags.eraseFile.rawValue, &fileRef)
-        guard let fileRef = self.fileRef else {
-            throw CocoaError.error(CocoaError.Code.fileWriteUnknown)
-        }
-        
-        try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .default, options: [])
-        try AVAudioSession.sharedInstance().setActive(true)
-        
-        let inFormat = self.engine.inputNode.outputFormat(forBus: 0)
-        ExtAudioFileSetProperty(fileRef, kExtAudioFileProperty_ClientDataFormat, UInt32(MemoryLayout<AudioStreamBasicDescription>.size), inFormat.streamDescription)
-        
-        self.engine.inputNode.installTap(onBus: 0, bufferSize: 1024, format: inFormat) { buffer, time in
-            self.request.append(buffer)
-            //print(self.recognitionTask?.state.rawValue)
-            ExtAudioFileWrite(fileRef, buffer.frameLength, buffer.audioBufferList)
-        }
-        
-        self.recognitionTask = self.recognizer!.recognitionTask(with: self.request) { result, error in
-            if let transcription = result?.bestTranscription {
-                self.result = transcription.formattedString
-                self.endRecognitionTimer?.invalidate()
-                self.endRecognitionTimer = Timer.scheduledTimer(withTimeInterval: 2, repeats: false) { timer in
-                    self.finishRecording()
-                    completion(self.result)
-                }
-            }
-        }
-        
-        self.endRecognitionTimer = Timer.scheduledTimer(withTimeInterval: 5, repeats: false) { timer in
-            self.finishRecording()
-            completion(self.result)
-        }
-        
-        self.engine.prepare()
-        try self.engine.start()
-    }
+    func startRecording(to file: URL) -> Single<String> {
+        return Single.create { observer in
+            guard let aac = AVAudioFormat(settings: self.recordingSettings) else {
+                observer(.error(CocoaError.error("Recording error", suggestion: "Format not supported")))
+                return Disposables.create()
+            }
+            
+            ExtAudioFileCreateWithURL(file as CFURL, kAudioFileM4AType, aac.streamDescription, nil, AudioFileFlags.eraseFile.rawValue, &self.fileRef)
+            guard let fileRef = self.fileRef else {
+                observer(.error(CocoaError.error(CocoaError.Code.fileWriteUnknown)))
+                return Disposables.create()
+            }
+            
+            do {
+                try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .default, options: [])
+                try AVAudioSession.sharedInstance().setActive(true)
+                
+                let inFormat = self.engine.inputNode.outputFormat(forBus: 0)
+                ExtAudioFileSetProperty(fileRef, kExtAudioFileProperty_ClientDataFormat, UInt32(MemoryLayout<AudioStreamBasicDescription>.size), inFormat.streamDescription)
+                
+                self.engine.inputNode.installTap(onBus: 0, bufferSize: 1024, format: inFormat) { buffer, time in
+                    self.request.append(buffer)
+                    ExtAudioFileWrite(fileRef, buffer.frameLength, buffer.audioBufferList)
+                }
+                
+                self.recognitionTask = self.recognizer!.recognitionTask(with: self.request) { result, error in
+                    if let transcription = result?.bestTranscription {
+                        self.result = transcription.formattedString
+                        self.endRecognitionTimer?.invalidate()
+                        self.endRecognitionTimer = Timer.scheduledTimer(withTimeInterval: 2, repeats: false) { timer in
+                            self.finishRecording()
+                            observer(.success(self.result))
+                        }
+                    }
+                }
+                
+                self.endRecognitionTimer = Timer.scheduledTimer(withTimeInterval: 5, repeats: false) { timer in
+                    self.finishRecording()
+                    observer(.success(self.result))
+                }
+                
+                self.engine.prepare()
+                try self.engine.start()
+            } catch {
+                observer(.error(error))
+            }
+            
+            return Disposables.create {
+                self.cancelRecording()
+            }
+        }
+    }
     
     func cancelRecording() {