commit 47f06da9152de36546f7516aff5ff4843f4f5f55
parent 53ec47928b001c82d3689ccf1e2b67a8603a629d
Author: therealFIGBERT <figbertwelner@gmail.com>
Date: Tue, 29 Oct 2019 22:41:02 -0700
Implementing initial recording functionality
Diffstat:
9 files changed, 152 insertions(+), 12 deletions(-)
diff --git a/AudioRecorder.swift b/AudioRecorder.swift
@@ -0,0 +1,99 @@
+//
+// AudioRecorder.swift
+// captainsLog
+//
+// Created by Benjamin Welner on 10/29/19.
+// Copyright © 2019 FIGBERT Industries. All rights reserved.
+//
+
+import Foundation
+import SwiftUI
+import Combine
+import AVFoundation
+
+class AudioRecorder: NSObject, ObservableObject {
+
+ override init() {
+ super.init()
+ fetchRecordings()
+ }
+
+ let objectWillChange = PassthroughSubject<AudioRecorder, Never>()
+
+ var audioRecorder: AVAudioRecorder!
+ var recordings = [Recording]()
+ var recording: Bool = false {
+ didSet {
+ objectWillChange.send(self)
+ }
+ }
+ var hasRecorded: Bool = false {
+ didSet {
+ objectWillChange.send(self)
+ }
+ }
+
+ func startRecording() {
+ if (!hasRecorded) {
+ let recordingSession = AVAudioSession.sharedInstance()
+ do {
+ try recordingSession.setCategory(.playAndRecord, mode: .default)
+ try recordingSession.setActive(true)
+ } catch {
+ print("Failed to set up audio session")
+ }
+ let documentPath = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
+ let audioFilename = documentPath.appendingPathComponent("\(Date().toString(dateFormat: "dd-MM-YY_'at'_HH:mm:ss")).m4a")
+ let settings = [
+ AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
+ AVSampleRateKey: 12000,
+ AVNumberOfChannelsKey: 1,
+ AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
+ ]
+ do {
+ audioRecorder = try AVAudioRecorder(url: audioFilename, settings: settings)
+ audioRecorder.record()
+ recording = true
+ hasRecorded = true
+ } catch {
+ print("Could not start recording")
+ }
+ } else {
+ audioRecorder.record()
+ recording = true
+ }
+ }
+
+ func pauseRecording() {
+ audioRecorder.pause()
+ recording = false
+ }
+
+ func endRecording() {
+ audioRecorder.stop()
+ recording = false
+ fetchRecordings()
+ }
+
+ func getCreationDate(for file: URL) -> Date {
+ if let attributes = try? FileManager.default.attributesOfItem(atPath: file.path) as [FileAttributeKey: Any],
+ let creationDate = attributes[FileAttributeKey.creationDate] as? Date {
+ return creationDate
+ } else {
+ return Date()
+ }
+ }
+
+ func fetchRecordings() {
+ recordings.removeAll()
+ let fileManager = FileManager.default
+ let documentDirectory = fileManager.urls(for: .documentDirectory, in: .userDomainMask)[0]
+ let directoryContents = try! fileManager.contentsOfDirectory(at: documentDirectory, includingPropertiesForKeys: nil)
+ for audio in directoryContents {
+ let recording = Recording(fileURL: audio, createdAt: getCreationDate(for: audio))
+ recordings.append(recording)
+ }
+ recordings.sort(by: { $0.createdAt.compare($1.createdAt) == .orderedAscending})
+ objectWillChange.send(self)
+ }
+}
diff --git a/GlobalVars.swift b/GlobalVars.swift
@@ -63,6 +63,7 @@ class GlobalVars: ObservableObject {
[
"title": "Example",
"date": "00 in Forblgartz, 94560",
+ "length": "0:00",
"number": "1"
]
]
diff --git a/captainsLog.xcodeproj/project.pbxproj b/captainsLog.xcodeproj/project.pbxproj
@@ -14,9 +14,11 @@
3A35D6E623579F7C005B7610 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 3A35D6E523579F7C005B7610 /* Assets.xcassets */; };
3A35D6E923579F7C005B7610 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 3A35D6E823579F7C005B7610 /* Preview Assets.xcassets */; };
3A35D6EC23579F7C005B7610 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 3A35D6EA23579F7C005B7610 /* LaunchScreen.storyboard */; };
+ 3A5D1BF423691F6F00677C61 /* AudioRecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3A5D1BF323691F6F00677C61 /* AudioRecorder.swift */; };
3A64E4F52363E98A00B5389D /* GlobalVars.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3A64E4F42363E98A00B5389D /* GlobalVars.swift */; };
3A76A1E8235AD08C00964901 /* Settings.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3A76A1E7235AD08C00964901 /* Settings.swift */; };
3A76A1EA235AE6D500964901 /* Elements.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3A76A1E9235AE6D500964901 /* Elements.swift */; };
+ 3AE17D01236954260098D4BD /* RecordingDataModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3AE17D00236954260098D4BD /* RecordingDataModel.swift */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
@@ -29,9 +31,11 @@
3A35D6E823579F7C005B7610 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = "<group>"; };
3A35D6EB23579F7C005B7610 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
3A35D6ED23579F7C005B7610 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
+ 3A5D1BF323691F6F00677C61 /* AudioRecorder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioRecorder.swift; sourceTree = SOURCE_ROOT; };
3A64E4F42363E98A00B5389D /* GlobalVars.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GlobalVars.swift; sourceTree = SOURCE_ROOT; };
3A76A1E7235AD08C00964901 /* Settings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Settings.swift; sourceTree = SOURCE_ROOT; };
3A76A1E9235AE6D500964901 /* Elements.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Elements.swift; sourceTree = "<group>"; };
+ 3AE17D00236954260098D4BD /* RecordingDataModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RecordingDataModel.swift; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@@ -67,6 +71,8 @@
3A35D6DF23579F7B005B7610 /* AppDelegate.swift */,
3A35D6E123579F7B005B7610 /* SceneDelegate.swift */,
3A35D6E323579F7B005B7610 /* ContentView.swift */,
+ 3A5D1BF323691F6F00677C61 /* AudioRecorder.swift */,
+ 3AE17D00236954260098D4BD /* RecordingDataModel.swift */,
3A76A1E7235AD08C00964901 /* Settings.swift */,
3A2D60F5236801FD004FA1CD /* PreviousRecordings.swift */,
3A76A1E9235AE6D500964901 /* Elements.swift */,
@@ -161,9 +167,11 @@
3A35D6E023579F7B005B7610 /* AppDelegate.swift in Sources */,
3A35D6E223579F7B005B7610 /* SceneDelegate.swift in Sources */,
3A35D6E423579F7B005B7610 /* ContentView.swift in Sources */,
+ 3AE17D01236954260098D4BD /* RecordingDataModel.swift in Sources */,
3A76A1E8235AD08C00964901 /* Settings.swift in Sources */,
3A2D60F6236801FD004FA1CD /* PreviousRecordings.swift in Sources */,
3A64E4F52363E98A00B5389D /* GlobalVars.swift in Sources */,
+ 3A5D1BF423691F6F00677C61 /* AudioRecorder.swift in Sources */,
3A76A1EA235AE6D500964901 /* Elements.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
diff --git a/captainsLog/ContentView.swift b/captainsLog/ContentView.swift
@@ -10,7 +10,7 @@ import SwiftUI
struct ContentView: View {
@EnvironmentObject var globalVars: GlobalVars
- @State private var recording: Bool = false
+ @ObservedObject var audioRecorder: AudioRecorder
@State private var timeElapsed: Double = 0.00
@State private var timer:Timer?
@State private var msg: String = ""
@@ -21,8 +21,8 @@ struct ContentView: View {
Spacer()
VStack {
Button(action: {
- self.recording.toggle()
- if (self.recording) {
+ if (!self.audioRecorder.recording) {
+ self.audioRecorder.startRecording()
self.timer = Timer.scheduledTimer(withTimeInterval: 0.01, repeats: true, block: { timer in
self.timeElapsed += 0.01
if (self.timeElapsed <= 0.75) {
@@ -37,12 +37,13 @@ struct ContentView: View {
}
})
} else {
+ self.audioRecorder.pauseRecording()
self.timer?.invalidate()
}
}) {
Image(
uiImage: UIImage(
- systemName: recording ? "mic.fill" : "mic",
+ systemName: audioRecorder.recording ? "mic.fill" : "mic",
withConfiguration: UIImage.SymbolConfiguration(
pointSize: CGFloat.init(100),
weight: .regular,
@@ -57,7 +58,7 @@ struct ContentView: View {
}
Spacer()
VStack {
- if (self.recording) {
+ if (audioRecorder.recording) {
Text("\(self.msg)")
.font(.system(size: 24, weight: .regular, design: .rounded))
Image(systemName: "waveform")
@@ -70,10 +71,12 @@ struct ContentView: View {
[
"title": "Temporary",
"date": "\(self.globalVars.strDate)",
+ "length": "\(self.timeElapsed)",
"number": "\((Int(self.globalVars.previousRecordings.last!["number"]!)!)+1)"
]
)
self.timeElapsed = 0.00
+ self.audioRecorder.endRecording()
}) {
imgTextButtonStyle(sysImg: "waveform", imgSize: 15, buttonText: "Save")
}
@@ -93,7 +96,7 @@ struct ContentView: View {
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
- ContentView().environmentObject(GlobalVars())
+ ContentView(audioRecorder: AudioRecorder()).environmentObject(GlobalVars())
//.colorScheme(.dark)
}
}
diff --git a/captainsLog/Elements.swift b/captainsLog/Elements.swift
@@ -209,6 +209,11 @@ extension Date {
let day = String(numberFormatter.string(from: NSNumber(value: dayNum)) ?? "day")
return [day, " of ", month, ", ", year]
}
+ func toString( dateFormat format : String ) -> String {
+ let dateFormatter = DateFormatter()
+ dateFormatter.dateFormat = format
+ return dateFormatter.string(from: self)
+ }
}
struct imgTextButtonStyle: View {
diff --git a/captainsLog/Info.plist b/captainsLog/Info.plist
@@ -2,6 +2,8 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
+ <key>NSMicrophoneUsageDescription</key>
+ <string>Captain's Log requires microphone access for audio journal recording</string>
<key>CFBundleDevelopmentRegion</key>
<string>$(DEVELOPMENT_LANGUAGE)</string>
<key>CFBundleExecutable</key>
diff --git a/captainsLog/PreviousRecordings.swift b/captainsLog/PreviousRecordings.swift
@@ -14,11 +14,19 @@ struct PreviousRecordings: View {
Form {
Section {
ForEach(0 ..< globalVars.previousRecordings.count) {pos in
- VStack(alignment: .leading) {
- Text("\(self.globalVars.previousRecordings[pos]["title"] ?? "Title")")
- .font(.headline)
- Text("\(self.globalVars.previousRecordings[pos]["date"] ?? "Date")")
- .font(.caption)
+ HStack {
+ VStack(alignment: .leading) {
+ Text("\(self.globalVars.previousRecordings[pos]["title"] ?? "Title")")
+ .font(.headline)
+ Text("\(self.globalVars.previousRecordings[pos]["date"] ?? "Date")")
+ .font(.caption)
+ }
+ Spacer()
+ VStack(alignment: .trailing) {
+ Text("#\(self.globalVars.previousRecordings[pos]["number"]!)")
+ Text("\(self.globalVars.previousRecordings[pos]["length"]!)")
+ .font(.caption)
+ }
}
}
}
diff --git a/captainsLog/RecordingDataModel.swift b/captainsLog/RecordingDataModel.swift
@@ -0,0 +1,14 @@
+//
+// RecordingDataModel.swift
+// captainsLog
+//
+// Created by Benjamin Welner on 10/29/19.
+// Copyright © 2019 FIGBERT Industries. All rights reserved.
+//
+
+import Foundation
+
+struct Recording {
+ let fileURL: URL
+ let createdAt: Date
+}
diff --git a/captainsLog/SceneDelegate.swift b/captainsLog/SceneDelegate.swift
@@ -20,7 +20,7 @@ class SceneDelegate: UIResponder, UIWindowSceneDelegate {
// This delegate does not imply the connecting scene or session are new (see `application:configurationForConnectingSceneSession` instead).
// Create the SwiftUI view that provides the window contents.
- let contentView = ContentView()
+ let contentView = ContentView(audioRecorder: AudioRecorder())
// Use a UIHostingController as window root view controller.
if let windowScene = scene as? UIWindowScene {