add some unit tests for shared presenter
jack45j committed Feb 21, 2023
1 parent 19555ee commit 0fadc4c
Showing 9 changed files with 356 additions and 74 deletions.
4 changes: 4 additions & 0 deletions BSSpeechRecognizer.xcodeproj/project.pbxproj
@@ -22,6 +22,7 @@
DB86680629A34AA500D22114 /* BSSpeechDisplayView.swift in Sources */ = {isa = PBXBuildFile; fileRef = DB86680529A34AA500D22114 /* BSSpeechDisplayView.swift */; };
DB86680829A34AB400D22114 /* BSSpeechDisplayViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = DB86680729A34AB400D22114 /* BSSpeechDisplayViewModel.swift */; };
DB86680A29A34AEB00D22114 /* BSSpeechWaveViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = DB86680929A34AEB00D22114 /* BSSpeechWaveViewModel.swift */; };
DB86682029A466F600D22114 /* BSSpeechRecognizerConfigurable.swift in Sources */ = {isa = PBXBuildFile; fileRef = DB86681F29A466F600D22114 /* BSSpeechRecognizerConfigurable.swift */; };
/* End PBXBuildFile section */

/* Begin PBXContainerItemProxy section */
@@ -51,6 +52,7 @@
DB86680529A34AA500D22114 /* BSSpeechDisplayView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BSSpeechDisplayView.swift; sourceTree = "<group>"; };
DB86680729A34AB400D22114 /* BSSpeechDisplayViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BSSpeechDisplayViewModel.swift; sourceTree = "<group>"; };
DB86680929A34AEB00D22114 /* BSSpeechWaveViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BSSpeechWaveViewModel.swift; sourceTree = "<group>"; };
DB86681F29A466F600D22114 /* BSSpeechRecognizerConfigurable.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BSSpeechRecognizerConfigurable.swift; sourceTree = "<group>"; };
/* End PBXFileReference section */

/* Begin PBXFrameworksBuildPhase section */
@@ -97,6 +99,7 @@
DB8667BE29A3497600D22114 /* BSSpeechRecognizer.swift */,
DB8667B529A3497600D22114 /* BSSpeechRecognizerAuthorizeManager.swift */,
DB8667BD29A3497600D22114 /* BSWaveVisualizeView.swift */,
DB86681F29A466F600D22114 /* BSSpeechRecognizerConfigurable.swift */,
DB8667B629A3497600D22114 /* Share Presentation */,
);
path = BSSpeechRecognizer;
@@ -245,6 +248,7 @@
DB8667C129A3497600D22114 /* BSSpeechStateView.swift in Sources */,
DB8667C429A3497600D22114 /* BSSpeechErrorViewModel.swift in Sources */,
DB86680829A34AB400D22114 /* BSSpeechDisplayViewModel.swift in Sources */,
DB86682029A466F600D22114 /* BSSpeechRecognizerConfigurable.swift in Sources */,
DB8667BF29A3497600D22114 /* BSSpeechRecognizerAuthorizeManager.swift in Sources */,
DB86680A29A34AEB00D22114 /* BSSpeechWaveViewModel.swift in Sources */,
DB8667C729A3497600D22114 /* BSSpeechRecognizer.swift in Sources */,
@@ -0,0 +1,90 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1420"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "DB86679C29A3491700D22114"
BuildableName = "BSSpeechRecognizer.framework"
BlueprintName = "BSSpeechRecognizer"
ReferencedContainer = "container:BSSpeechRecognizer.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES"
codeCoverageEnabled = "YES"
onlyGenerateCoverageForSpecifiedTargets = "YES">
<CodeCoverageTargets>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "DB86679C29A3491700D22114"
BuildableName = "BSSpeechRecognizer.framework"
BlueprintName = "BSSpeechRecognizer"
ReferencedContainer = "container:BSSpeechRecognizer.xcodeproj">
</BuildableReference>
</CodeCoverageTargets>
<Testables>
<TestableReference
skipped = "NO"
parallelizable = "YES"
testExecutionOrdering = "random">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "DB8667A429A3491700D22114"
BuildableName = "BSSpeechRecognizerTests.xctest"
BlueprintName = "BSSpeechRecognizerTests"
ReferencedContainer = "container:BSSpeechRecognizer.xcodeproj">
</BuildableReference>
</TestableReference>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<MacroExpansion>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "DB86679C29A3491700D22114"
BuildableName = "BSSpeechRecognizer.framework"
BlueprintName = "BSSpeechRecognizer"
ReferencedContainer = "container:BSSpeechRecognizer.xcodeproj">
</BuildableReference>
</MacroExpansion>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
@@ -10,5 +10,18 @@
<integer>0</integer>
</dict>
</dict>
<key>SuppressBuildableAutocreation</key>
<dict>
<key>DB86679C29A3491700D22114</key>
<dict>
<key>primary</key>
<true/>
</dict>
<key>DB8667A429A3491700D22114</key>
<dict>
<key>primary</key>
<true/>
</dict>
</dict>
</dict>
</plist>
42 changes: 8 additions & 34 deletions BSSpeechRecognizer/BSSpeechRecognizer.swift
@@ -11,29 +11,6 @@ import AVFoundation
import Speech
import Accelerate

public protocol BSSpeechRecognizerConfigurable {
var locale: Locale { get set }
var maxRecordDuration: Double { get set }

var waveViewVisibleAnimateDuration: TimeInterval { get set }
}

public struct BSSpeechRecognizerConfiguration: BSSpeechRecognizerConfigurable {
public var locale: Locale
public var maxRecordDuration: Double
public var waveViewVisibleAnimateDuration: TimeInterval

public init(locale: Locale = .init(identifier: "zh-TW"),
maxRecordDuration: Double = 60,
waveViewVisibleAnimateDuration: TimeInterval = 0.5) {
self.locale = locale
self.maxRecordDuration = maxRecordDuration
self.waveViewVisibleAnimateDuration = waveViewVisibleAnimateDuration
}
}



final public class BSSpeechRecognizer: NSObject {

// AudioEngine
@@ -47,7 +24,7 @@ final public class BSSpeechRecognizer: NSObject {

private var autoStopCounter: UInt = 0
private var autoStopPower: Float = 0.25
private var autoStopTimer: UInt = 120
private var autoStopTimer: UInt

private let LEVEL_LOWPASS_TRIG: Float32 = 0.2
private var averagePower: Float = 0
@@ -57,31 +34,30 @@
private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?
private var recognitionTask: SFSpeechRecognitionTask?


public init(presenter: BSSpeechRecognizeWaveViewPresenter, config: any BSSpeechRecognizerConfigurable = BSSpeechRecognizerConfiguration()) {
self.presenter = presenter
self.config = config
self.autoStopTimer = UInt(config.maxRecordDuration / 3)
}

public func start() {
presenter.didChangeSpeechState(to: false)
authorizer.validatePermissions { result in
switch result {
case .success:
self.startRecognition()
case let .failure(error):
self.presenter.didFinishRecognition(with: error)
self.presenter.didFinishRecognition(with: error, animateDuration: self.config.waveViewVisibleAnimateDuration)
}
}
}

private func startRecognition() {
OperationQueue.main.addOperation {
self.presenter.didStartRecognition(0.5)
self.presenter.didStartRecognition(animateDuration: self.config.waveViewVisibleAnimateDuration)
do {
try self.beginRecognition()
} catch {
self.presenter.didFinishRecognition(with: .unknown(error: error))
self.presenter.didFinishRecognition(with: .unknown(error: error), animateDuration: self.config.waveViewVisibleAnimateDuration)
}
}
}
@@ -97,7 +73,7 @@

// Configure the audio session for the app.
if configureAudioSession().isFailure {
self.presenter.didFinishRecognition(with: .audioUnitFailed)
self.presenter.didFinishRecognition(with: .audioUnitFailed, animateDuration: config.waveViewVisibleAnimateDuration)
}

// Create and configure the RecognitionRequest.
@@ -113,8 +89,6 @@
if startAudioEngine().isFailure == false {
autoStopCounter = 0
}

presenter.didChangeSpeechState(to: true)
}

private func cancelPreviousTask() {
Expand All @@ -134,7 +108,7 @@ final public class BSSpeechRecognizer: NSObject {
stopAudioEngine()
cancelRecognitionRequest()
OperationQueue.main.addOperation {
self.presenter.didWaveViewVisible(to: false, duration: 0.5)
self.presenter.didFinishRecognition(animateDuration: self.config.waveViewVisibleAnimateDuration)
}
}

@@ -156,7 +130,7 @@
cancelRecognitionRequest()
stopAudioEngine()
OperationQueue.main.addOperation {
self.presenter.didFinishRecognition(with: error)
self.presenter.didFinishRecognition(with: error, animateDuration: self.config.waveViewVisibleAnimateDuration)
}
}

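The net effect in this file is that every previously hard-coded value — the auto-stop budget and the 0.5 s animation — now flows from the injected configuration. A minimal call-site sketch, assuming a `presenter` has already been wired up elsewhere (its view dependencies are elided, so this is illustrative, not code from the commit):

```swift
// Hypothetical call site; only BSSpeechRecognizerConfiguration and the
// BSSpeechRecognizer initializer are API shown in this commit.
let config = BSSpeechRecognizerConfiguration(
    locale: Locale(identifier: "en-US"),
    maxRecordDuration: 150,              // tap-counter budget, not seconds
    waveViewVisibleAnimateDuration: 0.3  // used by every show/hide animation
)
let recognizer = BSSpeechRecognizer(presenter: presenter, config: config)
recognizer.start()
```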
33 changes: 31 additions & 2 deletions BSSpeechRecognizer/BSSpeechRecognizerAuthorizeManager.swift
@@ -14,7 +14,9 @@ final public class BSSpeechRecognizerAuthorizeManager {

public enum BSSpeechRecognizerError: Error {
public enum AuthorizationReason {
case denied, restricted, usageDescription(missing: BSSpeechRecognizerAuthorizeManager.UsageDescriptionKey)
case denied
case restricted
case usageDescription(missing: BSSpeechRecognizerAuthorizeManager.UsageDescriptionKey)
}
public enum CancellationReason {
case user, notFound
@@ -31,7 +33,14 @@
private let speechRecognitionUsageDescriptionKey: UsageDescriptionKey = UsageDescriptionKey("NSSpeechRecognitionUsageDescription")

public func validatePermissions(_ handler: @escaping ResultHandlerClouse) {
checkSpeechRecognizerAuthorization(handler)
checkSpeechRecognizerAuthorization { result in
switch result {
case .success:
self.checkMicrophoneAuthorization(handler)
case let .failure(error):
handler(.failure(error))
}
}
}

private func checkSpeechRecognizerAuthorization(_ handler: @escaping ResultHandlerClouse) {
@@ -52,4 +61,24 @@
}
}
}

private func checkMicrophoneAuthorization(_ handler: @escaping ResultHandlerClouse) {
guard Bundle.main.object(forInfoDictionaryKey: microphoneUsageDescriptionKey) != nil else {
handler(.failure(.authorization(reason: .usageDescription(missing: microphoneUsageDescriptionKey))))
return
}

switch AVAudioSession.sharedInstance().recordPermission {
case .granted:
return handler(.success(()))
case .denied:
return handler(.failure(.authorization(reason: .denied)))
case .undetermined:
AVAudioSession.sharedInstance().requestRecordPermission { _ in
self.checkMicrophoneAuthorization(handler)
}
@unknown default:
return handler(.failure(.unknown(error: nil)))
}
}
}
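The permission flow is now a two-step chain: speech-recognition authorization first, then microphone permission, with `.undetermined` re-entering `checkMicrophoneAuthorization` once the user answers the system prompt. A short sketch of driving it, assuming the manager's initializer is accessible to callers:

```swift
// Hypothetical caller; BSSpeechRecognizer itself drives this internally.
let authorizer = BSSpeechRecognizerAuthorizeManager()
authorizer.validatePermissions { result in
    switch result {
    case .success:
        // Both usage-description keys exist in Info.plist and both
        // permissions were granted.
        print("ready to record")
    case let .failure(error):
        print("authorization failed: \(error)")
    }
}
```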
32 changes: 32 additions & 0 deletions BSSpeechRecognizer/BSSpeechRecognizerConfigurable.swift
@@ -0,0 +1,32 @@
//
// BSSpeechRecognizerConfigurable.swift
// BSSpeechRecognizer
//
// Created by 林翌埕-20001107 on 2023/2/21.
//

import Foundation

public protocol BSSpeechRecognizerConfigurable {
var locale: Locale { get set }

/// This is NOT a time-based duration.
/// This value behaves more like a counter driven by the AVAudioNode tap.
var maxRecordDuration: Double { get set }

var waveViewVisibleAnimateDuration: TimeInterval { get set }
}

public struct BSSpeechRecognizerConfiguration: BSSpeechRecognizerConfigurable {
public var locale: Locale
public var maxRecordDuration: Double
public var waveViewVisibleAnimateDuration: TimeInterval

public init(locale: Locale = .init(identifier: "zh-TW"),
maxRecordDuration: Double = 300,
waveViewVisibleAnimateDuration: TimeInterval = 0.5) {
self.locale = locale
self.maxRecordDuration = maxRecordDuration
self.waveViewVisibleAnimateDuration = waveViewVisibleAnimateDuration
}
}
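Per the recognizer's initializer shown earlier, the auto-stop budget is derived as `UInt(config.maxRecordDuration / 3)`, so the new default of 300 yields 100 quiet taps before recognition stops. A sketch of both construction paths:

```swift
// Defaults from this commit: zh-TW locale, 300-tap budget, 0.5 s animations.
let defaults = BSSpeechRecognizerConfiguration()

// Custom values: with the derivation above, 90 / 3 = 30 quiet taps
// before auto-stop fires.
let custom = BSSpeechRecognizerConfiguration(
    locale: Locale(identifier: "en-US"),
    maxRecordDuration: 90
)
```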
@@ -13,8 +13,4 @@ public struct BSSpeechErrorViewModel {
static var noError: BSSpeechErrorViewModel {
return BSSpeechErrorViewModel(message: nil)
}

static func error(message: String) -> BSSpeechErrorViewModel {
return BSSpeechErrorViewModel(message: message)
}
}
@@ -20,30 +20,32 @@ final public class BSSpeechRecognizeWaveViewPresenter {
self.stateView = stateView
}

func didChangeSpeechState(to isRecognizing: Bool) {
stateView.display(.init(isRecognizing: isRecognizing))
}

func didRecognize(_ result: String, isFinal: Bool) {
resourceView.display(.init(result: result, isFinal: isFinal))
}

func didStartRecognition(_ duration: TimeInterval) {
didWaveViewVisible(to: true, duration: duration)
}

func didFinishRecognition(with error: BSSpeechRecognizerAuthorizeManager.BSSpeechRecognizerError) {
didWaveViewVisible(to: false, duration: 0.5)
stateView.display(.init(isRecognizing: false))
errorView.display(.error(message: error.localizedDescription))
func didStartRecognition(animateDuration: TimeInterval) {
stateView.display(.init(isRecognizing: true))
errorView.display(.noError)
waveView.updateWithLevel(0.0)
waveView.display(.init(duration: animateDuration, isShow: true))
}

func didWaveViewVisible(to show: Bool, duration: TimeInterval) {
func didFinishRecognition(with error: BSSpeechRecognizerAuthorizeManager.BSSpeechRecognizerError? = nil, animateDuration: TimeInterval) {
waveView.updateWithLevel(0.0)
waveView.display(.init(duration: duration, isShow: show))
waveView.display(.init(duration: animateDuration, isShow: false))

stateView.display(.init(isRecognizing: false))
errorView.display(.init(message: error?.localizedDescription))
}

func updateWithLevel(_ level: CGFloat) {
waveView.updateWithLevel(level)
if level < 0 {
waveView.updateWithLevel(0)
} else if level > 1 {
waveView.updateWithLevel(1)
} else {
waveView.updateWithLevel(level)
}
}
}
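The commit message promises unit tests for this shared presenter, but the test files did not render in this diff view. A sketch of the kind of test the new clamping logic invites — the spy type, its conformance, and the `makePresenter` factory are assumptions, not code from this commit:

```swift
import XCTest
@testable import BSSpeechRecognizer

// Hypothetical spy; the presenter's wave-view protocol is not visible
// in this diff, so the shape of the conformance is assumed.
final class WaveViewSpy {
    private(set) var levels: [CGFloat] = []
    func updateWithLevel(_ level: CGFloat) { levels.append(level) }
    func display(_ viewModel: BSSpeechWaveViewModel) {}
}

final class BSSpeechRecognizeWaveViewPresenterTests: XCTestCase {
    func test_updateWithLevel_clampsIntoUnitRange() {
        let waveSpy = WaveViewSpy()
        // Hypothetical factory that wires the remaining view doubles.
        let sut = makePresenter(waveView: waveSpy)

        sut.updateWithLevel(-0.3)
        XCTAssertEqual(waveSpy.levels.last, 0)

        sut.updateWithLevel(1.7)
        XCTAssertEqual(waveSpy.levels.last, 1)

        sut.updateWithLevel(0.4)
        XCTAssertEqual(waveSpy.levels.last, 0.4)
    }
}
```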