Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactor ios face detector #221

Merged
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
change name
  • Loading branch information
st-tuanmai committed Aug 30, 2023
commit 9a86a5c0a4cf6e29d3b0274872b358eff2a12045
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ import MediaPipeTasksVision
import AVFoundation

/**
This protocol must be adopted by any class that wants to get the detection results of the object detector in live stream mode.
This protocol must be adopted by any class that wants to get the detection results of the face detector in live stream mode.
*/
protocol FaceDetectorServiceLiveStreamDelegate: AnyObject {
func faceDetectorService(_ faceDetectorService: FaceDetectorService,
Expand All @@ -26,7 +26,7 @@ protocol FaceDetectorServiceLiveStreamDelegate: AnyObject {
}

/**
This protocol must be adopted by any class that wants to take appropriate actions during different stages of object detection on videos.
This protocol must be adopted by any class that wants to take appropriate actions during different stages of face detection on videos.
*/
protocol FaceDetectorServiceVideoDelegate: AnyObject {
func faceDetectorService(_ faceDetectorService: FaceDetectorService,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ protocol InferenceViewControllerDelegate: AnyObject {
didSwitchBottomSheetViewState isOpen: Bool)
}

/** The view controller is responsible for presenting the controls to change the meta data for the object detector (model, max results,
/** The view controller is responsible for presenting the controls to change the metadata for the face detector (model, max results,
* score threshold) and updating the singleton ``DetectorMetadata`` on user input.
*/
class BottomSheetViewController: UIViewController {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,17 +29,17 @@ class MediaLibraryViewController: UIViewController {
static let milliSeconds = 1000.0
static let savedPhotosNotAvailableText = "Saved photos album is not available."
static let mediaEmptyText =
"Click + to add an image or a video to begin running the object detection."
"Click + to add an image or a video to begin running face detection."
static let pickFromGalleryButtonInset: CGFloat = 10.0
}
// MARK: Object Detector Service
// MARK: Face Detector Service
weak var interfaceUpdatesDelegate: InterfaceUpdatesDelegate?

// MARK: Controllers that manage functionality
private lazy var pickerController = UIImagePickerController()
private var playerViewController: AVPlayerViewController?

// MARK: Object Detector Service
// MARK: Face Detector Service
private var faceDetectorService: FaceDetectorService?

// MARK: Private properties
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ class RootViewController: UIViewController {
// MARK: View Handling Methods
override func viewDidLoad() {
super.viewDidLoad()
// Create object detector helper
// Create face detector helper

inferenceViewController?.isUIEnabled = true
runningModeTabbar.selectedItem = runningModeTabbar.items?.first
Expand Down