Commit: Split VCs delegates

xzzz9097 committed Jul 3, 2017
1 parent ca6d5fc commit 7f28356

Showing 4 changed files with 101 additions and 75 deletions.
8 changes: 8 additions & 0 deletions Machine.xcodeproj/project.pbxproj
@@ -12,6 +12,8 @@
0C26FE431F0526B400E56DD8 /* VNRequestCaptureDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0C26FE421F0526B400E56DD8 /* VNRequestCaptureDelegate.swift */; };
0C813BEB1F09509C00A9CCFF /* Status.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0C813BEA1F09509C00A9CCFF /* Status.swift */; };
0C813BFC1F0A85AB00A9CCFF /* ObservationTag.h in Sources */ = {isa = PBXBuildFile; fileRef = 0C813BFB1F0A85AB00A9CCFF /* ObservationTag.h */; };
0C813BFE1F0A9F0300A9CCFF /* ViewController+VNDetectedObject.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0C813BFD1F0A9F0300A9CCFF /* ViewController+VNDetectedObject.swift */; };
0C813C001F0A9FA500A9CCFF /* ViewController+VNClassificationObservation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0C813BFF1F0A9FA500A9CCFF /* ViewController+VNClassificationObservation.swift */; };
0C83D70C1F08E9E600D651FC /* Resnet50.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 0C83D70B1F08E9E600D651FC /* Resnet50.mlmodel */; };
0C83D70E1F09084700D651FC /* VNClassificationObservationHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0C83D70D1F09084700D651FC /* VNClassificationObservationHandler.swift */; };
0C9A215B1F03E16F00861B50 /* EmojiView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0C9A215A1F03E16F00861B50 /* EmojiView.swift */; };
@@ -35,6 +37,8 @@
0C813BEA1F09509C00A9CCFF /* Status.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Status.swift; sourceTree = "<group>"; };
0C813BFA1F0A85AB00A9CCFF /* Machine-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "Machine-Bridging-Header.h"; sourceTree = "<group>"; };
0C813BFB1F0A85AB00A9CCFF /* ObservationTag.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ObservationTag.h; sourceTree = "<group>"; };
0C813BFD1F0A9F0300A9CCFF /* ViewController+VNDetectedObject.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "ViewController+VNDetectedObject.swift"; sourceTree = "<group>"; };
0C813BFF1F0A9FA500A9CCFF /* ViewController+VNClassificationObservation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "ViewController+VNClassificationObservation.swift"; sourceTree = "<group>"; };
0C83D70B1F08E9E600D651FC /* Resnet50.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; name = Resnet50.mlmodel; path = ../../../Downloads/Resnet50.mlmodel; sourceTree = "<group>"; };
0C83D70D1F09084700D651FC /* VNClassificationObservationHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VNClassificationObservationHandler.swift; sourceTree = "<group>"; };
0C9A215A1F03E16F00861B50 /* EmojiView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EmojiView.swift; sourceTree = "<group>"; };
@@ -119,6 +123,8 @@
0C813BEA1F09509C00A9CCFF /* Status.swift */,
0CB13FF21F01B3AC00D3F18A /* ViewController.swift */,
0C813BFB1F0A85AB00A9CCFF /* ObservationTag.h */,
0C813BFD1F0A9F0300A9CCFF /* ViewController+VNDetectedObject.swift */,
0C813BFF1F0A9FA500A9CCFF /* ViewController+VNClassificationObservation.swift */,
0C813BE81F094FF500A9CCFF /* Delegates */,
0C813BE71F094F6C00A9CCFF /* Views */,
0C83D70B1F08E9E600D651FC /* Resnet50.mlmodel */,
@@ -212,12 +218,14 @@
0C050C361F0699E40091B8BA /* VNDetectedObjectHandler.swift in Sources */,
0C9A215B1F03E16F00861B50 /* EmojiView.swift in Sources */,
0C9A21661F03E61B00861B50 /* FaceView.swift in Sources */,
0C813C001F0A9FA500A9CCFF /* ViewController+VNClassificationObservation.swift in Sources */,
0C9A21601F03E35F00861B50 /* NSTextField.swift in Sources */,
0C26FE431F0526B400E56DD8 /* VNRequestCaptureDelegate.swift in Sources */,
0C9A21621F03E55D00861B50 /* NSColor.swift in Sources */,
0C9A215D1F03E2A100861B50 /* NSView.swift in Sources */,
0C813BEB1F09509C00A9CCFF /* Status.swift in Sources */,
0C9A21641F03E59400861B50 /* CGRect.swift in Sources */,
0C813BFE1F0A9F0300A9CCFF /* ViewController+VNDetectedObject.swift in Sources */,
0C050C331F0679B80091B8BA /* VNRequestResultHandler.swift in Sources */,
0C83D70E1F09084700D651FC /* VNClassificationObservationHandler.swift in Sources */,
0C813BFC1F0A85AB00A9CCFF /* ObservationTag.h in Sources */,
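The four project.pbxproj hunks are the standard Xcode bookkeeping for the two new source files: each extension gets a PBXBuildFile entry, a PBXFileReference, a place in the source group, and a line in the Sources build phase, so both files compile as part of the Machine target.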
32 changes: 32 additions & 0 deletions Machine/ViewController+VNClassificationObservation.swift
@@ -0,0 +1,32 @@
//
// ViewController+VNClassificationObservation.swift
// Machine
//
// Created by lyrae on 03/07/2017.
// Copyright © 2017 lyrae. All rights reserved.
//

import Vision

extension ViewController: VNClassificationObservationDelegate {

    func didReceiveClassificationObservations(tag: ObservationTag,
                                              _ observations: [VNClassificationObservation]) {
        let classifications = observations.prefix(5) // top 5 results
            .filter { $0.confidence > 0.3 }
            .map { "\($0.identifier) \(($0.confidence * 100.0).rounded())" }

        guard let first = classifications.first else {
            DispatchQueue.main.async {
                self.status.components[.classificationObservation] = StatusComponent.classificationObservation.defaultValue
            }

            return
        }

        DispatchQueue.main.async {
            self.status.components[.classificationObservation] = String(describing: first)
        }
    }

}
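The VNClassificationObservationDelegate protocol itself is not part of this diff; it is declared elsewhere in the target (alongside VNClassificationObservationHandler.swift, per the project file). A plausible sketch, inferred only from the conformance above, with ObservationTag coming from the bridged ObservationTag.h:

import Vision

// Hypothetical reconstruction, not shown in this commit: a delegate protocol
// whose single requirement matches the method implemented by the extension above.
protocol VNClassificationObservationDelegate: class {
    func didReceiveClassificationObservations(tag: ObservationTag,
                                              _ observations: [VNClassificationObservation])
}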
61 changes: 61 additions & 0 deletions Machine/ViewController+VNDetectedObject.swift
@@ -0,0 +1,61 @@
//
// ViewController+VNDetectedObject.swift
// Machine
//
// Created by lyrae on 03/07/2017.
// Copyright © 2017 lyrae. All rights reserved.
//

import Cocoa

extension ViewController: VNDetectedObjectDelegate {

    func didReceiveBoundingBoxes(tag: ObservationTag,
                                 _ boxes: [NSRect]) {
        let delta = boxes.count - faceViews.count

        if delta != 0 {
            DispatchQueue.main.async {
                switch boxes.count {
                case 0:
                    self.status.components[.faceDetection] = StatusComponent.faceDetection.defaultValue
                default:
                    self.status.components[.faceDetection] = "\(boxes.count) 😀 detected"
                }
            }
        }

        if delta > 0 {
            for _ in 0..<delta {
                DispatchQueue.main.async {
                    self.faceViews.append(FaceView(frame: NSRect(),
                                                   hiddenFace: self.hideFace))
                }
            }
        } else if delta < 0 {
            for _ in 0..<abs(delta) {
                DispatchQueue.main.async {
                    if !self.faceViews.isEmpty { self.faceViews.removeLast() }
                }
            }
        }

        if boxes.isEmpty {
            resetFaceViews()
            return
        }

        DispatchQueue.main.async {
            self.faceViews = self.faceViews.sorted { $0.frame.minX < $1.frame.minX }
        }

        for (box, view) in zip(boxes, faceViews) {
            DispatchQueue.main.async {
                view.updateFrame(to: box.scaled(
                    width: self.cameraView.bounds.width,
                    height: self.cameraView.bounds.height
                ))
            }
        }
    }

}
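box.scaled(width:height:) is declared in CGRect.swift, which is in the target but untouched by this commit. Vision reports bounding boxes in a normalized [0, 1] coordinate space, so a minimal sketch of that helper, assuming a plain per-axis multiply, would be:

import Foundation

extension CGRect {
    // Hypothetical sketch: map a normalized Vision rect into view coordinates.
    func scaled(width: CGFloat, height: CGFloat) -> CGRect {
        return CGRect(x: origin.x * width,
                      y: origin.y * height,
                      width: size.width * width,
                      height: size.height * height)
    }
}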
75 changes: 0 additions & 75 deletions Machine/ViewController.swift
@@ -18,8 +18,6 @@ extension ObservationTag {
}

class ViewController: NSViewController,
                      VNDetectedObjectDelegate,
                      VNClassificationObservationDelegate,
                      NSWindowDelegate {

    var requestDelegate = VNRequestCaptureDelegate.default
@@ -188,79 +186,6 @@ class ViewController: NSViewController,
        }
    }

    // MARK: VNDetectedObjectDelegate

    func didReceiveBoundingBoxes(tag: ObservationTag,
                                 _ boxes: [NSRect]) {
        let delta = boxes.count - faceViews.count

        if delta != 0 {
            DispatchQueue.main.async {
                switch boxes.count {
                case 0:
                    self.status.components[.faceDetection] = StatusComponent.faceDetection.defaultValue
                default:
                    self.status.components[.faceDetection] = "\(boxes.count) 😀 detected"
                }
            }
        }

        if delta > 0 {
            for _ in 0..<delta {
                DispatchQueue.main.async {
                    self.faceViews.append(FaceView(frame: NSRect(),
                                                   hiddenFace: self.hideFace))
                }
            }
        } else if delta < 0 {
            for _ in 0..<abs(delta) {
                DispatchQueue.main.async {
                    if !self.faceViews.isEmpty { self.faceViews.removeLast() }
                }
            }
        }

        if boxes.isEmpty {
            resetFaceViews()
            return
        }

        DispatchQueue.main.async {
            self.faceViews = self.faceViews.sorted { $0.frame.minX < $1.frame.minX }
        }

        for (box, view) in zip(boxes, faceViews) {
            DispatchQueue.main.async {
                view.updateFrame(to: box.scaled(
                    width: self.cameraView.bounds.width,
                    height: self.cameraView.bounds.height
                ))
            }
        }
    }

    // MARK: VNClassificationObservationDelegate

    func didReceiveClassificationObservations(tag: ObservationTag,
                                              _ observations: [VNClassificationObservation]) {
        let classifications = observations.prefix(5) // top 5 results
            .filter { $0.confidence > 0.3 }
            .map { "\($0.identifier) \(($0.confidence * 100.0).rounded())" }

        guard let first = classifications.first else {
            DispatchQueue.main.async {
                self.status.components[.classificationObservation] = StatusComponent.classificationObservation.defaultValue
            }

            return
        }

        DispatchQueue.main.async {
            self.status.components[.classificationObservation] = String(describing: first)
        }
    }

    // MARK: NSWindowDelegate

    func windowDidChangeOcclusionState(_ notification: Notification) {
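Net effect: ViewController.swift keeps the view-controller and NSWindowDelegate responsibilities, and each Vision delegate conformance now lives in its own extension file. In outline (bodies elided):

// Machine/ViewController.swift
class ViewController: NSViewController, NSWindowDelegate { /* lifecycle, status, windowing */ }

// Machine/ViewController+VNDetectedObject.swift
extension ViewController: VNDetectedObjectDelegate { /* bounding boxes to FaceViews */ }

// Machine/ViewController+VNClassificationObservation.swift
extension ViewController: VNClassificationObservationDelegate { /* top classifications to status text */ }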
