//
// ViewController.swift
// ObjectDetectionSwift
//
// Created by Maitham Dib on 22/02/2016.
// Copyright © 2016 HelloOpenCV. All rights reserved.
//
import UIKit
import AVFoundation
import GLKit
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        if initCamera() {
            // If the camera initialised successfully, start the capture session.
            mySession.startRunning()
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    @IBOutlet weak var imageView: UIImageView!

    // Capture objects from AVFoundation.
    var mySession: AVCaptureSession!
    var myDevice: AVCaptureDevice!
    var myOutput: AVCaptureVideoDataOutput!
    var myFocus: AVCaptureFocusMode!    // Set in initCamera() but never applied to the device.
    // *************************************************************************
    // Compute the projection matrix for the iPhone 6 camera (currently unused).
    // func calculateProjectionMatrix() {
    //     let projectionMatrix = cameraCalibration.createCameraProjection(myDevice, output: self.imageView, zNear: 5.0, zFar: 1000.0)
    //     print("\(projectionMatrix)")
    // }
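    // The commented-out version above depends on a cameraCalibration helper
    // defined elsewhere in the project. As a rough sketch only: a perspective
    // matrix can be approximated from the field of view AVFoundation reports
    // for the active format. The function name, the 16:9 aspect ratio (taken
    // from the 720p preset) and the near/far planes are illustrative
    // assumptions, not values from cameraCalibration. Call only after
    // initCamera() has found a device, e.g.
    // print("\(approximateProjectionMatrix(zNear: 5.0, zFar: 1000.0))").
    func approximateProjectionMatrix(zNear zNear: Float, zFar: Float) -> GLKMatrix4 {
        // videoFieldOfView is the horizontal field of view in degrees.
        let horizontalFOV = GLKMathDegreesToRadians(myDevice.activeFormat.videoFieldOfView)
        let aspect: Float = 16.0 / 9.0
        // GLKit expects the vertical FOV, so derive it from the horizontal one.
        let verticalFOV = 2.0 * atanf(tanf(horizontalFOV / 2.0) / aspect)
        return GLKMatrix4MakePerspective(verticalFOV, aspect, zNear, zFar)
    }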
    // Instance of the image-processing class that does the detection work.
    let detectCars = ImageProcessing()

    // Initialise the camera, session, and output buffers; returns false on failure.
    func initCamera() -> Bool {
        // Create the capture session.
        mySession = AVCaptureSession()
        // Configure the session to deliver 1280x720 (720p) video frames.
        mySession.sessionPreset = AVCaptureSessionPreset1280x720
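        // 720p is presumably a compromise: enough resolution for lane and car
        // detection while keeping per-frame processing cheap enough for the
        // 30 fps cap configured further down.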
        // Get the array of capture devices available on this phone.
        let devices = AVCaptureDevice.devices()
        // Search the array for the back camera and keep it as the device used
        // for object detection; bail out if none is found.
        for device in devices {
            if device.position == AVCaptureDevicePosition.Back {
                myDevice = device as! AVCaptureDevice
            }
        }
        if myDevice == nil {
            return false
        }
        myFocus = AVCaptureFocusMode.AutoFocus
        // Try to create a capture input from the chosen device.
        var myInput: AVCaptureDeviceInput! = nil
        do {
            myInput = try AVCaptureDeviceInput(device: myDevice)
        } catch let error {
            print(error)
            return false
        }
        // Add the input to the session so frames can be captured and annotated
        // with rectangles and circles where objects are recognised.
        if mySession.canAddInput(myInput) {
            mySession.addInput(myInput)
        } else {
            return false
        }
        // Create the video data output and request BGRA pixel buffers.
        myOutput = AVCaptureVideoDataOutput()
        myOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: Int(kCVPixelFormatType_32BGRA)]
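        // 32BGRA is presumably chosen because it maps cheaply both to
        // CGImage/UIImage (iOS's native byte ordering) and to a 4-channel
        // OpenCV Mat inside ImageProcessing.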
        // Acquire the device configuration lock and bound the frame rate.
        do {
            try myDevice.lockForConfiguration()
            myDevice.activeVideoMinFrameDuration = CMTimeMake(1, 30)
            myDevice.activeVideoMaxFrameDuration = CMTimeMake(1, 30)
            myDevice.unlockForConfiguration()
        } catch let error {
            print("lock error: \(error)")
            return false
        }
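        // Setting the min and max frame durations to the same value pins
        // capture at a constant 30 fps (CMTimeMake(1, 30) = 1/30 s per frame),
        // presumably so the optic-flow step sees a uniform time step.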
        // If the delegate queue is blocked when new frames are captured, those
        // frames are dropped automatically (alwaysDiscardsLateVideoFrames below).
        // This lets existing frames be processed on the same queue without the
        // memory growth that would otherwise occur whenever processing cannot
        // keep up with the rate of incoming frames.
        let queue: dispatch_queue_t = dispatch_queue_create("myqueue", nil)
        myOutput.setSampleBufferDelegate(self, queue: queue)
        // See the comment above: keeps late frames from piling up behind the queue.
        myOutput.alwaysDiscardsLateVideoFrames = true
        // Make sure the output can be added to the session before adding it.
        if mySession.canAddOutput(myOutput) {
            mySession.addOutput(myOutput)
        } else {
            return false
        }
        // Force each video connection to landscape-left so captured frames
        // match the orientation the detection code expects.
        for connection in myOutput.connections {
            if let conn = connection as? AVCaptureConnection {
                if conn.supportsVideoOrientation {
                    conn.videoOrientation = AVCaptureVideoOrientation.LandscapeLeft
                }
            }
        }
        return true
    }
    // Delegate callback: invoked once per captured frame on the delegate queue.
    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        // Hop to the main queue synchronously so the sample buffer stays valid
        // while it is converted and displayed.
        dispatch_sync(dispatch_get_main_queue(), {
            // Convert the raw sample buffer into a UIImage.
            let image = CameraUtil.imageFromSampleBuffer(sampleBuffer)
            // Detect lanes and calculate the optic flow, merged into one image.
            let mergedImage = self.detectCars.detectLanesGetOpticFlow(image)
            // let carImage = self.detectCars.recognizeCars(detectedVanishingPoint)
            // let addedLines = self.detectCars.carryOutHough(image)
            // Output the annotated image to the imageView to show the user.
            // self.imageView.contentMode = .ScaleAspectFill
            self.imageView.image = mergedImage
        })
    }
}
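
// CameraUtil is defined elsewhere in this project. As a reference point only,
// a minimal sketch of an imageFromSampleBuffer helper (assuming the 32BGRA
// format configured above, and ignoring orientation handling) could look like
// the following; it is kept commented out to avoid redeclaring the real class.
//
// class CameraUtil {
//     class func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage {
//         let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
//         // Wrap the pixel buffer in a CIImage and render it to a CGImage.
//         let ciImage = CIImage(CVPixelBuffer: pixelBuffer)
//         let context = CIContext()
//         let rect = CGRect(x: 0, y: 0,
//                           width: CGFloat(CVPixelBufferGetWidth(pixelBuffer)),
//                           height: CGFloat(CVPixelBufferGetHeight(pixelBuffer)))
//         let cgImage = context.createCGImage(ciImage, fromRect: rect)
//         return UIImage(CGImage: cgImage)
//     }
// }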