Commit d3b4ccc1 by Daniel Dahan

decoupling views for CaptureView

parent 61a63660
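The visible core of this change: CapturePreviewView drops its tap-to-focus/expose/reset gestures and delegate protocol and becomes layer-backed by AVCaptureVideoPreviewLayer via a layerClass() override, CaptureSession moves its work onto a serial sessionQueue, and a new CameraView.swift joins the Capture group (its contents are not shown in this diff). A minimal sketch of the layer-backing pattern, using the Swift 2 / iOS 9 era API seen throughout the diff; PreviewViewSketch is a hypothetical stand-in, not the project's class:

```swift
import UIKit
import AVFoundation

// Minimal sketch: backing the view with AVCaptureVideoPreviewLayer removes the
// need to manage and lay out a separate previewLayer sublayer by hand.
public class PreviewViewSketch : UIView {
	// The view's backing layer is now the preview layer itself.
	public override class func layerClass() -> AnyClass {
		return AVCaptureVideoPreviewLayer.self
	}

	// Convenience accessor; the cast is safe because of layerClass() above.
	public var previewLayer: AVCaptureVideoPreviewLayer {
		return layer as! AVCaptureVideoPreviewLayer
	}
}
```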
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="9060" systemVersion="15B42" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="9531" systemVersion="15B42" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="9051"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="9529"/>
</dependencies>
<scenes>
<!--View Controller-->
@@ -19,21 +19,17 @@
<subviews>
<button opaque="NO" contentMode="scaleToFill" fixedFrame="YES" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="g8F-Xx-oIC" customClass="RaisedButton" customModule="MaterialKit">
<rect key="frame" x="107" y="207" width="200" height="65"/>
<animations/>
<state key="normal" title="Button"/>
</button>
<button opaque="NO" contentMode="scaleToFill" fixedFrame="YES" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="KGx-lb-zep" customClass="FlatButton" customModule="MaterialKit">
<rect key="frame" x="107" y="107" width="200" height="65"/>
<animations/>
<state key="normal" title="Button"/>
</button>
<button opaque="NO" contentMode="scaleToFill" fixedFrame="YES" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="ZEH-7f-aTd" customClass="FabButton" customModule="MaterialKit">
<rect key="frame" x="175" y="315" width="64" height="64"/>
<animations/>
<color key="backgroundColor" red="1" green="0.0" blue="0.0" alpha="1" colorSpace="calibratedRGB"/>
</button>
</subviews>
<animations/>
<color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
</view>
<simulatedScreenMetrics key="simulatedDestinationMetrics" type="retina47"/>
......
@@ -79,6 +79,7 @@
96D88C731C132ACC00B91418 /* MaterialAnimation.swift in Headers */ = {isa = PBXBuildFile; fileRef = 96D88BFF1C1328D800B91418 /* MaterialAnimation.swift */; settings = {ATTRIBUTES = (Public, ); }; };
96D88C741C132ACC00B91418 /* MaterialBasicAnimation.swift in Headers */ = {isa = PBXBuildFile; fileRef = 96D88C001C1328D800B91418 /* MaterialBasicAnimation.swift */; settings = {ATTRIBUTES = (Public, ); }; };
96D88C751C132AD500B91418 /* NavigationBarView.swift in Headers */ = {isa = PBXBuildFile; fileRef = 96D88C151C1328D800B91418 /* NavigationBarView.swift */; settings = {ATTRIBUTES = (Public, ); }; };
96F367031C20B87D00DD91F4 /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 96F367021C20B87D00DD91F4 /* CameraView.swift */; };
/* End PBXBuildFile section */
/* Begin PBXContainerItemProxy section */
@@ -136,6 +137,7 @@
96D88C1B1C1328D800B91418 /* Roboto-Thin.ttf */ = {isa = PBXFileReference; lastKnownFileType = file; path = "Roboto-Thin.ttf"; sourceTree = "<group>"; };
96D88C1C1C1328D800B91418 /* RobotoFont.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RobotoFont.swift; sourceTree = "<group>"; };
96D88C1D1C1328D800B91418 /* SideNavigationViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SideNavigationViewController.swift; sourceTree = "<group>"; };
96F367021C20B87D00DD91F4 /* CameraView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraView.swift; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@@ -233,6 +235,7 @@
96D88C491C13292700B91418 /* Capture */ = {
isa = PBXGroup;
children = (
96F367021C20B87D00DD91F4 /* CameraView.swift */,
96D88BF51C1328D800B91418 /* CaptureView.swift */,
96D88BF71C1328D800B91418 /* CapturePreviewView.swift */,
96D88BF81C1328D800B91418 /* CaptureSession.swift */,
@@ -515,6 +518,7 @@
96D88C3F1C1328D800B91418 /* RaisedButton.swift in Sources */,
96D88C3C1C1328D800B91418 /* MaterialTransitionAnimation.swift in Sources */,
96D88C361C1328D800B91418 /* MaterialPulseView.swift in Sources */,
96F367031C20B87D00DD91F4 /* CameraView.swift in Sources */,
96D88C1E1C1328D800B91418 /* CaptureView.swift in Sources */,
96D88C2D1C1328D800B91418 /* MaterialDepth.swift in Sources */,
96D88C331C1328D800B91418 /* MaterialLabel.swift in Sources */,
......
@@ -19,152 +19,26 @@
import UIKit
import AVFoundation
@objc(CapturePreviewViewDelegate)
public protocol CapturePreviewViewDelegate : MaterialDelegate {
public class CapturePreviewView : MaterialView {
/**
:name: capturePreviewViewDidTapToFocusAtPoint
:name: layerClass
*/
optional func capturePreviewViewDidTapToFocusAtPoint(capturePreviewView: CapturePreviewView, point: CGPoint)
/**
:name: capturePreviewViewDidTapToExposeAtPoint
*/
optional func capturePreviewViewDidTapToExposeAtPoint(capturePreviewView: CapturePreviewView, point: CGPoint)
/**
:name: capturePreviewViewDidTapToResetAtPoint
*/
optional func capturePreviewViewDidTapToResetAtPoint(capturePreviewView: CapturePreviewView, point: CGPoint)
}
public class CapturePreviewView : MaterialView, UIGestureRecognizerDelegate {
/**
:name: tapToFocusGesture
*/
private var tapToFocusGesture: UITapGestureRecognizer?
/**
:name: tapToExposeGesture
*/
private var tapToExposeGesture: UITapGestureRecognizer?
/**
:name: tapToResetGesture
*/
private var tapToResetGesture: UITapGestureRecognizer?
/**
:name: previewLayer
*/
public private(set) lazy var previewLayer: AVCaptureVideoPreviewLayer = AVCaptureVideoPreviewLayer()
/**
:name: capture
*/
public private(set) lazy var captureSession: CaptureSession = CaptureSession()
/**
:name: tapToFocusEnabled
*/
public var tapToFocusEnabled: Bool {
didSet {
if tapToFocusEnabled {
tapToResetEnabled = true
prepareTapGesture(&tapToFocusGesture, numberOfTapsRequired: 1, numberOfTouchesRequired: 1, selector: "handleTapToFocusGesture:")
if let v: UITapGestureRecognizer = tapToExposeGesture {
tapToFocusGesture!.requireGestureRecognizerToFail(v)
}
} else {
removeTapGesture(&tapToFocusGesture)
}
}
}
/**
:name: tapToExposeEnabled
*/
public var tapToExposeEnabled: Bool {
didSet {
if tapToExposeEnabled {
tapToResetEnabled = true
prepareTapGesture(&tapToExposeGesture, numberOfTapsRequired: 2, numberOfTouchesRequired: 1, selector: "handleTapToExposeGesture:")
if let v: UITapGestureRecognizer = tapToFocusGesture {
v.requireGestureRecognizerToFail(tapToExposeGesture!)
}
} else {
removeTapGesture(&tapToExposeGesture)
}
}
}
/**
:name: tapToResetEnabled
*/
public var tapToResetEnabled: Bool {
didSet {
if tapToResetEnabled {
prepareTapGesture(&tapToResetGesture, numberOfTapsRequired: 2, numberOfTouchesRequired: 2, selector: "handleTapToResetGesture:")
if let v: UITapGestureRecognizer = tapToFocusGesture {
v.requireGestureRecognizerToFail(tapToResetGesture!)
}
if let v: UITapGestureRecognizer = tapToExposeGesture {
v.requireGestureRecognizerToFail(tapToResetGesture!)
}
} else {
removeTapGesture(&tapToResetGesture)
}
}
}
/**
:name: init
*/
public required init?(coder aDecoder: NSCoder) {
tapToFocusEnabled = true
tapToExposeEnabled = true
tapToResetEnabled = true
super.init(coder: aDecoder)
}
/**
:name: init
*/
public override init(frame: CGRect) {
tapToFocusEnabled = true
tapToExposeEnabled = true
tapToResetEnabled = true
super.init(frame: frame)
}
/**
:name: init
*/
public convenience init() {
self.init(frame: CGRectNull)
}
/**
:name: layoutSublayersOfLayer
*/
public override func layoutSublayersOfLayer(layer: CALayer) {
super.layoutSublayersOfLayer(layer)
if self.layer == layer {
layoutPreviewLayer()
}
public override class func layerClass() -> AnyClass {
return AVCaptureVideoPreviewLayer.self
}
/**
:name: captureDevicePointOfInterestForPoint
*/
public func captureDevicePointOfInterestForPoint(point: CGPoint) -> CGPoint {
return previewLayer.captureDevicePointOfInterestForPoint(point)
return (layer as! AVCaptureVideoPreviewLayer).captureDevicePointOfInterestForPoint(point)
}
/**
:name: pointForCaptureDevicePointOfInterest
*/
public func pointForCaptureDevicePointOfInterest(point: CGPoint) -> CGPoint {
return previewLayer.pointForCaptureDevicePointOfInterest(point)
return (layer as! AVCaptureVideoPreviewLayer).pointForCaptureDevicePointOfInterest(point)
}
/**
@@ -173,79 +47,15 @@ public class CapturePreviewView : MaterialView, UIGestureRecognizerDelegate {
public override func prepareView() {
super.prepareView()
preparePreviewLayer()
tapToFocusEnabled = true
tapToExposeEnabled = true
}
/**
:name: handleTapToFocusGesture
*/
internal func handleTapToFocusGesture(recognizer: UITapGestureRecognizer) {
if tapToFocusEnabled && captureSession.cameraSupportsTapToFocus {
let point: CGPoint = recognizer.locationInView(self)
captureSession.focusAtPoint(captureDevicePointOfInterestForPoint(point))
(delegate as? CapturePreviewViewDelegate)?.capturePreviewViewDidTapToFocusAtPoint?(self, point: point)
}
}
/**
:name: handleTapToExposeGesture
*/
internal func handleTapToExposeGesture(recognizer: UITapGestureRecognizer) {
if tapToExposeEnabled && captureSession.cameraSupportsTapToExpose {
let point: CGPoint = recognizer.locationInView(self)
captureSession.exposeAtPoint(captureDevicePointOfInterestForPoint(point))
(delegate as? CapturePreviewViewDelegate)?.capturePreviewViewDidTapToExposeAtPoint?(self, point: point)
}
}
/**
:name: handleTapToResetGesture
*/
internal func handleTapToResetGesture(recognizer: UITapGestureRecognizer) {
if tapToResetEnabled {
captureSession.resetFocusAndExposureModes()
(delegate as? CapturePreviewViewDelegate)?.capturePreviewViewDidTapToResetAtPoint?(self, point: pointForCaptureDevicePointOfInterest(CGPointMake(0.5, 0.5)))
}
}
/**
:name: preparePreviewLayer
*/
private func preparePreviewLayer() {
previewLayer.session = captureSession.session
visualLayer.addSublayer(previewLayer)
}
/**
:name: layoutPreviewLayer
*/
private func layoutPreviewLayer() {
previewLayer.frame = visualLayer.bounds
previewLayer.position = CGPointMake(width / 2, height / 2)
previewLayer.cornerRadius = visualLayer.cornerRadius
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
}
/**
:name: prepareTapGesture
*/
private func prepareTapGesture(inout gesture: UITapGestureRecognizer?, numberOfTapsRequired: Int, numberOfTouchesRequired: Int, selector: Selector) {
removeTapGesture(&gesture)
gesture = UITapGestureRecognizer(target: self, action: selector)
gesture!.delegate = self
gesture!.numberOfTapsRequired = numberOfTapsRequired
gesture!.numberOfTouchesRequired = numberOfTouchesRequired
addGestureRecognizer(gesture!)
}
/**
:name: removeTapToFocusGesture
*/
private func removeTapGesture(inout gesture: UITapGestureRecognizer?) {
if let v: UIGestureRecognizer = gesture {
removeGestureRecognizer(v)
gesture = nil
}
layer.addAnimation(MaterialAnimation.transition(.Fade), forKey: kCATransition)
layer.backgroundColor = MaterialColor.black.CGColor
layer.masksToBounds = true
(layer as! AVCaptureVideoPreviewLayer).videoGravity = AVLayerVideoGravityResizeAspectFill
}
}
\ No newline at end of file
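A hedged usage sketch of the slimmed-down CapturePreviewView, using only the API visible above (the two point-conversion helpers and the AVCaptureVideoPreviewLayer backing layer). The explicit session wiring and the init(frame:) call are assumptions — the new preparePreviewLayer body is partly collapsed here and may already attach the session internally:

```swift
import UIKit
import AVFoundation
import MaterialKit

let previewView: CapturePreviewView = CapturePreviewView(frame: CGRectMake(0, 0, 320, 480))
let session: AVCaptureSession = AVCaptureSession()

// The backing layer is the preview layer, so no sublayer layout is required.
(previewView.layer as! AVCaptureVideoPreviewLayer).session = session

// Translate a tap location in view coordinates into a normalized (0...1)
// device point of interest, suitable for focus/exposure APIs.
let tapPoint: CGPoint = CGPointMake(160, 240)
let devicePoint: CGPoint = previewView.captureDevicePointOfInterestForPoint(tapPoint)

// And back again, e.g. to place a focus indicator over the point of interest.
let centerInView: CGPoint = previewView.pointForCaptureDevicePointOfInterest(CGPointMake(0.5, 0.5))
```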
@@ -123,9 +123,9 @@ public protocol CaptureSessionDelegate {
@objc(CaptureSession)
public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
/**
:name: videoQueue
:name: sessionQueue
*/
private lazy var videoQueue: dispatch_queue_t = dispatch_queue_create("io.materialkit.CaptureSession", nil)
private lazy var sessionQueue: dispatch_queue_t = dispatch_queue_create("io.materialkit.CaptureSession", DISPATCH_QUEUE_SERIAL)
/**
:name: activeVideoInput
@@ -182,15 +182,6 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
}
/**
:name: init
*/
public override init() {
sessionPreset = .PresetHigh
super.init()
prepareSession()
}
/**
:name: inactiveCamera
*/
public var inactiveCamera: AVCaptureDevice? {
@@ -368,11 +359,20 @@ public class CaptureSession : NSObject, AVCaptureFileOutputRecordingDelegate {
public weak var delegate: CaptureSessionDelegate?
/**
:name: init
*/
public override init() {
sessionPreset = .PresetHigh
super.init()
prepareSession()
}
/**
:name: startSession
*/
public func startSession() {
if !isRunning {
dispatch_async(videoQueue) {
dispatch_async(sessionQueue) {
self.session.startRunning()
}
}
@@ -383,7 +383,7 @@
*/
public func stopSession() {
if isRunning {
dispatch_async(videoQueue) {
dispatch_async(sessionQueue) {
self.session.stopRunning()
}
}
@@ -542,9 +542,10 @@
:name: captureStillImage
*/
public func captureStillImage() {
let connection: AVCaptureConnection = imageOutput.connectionWithMediaType(AVMediaTypeVideo)
connection.videoOrientation = currentVideoOrientation
imageOutput.captureStillImageAsynchronouslyFromConnection(connection) { (sampleBuffer: CMSampleBuffer!, error: NSError!) -> Void in
dispatch_async(sessionQueue) {
let connection: AVCaptureConnection = self.imageOutput.connectionWithMediaType(AVMediaTypeVideo)
connection.videoOrientation = self.currentVideoOrientation
self.imageOutput.captureStillImageAsynchronouslyFromConnection(connection) { (sampleBuffer: CMSampleBuffer!, error: NSError!) -> Void in
if nil == error {
let data: NSData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
self.delegate?.captureStillImageAsynchronously?(self, image: UIImage(data: data)!)
@@ -553,12 +554,14 @@
}
}
}
}
/**
:name: startRecording
*/
public func startRecording() {
if !self.isRecording {
dispatch_async(sessionQueue) {
let connection: AVCaptureConnection = self.movieOutput.connectionWithMediaType(AVMediaTypeVideo)
connection.videoOrientation = self.currentVideoOrientation
connection.preferredVideoStabilizationMode = .Auto
@@ -580,6 +583,7 @@
}
}
}
}
/**
:name: captureOutput
......
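Note that dispatch_queue_create with a nil attribute already produced a serial queue, so passing DISPATCH_QUEUE_SERIAL mainly makes the intent explicit; the more substantive change is that captureStillImage and startRecording now hop onto that same queue, serializing them with startSession and stopSession. A hedged usage sketch using only the public API visible in this diff (results come back through the weak CaptureSessionDelegate shown above, whose full declaration is collapsed here):

```swift
import MaterialKit

let captureSession: CaptureSession = CaptureSession()

captureSession.startSession()      // enqueues session.startRunning() on the serial sessionQueue
captureSession.captureStillImage() // enqueued behind startRunning, so it executes after it

// ... later, e.g. when leaving the screen:
captureSession.stopSession()       // enqueues session.stopRunning() if the session is running
```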
@@ -49,8 +49,45 @@ public func MaterialAnimationFillModeToValue(mode: MaterialAnimationFillMode) ->
}
}
public typealias MaterialAnimationDelayCancelBlock = (cancel : Bool) -> Void
public struct MaterialAnimation {
/**
:name: delay
*/
public static func delay(time: NSTimeInterval, completion: ()-> Void) -> MaterialAnimationDelayCancelBlock? {
func dispatch_later(completion: ()-> Void) {
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, Int64(time * Double(NSEC_PER_SEC))), dispatch_get_main_queue(), completion)
}
var cancelable: MaterialAnimationDelayCancelBlock?
let delayed: MaterialAnimationDelayCancelBlock = { cancel in
if !cancel {
dispatch_async(dispatch_get_main_queue(), completion)
}
cancelable = nil
}
cancelable = delayed
dispatch_later {
cancelable?(cancel: false)
}
return cancelable;
}
/**
:name: delayCancel
*/
public static func delayCancel(completion: MaterialAnimationDelayCancelBlock?) {
completion?(cancel: true)
}
/**
:name: animationDisabled
*/
public static func animationDisabled(animations: (() -> Void)) {
@@ -81,4 +118,13 @@ public struct MaterialAnimation {
group.timingFunction = CAMediaTimingFunction(name: kCAMediaTimingFunctionEaseInEaseOut)
return group
}
/**
:name: animateWithDelay
*/
public static func animateWithDelay(delay d: CFTimeInterval, duration: CFTimeInterval, animations: (() -> Void), options: UIViewAnimationOptions? = nil, completion: (() -> Void)? = nil) {
delay(d) {
animateWithDuration(duration, animations: animations, options: options, completion: completion)
}
}
}
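The new delay helper returns a cancel block that can be handed to delayCancel, and animateWithDelay composes delay with the existing animateWithDuration helper. A short usage sketch; the hint-related names are hypothetical placeholders:

```swift
import MaterialKit

var pendingHint: MaterialAnimationDelayCancelBlock?

// Schedule work on the main queue after 2 seconds and keep the cancel handle.
pendingHint = MaterialAnimation.delay(2) {
	print("show hint")
}

// If the user acts first, cancel the pending block; it will then never run.
MaterialAnimation.delayCancel(pendingHint)
pendingHint = nil

// Run an animation block 0.5 seconds from now with a 0.25 second duration.
MaterialAnimation.animateWithDelay(delay: 0.5, duration: 0.25, animations: {
	// property changes to animate
})
```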