Commit 71b92d5c by Daniel Dahan

development: added convenience CALayer backgroundColor property called bgColor to avoid dealing with cgColor
parent 29c8b327
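In practice, the new property lets call sites assign a UIColor where CALayer would otherwise require a CGColor. A minimal before/after sketch, using only identifiers that appear in the diff below (Material's Color alias and the new bgColor extension):

```swift
import UIKit
import Material // assumes Material 2.2.0, which carries the CALayer extension changed below

let layer = CALayer()

// Before: CALayer.backgroundColor is CGColor-typed, so UIColor must be bridged manually.
layer.backgroundColor = Color.white.cgColor

// After: bgColor accepts a UIColor directly.
layer.bgColor = .white
```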
@@ -110,6 +110,7 @@
 TargetAttributes = {
 96784F6F1D901FB90061C06C = {
 CreatedOnToolsVersion = 8.0;
+DevelopmentTeam = 9Z76XCNLGL;
 ProvisioningStyle = Automatic;
 };
 };
@@ -266,7 +267,7 @@
 isa = XCBuildConfiguration;
 buildSettings = {
 ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
-DEVELOPMENT_TEAM = "";
+DEVELOPMENT_TEAM = 9Z76XCNLGL;
 INFOPLIST_FILE = CaptureController/Info.plist;
 LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
 PRODUCT_BUNDLE_IDENTIFIER = io.cosmicmind.CaptureController;
@@ -279,7 +280,7 @@
 isa = XCBuildConfiguration;
 buildSettings = {
 ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
-DEVELOPMENT_TEAM = "";
+DEVELOPMENT_TEAM = 9Z76XCNLGL;
 INFOPLIST_FILE = CaptureController/Info.plist;
 LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
 PRODUCT_BUNDLE_IDENTIFIER = io.cosmicmind.CaptureController;
...
@@ -46,7 +46,7 @@ class AppCaptureController: CaptureController {
   toolbar.detailLabel.isHidden = true
   toolbar.detail = "Recording"
-  toolbar.detailLabel.textColor = red.accent1
+  toolbar.detailLabel.textColor = Color.red.accent1
   toolbar.leftViews = [switchCamerasButton]
   toolbar.rightViews = [flashButton]
@@ -55,7 +55,7 @@ class AppCaptureController: CaptureController {
 private func prepareCaptureButton() {
   captureButton.width = 72
   captureButton.height = 72
-  captureButton.backgroundColor = red.darken1.withAlphaComponent(0.3)
+  captureButton.backgroundColor = Color.red.darken1.withAlphaComponent(0.3)
   captureButton.borderColor = .white
   captureButton.borderWidthPreset = .border3
   captureButton.depthPreset = .none
...
@@ -49,7 +49,7 @@ class ViewController: UIViewController {
   layer = Layer(frame: CGRect(x: (w - d) / 2, y: (h - d) / 2, width: d, height: d))
   layer.depthPreset = .depth3
   layer.shapePreset = .circle
-  layer.backgroundColor = Color.white.cgColor
+  layer.bgColor = .white
   layer.image = UIImage(named: "CosmicMind")
   view.layer.addSublayer(layer)
...
 Pod::Spec.new do |s|
   s.name = 'Material'
-  s.version = '2.1.2'
+  s.version = '2.2.0'
   s.license = 'BSD-3-Clause'
   s.summary = 'Material is an animation and graphics framework that is used to create beautiful applications.'
   s.homepage = 'http://cosmicmind.io'
...
@@ -1204,8 +1204,10 @@
 CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
 CLANG_WARN_EMPTY_BODY = YES;
 CLANG_WARN_ENUM_CONVERSION = YES;
+CLANG_WARN_INFINITE_RECURSION = YES;
 CLANG_WARN_INT_CONVERSION = YES;
 CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+CLANG_WARN_SUSPICIOUS_MOVE = YES;
 CLANG_WARN_UNREACHABLE_CODE = YES;
 CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
 "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
@@ -1256,8 +1258,10 @@
 CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
 CLANG_WARN_EMPTY_BODY = YES;
 CLANG_WARN_ENUM_CONVERSION = YES;
+CLANG_WARN_INFINITE_RECURSION = YES;
 CLANG_WARN_INT_CONVERSION = YES;
 CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+CLANG_WARN_SUSPICIOUS_MOVE = YES;
 CLANG_WARN_UNREACHABLE_CODE = YES;
 CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
 "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
@@ -1279,6 +1283,7 @@
 MTL_ENABLE_DEBUG_INFO = NO;
 PRODUCT_NAME = Material;
 SDKROOT = iphoneos;
+SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
 SWIFT_VERSION = 3.0;
 TARGETED_DEVICE_FAMILY = "1,2";
 VALIDATE_PRODUCT = YES;
@@ -1293,7 +1298,7 @@
 CLANG_ANALYZER_OBJC_UNUSED_IVARS = YES;
 CLANG_ENABLE_MODULES = YES;
 CODE_SIGN_IDENTITY = "iPhone Developer";
-"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
 DEFINES_MODULE = YES;
 DYLIB_COMPATIBILITY_VERSION = 1;
 DYLIB_CURRENT_VERSION = 1;
@@ -1318,7 +1323,7 @@
 CLANG_ANALYZER_OBJC_UNUSED_IVARS = YES;
 CLANG_ENABLE_MODULES = YES;
 CODE_SIGN_IDENTITY = "iPhone Developer";
-"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
 DEFINES_MODULE = YES;
 DYLIB_COMPATIBILITY_VERSION = 1;
 DYLIB_CURRENT_VERSION = 1;
@@ -1382,7 +1387,7 @@
 buildSettings = {
 CLANG_ANALYZER_NONNULL = YES;
 CLANG_ENABLE_MODULES = YES;
-CODE_SIGN_IDENTITY = "Mac Developer";
+CODE_SIGN_IDENTITY = "";
 COMBINE_HIDPI_IMAGES = YES;
 DEBUG_INFORMATION_FORMAT = dwarf;
 DEFINES_MODULE = YES;
@@ -1407,7 +1412,7 @@
 buildSettings = {
 CLANG_ANALYZER_NONNULL = YES;
 CLANG_ENABLE_MODULES = YES;
-CODE_SIGN_IDENTITY = "Mac Developer";
+CODE_SIGN_IDENTITY = "";
 COMBINE_HIDPI_IMAGES = YES;
 DEFINES_MODULE = YES;
 DYLIB_COMPATIBILITY_VERSION = 1;
...
@@ -15,7 +15,7 @@
 <key>CFBundlePackageType</key>
 <string>FMWK</string>
 <key>CFBundleShortVersionString</key>
-<string>2.1.2</string>
+<string>2.2.0</string>
 <key>CFBundleSignature</key>
 <string>????</string>
 <key>CFBundleVersion</key>
...
@@ -79,7 +79,7 @@ open class Button: UIButton {
   @IBInspectable
   open override var backgroundColor: UIColor? {
     didSet {
-      layer.backgroundColor = backgroundColor?.cgColor
+      layer.bgColor = backgroundColor
     }
   }
...
@@ -148,24 +148,27 @@ open class Capture: View, UIGestureRecognizerDelegate {
   private var tapToResetGesture: UITapGestureRecognizer?
   /// A reference to the capture mode.
-  open lazy var captureMode: CaptureMode = .video
+  open var captureMode = CaptureMode.video
   /// A boolean indicating whether to enable tap to focus.
   @IBInspectable
   open var isTapToFocusEnabled = false {
     didSet {
-      if isTapToFocusEnabled {
-        isTapToResetEnabled = true
-        prepareFocusLayer()
-        prepareTapGesture(gesture: &tapToFocusGesture, numberOfTapsRequired: 1, numberOfTouchesRequired: 1, selector: #selector(handleTapToFocusGesture))
-        if let v: UITapGestureRecognizer = tapToExposeGesture {
-          tapToFocusGesture!.require(toFail: v)
-        }
-      } else {
-        removeTapGesture(gesture: &tapToFocusGesture)
-        focusLayer?.removeFromSuperlayer()
-        focusLayer = nil
-      }
+      guard isTapToFocusEnabled else {
+        removeTapGesture(gesture: &tapToFocusGesture)
+        focusView?.removeFromSuperview()
+        focusView = nil
+        return
+      }
+      isTapToResetEnabled = true
+      prepareFocusLayer()
+      prepareTapGesture(gesture: &tapToFocusGesture, numberOfTapsRequired: 1, numberOfTouchesRequired: 1, selector: #selector(handleTapToFocusGesture))
+      if let v = tapToExposeGesture {
+        tapToFocusGesture!.require(toFail: v)
+      }
     }
   }
@@ -173,18 +176,21 @@ open class Capture: View, UIGestureRecognizerDelegate {
   @IBInspectable
   open var isTapToExposeEnabled = false {
     didSet {
-      if isTapToExposeEnabled {
-        isTapToResetEnabled = true
-        prepareExposureLayer()
-        prepareTapGesture(gesture: &tapToExposeGesture, numberOfTapsRequired: 2, numberOfTouchesRequired: 1, selector: #selector(handleTapToExposeGesture))
-        if let v: UITapGestureRecognizer = tapToFocusGesture {
-          v.require(toFail: tapToExposeGesture!)
-        }
-      } else {
-        removeTapGesture(gesture: &tapToExposeGesture)
-        exposureLayer?.removeFromSuperlayer()
-        exposureLayer = nil
-      }
+      guard isTapToExposeEnabled else {
+        removeTapGesture(gesture: &tapToExposeGesture)
+        exposureView?.removeFromSuperview()
+        exposureView = nil
+        return
+      }
+      isTapToResetEnabled = true
+      prepareExposureLayer()
+      prepareTapGesture(gesture: &tapToExposeGesture, numberOfTapsRequired: 2, numberOfTouchesRequired: 1, selector: #selector(handleTapToExposeGesture))
+      if let v = tapToFocusGesture {
+        v.require(toFail: tapToExposeGesture!)
+      }
     }
   }
@@ -192,20 +198,23 @@ open class Capture: View, UIGestureRecognizerDelegate {
   @IBInspectable
   open var isTapToResetEnabled = false {
     didSet {
-      if isTapToResetEnabled {
-        prepareResetLayer()
-        prepareTapGesture(gesture: &tapToResetGesture, numberOfTapsRequired: 2, numberOfTouchesRequired: 2, selector: #selector(handleTapToResetGesture))
-        if let v: UITapGestureRecognizer = tapToFocusGesture {
-          v.require(toFail: tapToResetGesture!)
-        }
-        if let v: UITapGestureRecognizer = tapToExposeGesture {
-          v.require(toFail: tapToResetGesture!)
-        }
-      } else {
-        removeTapGesture(gesture: &tapToResetGesture)
-        resetLayer?.removeFromSuperlayer()
-        resetLayer = nil
-      }
+      guard isTapToResetEnabled else {
+        removeTapGesture(gesture: &tapToResetGesture)
+        resetView?.removeFromSuperview()
+        resetView = nil
+        return
+      }
+      prepareResetLayer()
+      prepareTapGesture(gesture: &tapToResetGesture, numberOfTapsRequired: 2, numberOfTouchesRequired: 2, selector: #selector(handleTapToResetGesture))
+      if let v = tapToFocusGesture {
+        v.require(toFail: tapToResetGesture!)
+      }
+      if let v = tapToExposeGesture {
+        v.require(toFail: tapToResetGesture!)
+      }
     }
   }
@@ -230,17 +239,17 @@ open class Capture: View, UIGestureRecognizerDelegate {
   /// A reference to the CaptureSession.
   open internal(set) var session: CaptureSession!
-  /// A reference to the focus layer used in focus animations.
-  open internal(set) var focusLayer: Layer?
-  /// A reference to the exposure layer used in exposure animations.
-  open internal(set) var exposureLayer: Layer?
-  /// A reference to the reset layer used in reset animations.
-  open internal(set) var resetLayer: Layer?
+  /// A reference to the focusView used in focus animations.
+  open internal(set) var focusView: UIView?
+  /// A reference to the exposureView used in exposure animations.
+  open internal(set) var exposureView: UIView?
+  /// A reference to the resetView used in reset animations.
+  open internal(set) var resetView: UIView?
   /// A reference to the cameraButton.
-  open var cameraButton: IconButton! {
+  open private(set) var cameraButton: IconButton! {
     didSet {
       if let v = cameraButton {
         v.addTarget(self, action: #selector(handleCameraButton), for: .touchUpInside)
@@ -250,7 +259,7 @@ open class Capture: View, UIGestureRecognizerDelegate {
   }
   /// A reference to the captureButton.
-  open var captureButton: FabButton! {
+  open private(set) var captureButton: FabButton! {
     didSet {
       if let v = captureButton {
         v.addTarget(self, action: #selector(handleCaptureButton), for: .touchUpInside)
@@ -261,7 +270,7 @@ open class Capture: View, UIGestureRecognizerDelegate {
   /// A reference to the videoButton.
-  open var videoButton: IconButton! {
+  open private(set) var videoButton: IconButton! {
     didSet {
       if let v = videoButton {
         v.addTarget(self, action: #selector(handleVideoButton), for: .touchUpInside)
@@ -271,7 +280,7 @@ open class Capture: View, UIGestureRecognizerDelegate {
   }
   /// A reference to the switchCameraButton.
-  open var switchCamerasButton: IconButton! {
+  open private(set) var switchCamerasButton: IconButton! {
     didSet {
       if let v = switchCamerasButton {
         v.addTarget(self, action: #selector(handleSwitchCamerasButton), for: .touchUpInside)
@@ -280,7 +289,7 @@ open class Capture: View, UIGestureRecognizerDelegate {
   }
   /// A reference to the flashButton.
-  open var flashButton: IconButton! {
+  open private(set) var flashButton: IconButton! {
     didSet {
       if let v = flashButton {
         v.addTarget(self, action: #selector(handleFlashButton), for: .touchUpInside)
@@ -298,18 +307,21 @@ open class Capture: View, UIGestureRecognizerDelegate {
     preview.frame = bounds
     if let v = cameraButton {
-      v.frame.origin.y = bounds.height - contentEdgeInsets.bottom - v.bounds.height
-      v.frame.origin.x = contentEdgeInsets.left
+      v.y = bounds.height - contentEdgeInsets.bottom - v.bounds.height
+      v.x = contentEdgeInsets.left
     }
     if let v = captureButton {
-      v.frame.origin.y = bounds.height - contentEdgeInsets.bottom - v.bounds.height
-      v.frame.origin.x = (bounds.width - v.bounds.width) / 2
+      v.y = bounds.height - contentEdgeInsets.bottom - v.bounds.height
+      v.x = (bounds.width - v.width) / 2
     }
     if let v = videoButton {
-      v.frame.origin.y = bounds.height - contentEdgeInsets.bottom - v.bounds.height
-      v.frame.origin.x = bounds.width - v.bounds.width - contentEdgeInsets.right
+      v.y = bounds.height - contentEdgeInsets.bottom - v.bounds.height
+      v.x = bounds.width - v.width - contentEdgeInsets.right
     }
-    if let v: AVCaptureConnection = (preview.layer as! AVCaptureVideoPreviewLayer).connection {
+    if let v = (preview.layer as! AVCaptureVideoPreviewLayer).connection {
       v.videoOrientation = session.videoOrientation
     }
   }
@@ -324,16 +336,17 @@ open class Capture: View, UIGestureRecognizerDelegate {
   open override func prepare() {
     super.prepare()
     backgroundColor = .black
-    isTapToFocusEnabled = true
-    isTapToExposeEnabled = true
     prepareCaptureSession()
     preparePreviewView()
     prepareCaptureButton()
     prepareCameraButton()
     prepareVideoButton()
     prepareSwitchCamerasButton()
     prepareFlashButton()
+    isTapToFocusEnabled = true
+    isTapToExposeEnabled = true
   }
   /// Reloads the view.
@@ -363,30 +376,35 @@ open class Capture: View, UIGestureRecognizerDelegate {
   internal func startTimer() {
     timer?.invalidate()
     timer = Timer(timeInterval: 0.5, target: self, selector: #selector(updateTimer), userInfo: nil, repeats: true)
     RunLoop.main.add(timer!, forMode: .commonModes)
     delegate?.captureDidStartRecordTimer?(capture: self)
   }
   /// Updates the timer when recording.
   internal func updateTimer() {
-    let duration: CMTime = session.recordedDuration
-    let time: Double = CMTimeGetSeconds(duration)
-    let hours: Int = Int(time / 3600)
-    let minutes: Int = Int((time / 60).truncatingRemainder(dividingBy: 60))
-    let seconds: Int = Int(time.truncatingRemainder(dividingBy: 60))
+    let duration = session.recordedDuration
+    let time = CMTimeGetSeconds(duration)
+    let hours = Int(time / 3600)
+    let minutes = Int((time / 60).truncatingRemainder(dividingBy: 60))
+    let seconds = Int(time.truncatingRemainder(dividingBy: 60))
     delegate?.captureDidUpdateRecordTimer?(capture: self, hours: hours, minutes: minutes, seconds: seconds)
   }
   /// Stops the timer when recording.
   internal func stopTimer() {
-    let duration: CMTime = session.recordedDuration
-    let time: Double = CMTimeGetSeconds(duration)
-    let hours: Int = Int(time / 3600)
-    let minutes: Int = Int((time / 60).truncatingRemainder(dividingBy: 60))
-    let seconds: Int = Int(time.truncatingRemainder(dividingBy: 60))
+    let duration = session.recordedDuration
+    let time = CMTimeGetSeconds(duration)
+    let hours = Int(time / 3600)
+    let minutes = Int((time / 60).truncatingRemainder(dividingBy: 60))
+    let seconds = Int(time.truncatingRemainder(dividingBy: 60))
     timer?.invalidate()
     timer = nil
     delegate?.captureDidStopRecordTimer?(capture: self, hours: hours, minutes: minutes, seconds: seconds)
   }
   /**
@@ -414,17 +432,19 @@ open class Capture: View, UIGestureRecognizerDelegate {
   */
   @objc
   internal func handleCaptureButton(button: UIButton) {
-    if .photo == captureMode {
-      session.captureStillImage()
-    } else if .video == captureMode {
+    switch captureMode {
+    case .photo:
+      session.captureStillImage()
+    case .video:
       if session.isRecording {
         session.stopRecording()
         stopTimer()
       } else {
         session.startRecording()
         startTimer()
       }
     }
     delegate?.captureDidPressCaptureButton?(capture: self, button: button)
   }
@@ -454,12 +474,14 @@ open class Capture: View, UIGestureRecognizerDelegate {
   */
   @objc
   internal func handleTapToFocusGesture(recognizer: UITapGestureRecognizer) {
-    if isTapToFocusEnabled && session.isFocusPointOfInterestSupported {
-      let point: CGPoint = recognizer.location(in: self)
-      session.focus(at: preview.captureDevicePointOfInterestForPoint(point: point))
-      animateTapLayer(layer: focusLayer!, point: point)
-      delegate?.captureDidTapToFocusAtPoint?(capture: self, point: point)
-    }
+    guard isTapToFocusEnabled && session.isFocusPointOfInterestSupported else {
+      return
+    }
+    let point: CGPoint = recognizer.location(in: self)
+    session.focus(at: preview.captureDevicePointOfInterestForPoint(point: point))
+    animateTap(view: focusView!, point: point)
+    delegate?.captureDidTapToFocusAtPoint?(capture: self, point: point)
   }
   /**
@@ -468,12 +490,14 @@ open class Capture: View, UIGestureRecognizerDelegate {
   */
   @objc
   internal func handleTapToExposeGesture(recognizer: UITapGestureRecognizer) {
-    if isTapToExposeEnabled && session.isExposurePointOfInterestSupported {
-      let point: CGPoint = recognizer.location(in: self)
-      session.expose(at: preview.captureDevicePointOfInterestForPoint(point: point))
-      animateTapLayer(layer: exposureLayer!, point: point)
-      delegate?.captureDidTapToExposeAtPoint?(capture: self, point: point)
-    }
+    guard isTapToExposeEnabled && session.isExposurePointOfInterestSupported else {
+      return
+    }
+    let point: CGPoint = recognizer.location(in: self)
+    session.expose(at: preview.captureDevicePointOfInterestForPoint(point: point))
+    animateTap(view: exposureView!, point: point)
+    delegate?.captureDidTapToExposeAtPoint?(capture: self, point: point)
   }
   /**
@@ -482,12 +506,15 @@ open class Capture: View, UIGestureRecognizerDelegate {
   */
   @objc
   internal func handleTapToResetGesture(recognizer: UITapGestureRecognizer) {
-    if isTapToResetEnabled {
-      session.reset()
-      let point: CGPoint = preview.pointForCaptureDevicePointOfInterest(point: CGPoint(x: 0.5, y: 0.5))
-      animateTapLayer(layer: resetLayer!, point: point)
-      delegate?.captureDidTapToResetAtPoint?(capture: self, point: point)
-    }
+    guard isTapToResetEnabled else {
+      return
+    }
+    session.reset()
+    let point: CGPoint = preview.pointForCaptureDevicePointOfInterest(point: CGPoint(x: 0.5, y: 0.5))
+    animateTap(view: resetView!, point: point)
+    delegate?.captureDidTapToResetAtPoint?(capture: self, point: point)
   }
   /**
@@ -513,10 +540,12 @@ open class Capture: View, UIGestureRecognizerDelegate {
   - Parameter gesture: An optional UITapGestureRecognizer to remove.
   */
   private func removeTapGesture(gesture: inout UITapGestureRecognizer?) {
-    if let v: UIGestureRecognizer = gesture {
-      removeGestureRecognizer(v)
-      gesture = nil
-    }
+    guard let v = gesture else {
+      return
+    }
+    removeGestureRecognizer(v)
+    gesture = nil
   }
   /// Prepare the session.
@@ -559,61 +588,69 @@ open class Capture: View, UIGestureRecognizerDelegate {
   /// Prepares the focusLayer.
   private func prepareFocusLayer() {
-    if nil == focusLayer {
-      focusLayer = Layer(frame: CGRect(x: 0, y: 0, width: 150, height: 150))
-      focusLayer!.isHidden = true
-      focusLayer!.borderWidth = 2
-      focusLayer!.borderColor = Color.white.cgColor
-      preview.layer.addSublayer(focusLayer!)
-    }
+    guard nil == focusView else {
+      return
+    }
+    focusView = UIView(frame: CGRect(x: 0, y: 0, width: 150, height: 150))
+    focusView!.isHidden = true
+    focusView!.borderWidth = 2
+    focusView!.borderColor = .white
+    preview.addSubview(focusView!)
   }
   /// Prepares the exposureLayer.
   private func prepareExposureLayer() {
-    if nil == exposureLayer {
-      exposureLayer = Layer(frame: CGRect(x: 0, y: 0, width: 150, height: 150))
-      exposureLayer!.isHidden = true
-      exposureLayer!.borderWidth = 2
-      exposureLayer!.borderColor = Color.yellow.darken1.cgColor
-      preview.layer.addSublayer(exposureLayer!)
-    }
+    guard nil == exposureView else {
+      return
+    }
+    exposureView = UIView(frame: CGRect(x: 0, y: 0, width: 150, height: 150))
+    exposureView!.isHidden = true
+    exposureView!.borderWidth = 2
+    exposureView!.borderColor = Color.yellow.darken1
+    preview.addSubview(exposureView!)
   }
   /// Prepares the resetLayer.
   private func prepareResetLayer() {
-    if nil == resetLayer {
-      resetLayer = Layer(frame: CGRect(x: 0, y: 0, width: 150, height: 150))
-      resetLayer!.isHidden = true
-      resetLayer!.borderWidth = 2
-      resetLayer!.borderColor = Color.red.accent1.cgColor
-      preview.layer.addSublayer(resetLayer!)
-    }
+    guard nil == resetView else {
+      return
+    }
+    resetView = UIView(frame: CGRect(x: 0, y: 0, width: 150, height: 150))
+    resetView!.isHidden = true
+    resetView!.borderWidth = 2
+    resetView!.borderColor = Color.red.accent1
+    preview.addSubview(resetView!)
   }
   /// Animates the tap and layer.
-  private func animateTapLayer(layer: Layer, point: CGPoint) {
-    Animation.animationDisabled { [weak layer] in
-      guard let v = layer else {
-        return
-      }
-      v.transform = CATransform3DIdentity
-      v.position = point
-      v.isHidden = false
-    }
-    Animation.animateWithDuration(duration: 0.25, animations: { [weak layer] in
-      guard let v = layer else {
-        return
-      }
-      v.transform = CATransform3DMakeScale(0.5, 0.5, 1)
-    }) {
-      Animation.delay(time: 0.4) { [weak layer] in
-        Animation.animationDisabled { [weak layer] in
-          guard let v = layer else {
-            return
-          }
-          v.isHidden = true
-        }
-      }
-    }
+  private func animateTap(view: UIView, point: CGPoint) {
+//    Animation.animationDisabled { [weak layer] in
+//      guard let v = layer else {
+//        return
+//      }
+//      v.transform = CATransform3DIdentity
+//      v.position = point
+//      v.isHidden = false
+//    }
+//    Animation.animateWithDuration(duration: 0.25, animations: { [weak layer] in
+//      guard let v = layer else {
+//        return
+//      }
+//      v.transform = CATransform3DMakeScale(0.5, 0.5, 1)
+//    }) {
+//      Animation.delay(time: 0.4) { [weak layer] in
+//        Animation.animationDisabled { [weak layer] in
+//          guard let v = layer else {
+//            return
+//          }
+//          v.isHidden = true
+//        }
+//      }
+//    }
   }
...
@@ -89,11 +89,17 @@ open class CaptureController: ToolbarController, CaptureDelegate, CaptureSession
     view.backgroundColor = .black
     display = .full
+    prepareStatusBar()
     prepareToolbar()
     prepareCapture()
   }
-  /// Prepares the Toolbar.
+  /// Prepares the statusBar.
+  private func prepareStatusBar() {
+    statusBar.backgroundColor = .clear
+  }
+  /// Prepares the toolbar.
   private func prepareToolbar() {
     toolbar.backgroundColor = .clear
     toolbar.depthPreset = .none
...
@@ -70,7 +70,7 @@ open class CapturePreview: View {
   /// Prepares the previewLayer.
   private func preparePreviewLayer() {
-    layer.backgroundColor = Color.black.cgColor
+    layer.bgColor = .black
     layer.masksToBounds = true
     (layer as! AVCaptureVideoPreviewLayer).videoGravity = AVLayerVideoGravityResizeAspectFill
   }
...
@@ -278,6 +278,7 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
       error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0001, userInfo: userInfo)
       userInfo[NSUnderlyingErrorKey] = error
     }
+
     if let e = error {
       delegate?.sessionFailedWithError?(session: self, error: e)
     }
@@ -307,6 +308,7 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
       error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0002, userInfo: userInfo)
       userInfo[NSUnderlyingErrorKey] = error
     }
+
     if let e = error {
       delegate?.sessionFailedWithError?(session: self, error: e)
     }
@@ -336,7 +338,8 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
       error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0003, userInfo: userInfo)
       userInfo[NSUnderlyingErrorKey] = error
     }
-    if let e: NSError = error {
+
+    if let e = error {
       delegate?.sessionFailedWithError?(session: self, error: e)
     }
   }
@@ -372,6 +375,7 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
   public override init() {
     preset = .presetHigh
     super.init()
+
     prepareSession()
     prepareSessionQueue()
     prepareActiveVideoInput()
@@ -382,45 +386,55 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
   /// Starts the session.
   open func startSession() {
-    if !isRunning {
-      sessionQueue.async() { [weak self] in
-        self?.session.startRunning()
-      }
-    }
+    guard !isRunning else {
+      return
+    }
+    sessionQueue.async() { [weak self] in
+      self?.session.startRunning()
+    }
   }
   /// Stops the session.
   open func stopSession() {
-    if isRunning {
-      sessionQueue.async() { [weak self] in
-        self?.session.stopRunning()
-      }
-    }
+    guard isRunning else {
+      return
+    }
+    sessionQueue.async() { [weak self] in
+      self?.session.stopRunning()
+    }
   }
   /// Switches the camera if possible.
   open func switchCameras() {
-    if canSwitchCameras {
-      do {
-        if let v: AVCaptureDevicePosition = position {
-          delegate?.sessionWillSwitchCameras?(session: self, position: v)
-          let videoInput: AVCaptureDeviceInput? = try AVCaptureDeviceInput(device: inactiveCamera!)
-          session.beginConfiguration()
-          session.removeInput(activeVideoInput)
-          if session.canAddInput(videoInput) {
-            session.addInput(videoInput)
-            activeVideoInput = videoInput
-          } else {
-            session.addInput(activeVideoInput)
-          }
-          session.commitConfiguration()
-          delegate?.sessionDidSwitchCameras?(session: self, position: position!)
-        }
-      } catch let e as NSError {
-        delegate?.sessionFailedWithError?(session: self, error: e)
-      }
-    }
+    guard canSwitchCameras else {
+      return
+    }
+    do {
+      guard let v = position else {
+        return
+      }
+      delegate?.sessionWillSwitchCameras?(session: self, position: v)
+      let videoInput: AVCaptureDeviceInput? = try AVCaptureDeviceInput(device: inactiveCamera!)
+      session.beginConfiguration()
+      session.removeInput(activeVideoInput)
+      if session.canAddInput(videoInput) {
+        session.addInput(videoInput)
+        activeVideoInput = videoInput
+      } else {
+        session.addInput(activeVideoInput)
+      }
+      session.commitConfiguration()
+      delegate?.sessionDidSwitchCameras?(session: self, position: position!)
+    } catch let e as NSError {
+      delegate?.sessionFailedWithError?(session: self, error: e)
+    }
   }
   /**
@@ -482,6 +496,7 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
       error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0004, userInfo: userInfo)
       userInfo[NSUnderlyingErrorKey] = error
     }
+
     if let e = error {
       delegate?.sessionFailedWithError?(session: self, error: e)
     }
@@ -513,6 +528,7 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
       error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0005, userInfo: userInfo)
       userInfo[NSUnderlyingErrorKey] = error
     }
+
     if let e = error {
       delegate?.sessionFailedWithError?(session: self, error: e)
     }
@@ -548,17 +564,20 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
     let canResetFocus: Bool = device.isFocusPointOfInterestSupported && device.isFocusModeSupported(.continuousAutoFocus)
     let canResetExposure: Bool = device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.continuousAutoExposure)
     let centerPoint: CGPoint = CGPoint(x: 0.5, y: 0.5)
+
     do {
       try device.lockForConfiguration()
       if canResetFocus && focus {
         device.focusMode = .continuousAutoFocus
         device.focusPointOfInterest = centerPoint
       }
+
       if canResetExposure && exposure {
         device.exposureMode = .continuousAutoExposure
         device.exposurePointOfInterest = centerPoint
       }
+
       device.unlockForConfiguration()
     } catch let e as NSError {
       delegate?.sessionFailedWithError?(session: self, error: e)
     }
@@ -567,40 +586,47 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
   /// Captures a still image.
   open func captureStillImage() {
     sessionQueue.async() { [weak self] in
-      if let s: CaptureSession = self {
-        if let v: AVCaptureConnection = s.imageOutput.connection(withMediaType: AVMediaTypeVideo) {
-          v.videoOrientation = s.videoOrientation
-          s.imageOutput.captureStillImageAsynchronously(from: v) { [weak self] (sampleBuffer: CMSampleBuffer?, error: Error?) -> Void in
-            if let s = self {
-              var captureError = error
-              if nil == captureError {
-                let data = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)!
-                if let image1 = UIImage(data: data) {
-                  if let image2 = image1.adjustOrientation() {
-                    s.delegate?.sessionStillImageAsynchronously?(session: s, image: image2)
-                  } else {
-                    var userInfo = [String: Any]()
-                    userInfo[NSLocalizedDescriptionKey] = "[Material Error: Cannot fix image orientation.]"
-                    userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Cannot fix image orientation.]"
-                    captureError = NSError(domain: "io.cosmicmind.Material.Capture", code: 0006, userInfo: userInfo)
-                    userInfo[NSUnderlyingErrorKey] = error
-                  }
-                } else {
-                  var userInfo = [String: Any]()
-                  userInfo[NSLocalizedDescriptionKey] = "[Material Error: Cannot capture image from data.]"
-                  userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Cannot capture image from data.]"
-                  captureError = NSError(domain: "io.cosmicmind.Material.Capture", code: 0007, userInfo: userInfo)
-                  userInfo[NSUnderlyingErrorKey] = error
-                }
-              }
-              if let e: Error = captureError {
-                s.delegate?.sessionStillImageAsynchronouslyFailedWithError?(session: s, error: e)
-              }
-            }
-          }
-        }
-      }
+      guard let s = self else {
+        return
+      }
+      guard let v = s.imageOutput.connection(withMediaType: AVMediaTypeVideo) else {
+        return
+      }
+      v.videoOrientation = s.videoOrientation
+      s.imageOutput.captureStillImageAsynchronously(from: v) { [weak self] (sampleBuffer: CMSampleBuffer?, error: Error?) -> Void in
+        guard let s = self else {
+          return
+        }
+        var captureError = error
+        if nil == captureError {
+          let data = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)!
+          if let image1 = UIImage(data: data) {
+            if let image2 = image1.adjustOrientation() {
+              s.delegate?.sessionStillImageAsynchronously?(session: s, image: image2)
+            } else {
+              var userInfo = [String: Any]()
+              userInfo[NSLocalizedDescriptionKey] = "[Material Error: Cannot fix image orientation.]"
+              userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Cannot fix image orientation.]"
+              captureError = NSError(domain: "io.cosmicmind.Material.Capture", code: 0006, userInfo: userInfo)
+              userInfo[NSUnderlyingErrorKey] = error
+            }
+          } else {
+            var userInfo = [String: Any]()
+            userInfo[NSLocalizedDescriptionKey] = "[Material Error: Cannot capture image from data.]"
+            userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Cannot capture image from data.]"
+            captureError = NSError(domain: "io.cosmicmind.Material.Capture", code: 0007, userInfo: userInfo)
+            userInfo[NSUnderlyingErrorKey] = error
+          }
+        }
+        if let e = captureError {
+          s.delegate?.sessionStillImageAsynchronouslyFailedWithError?(session: s, error: e)
+        }
+      }
     }
   }
@@ -608,37 +634,44 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
   open func startRecording() {
     if !isRecording {
       sessionQueue.async() { [weak self] in
-        if let s: CaptureSession = self {
-          if let v: AVCaptureConnection = s.movieOutput.connection(withMediaType: AVMediaTypeVideo) {
-            v.videoOrientation = s.videoOrientation
-            v.preferredVideoStabilizationMode = .auto
-          }
-          if let v: AVCaptureDevice = s.activeCamera {
-            if v.isSmoothAutoFocusSupported {
-              do {
-                try v.lockForConfiguration()
-                v.isSmoothAutoFocusEnabled = true
-                v.unlockForConfiguration()
-              } catch let e as NSError {
-                s.delegate?.sessionFailedWithError?(session: s, error: e)
-              }
-            }
-            s.movieOutputURL = s.uniqueURL()
-            if let v = s.movieOutputURL {
-              s.movieOutput.startRecording(toOutputFileURL: v as URL!, recordingDelegate: s)
-            }
-          }
-        }
+        guard let s = self else {
+          return
+        }
+        if let v = s.movieOutput.connection(withMediaType: AVMediaTypeVideo) {
+          v.videoOrientation = s.videoOrientation
+          v.preferredVideoStabilizationMode = .auto
+        }
+        guard let v = s.activeCamera else {
+          return
+        }
+        if v.isSmoothAutoFocusSupported {
+          do {
+            try v.lockForConfiguration()
+            v.isSmoothAutoFocusEnabled = true
+            v.unlockForConfiguration()
+          } catch let e as NSError {
+            s.delegate?.sessionFailedWithError?(session: s, error: e)
+          }
+        }
+        s.movieOutputURL = s.uniqueURL()
+        if let v = s.movieOutputURL {
+          s.movieOutput.startRecording(toOutputFileURL: v as URL!, recordingDelegate: s)
+        }
      }
    }
  }
  /// Stops recording.
  open func stopRecording() {
-    if isRecording {
-      movieOutput.stopRecording()
-    }
+    guard isRecording else {
+      return
+    }
+    movieOutput.stopRecording()
  }
  public func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
@@ -665,9 +698,12 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
   private func prepareActiveVideoInput() {
     do {
       activeVideoInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo))
-      if session.canAddInput(activeVideoInput) {
-        session.addInput(activeVideoInput)
-      }
+
+      guard session.canAddInput(activeVideoInput) else {
+        return
+      }
+      session.addInput(activeVideoInput)
     } catch let e as NSError {
       delegate?.sessionFailedWithError?(session: self, error: e)
     }
@@ -677,9 +713,12 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
   private func prepareActiveAudioInput() {
     do {
       activeAudioInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio))
-      if session.canAddInput(activeAudioInput) {
-        session.addInput(activeAudioInput)
-      }
+
+      guard session.canAddInput(activeAudioInput) else {
+        return
+      }
+      session.addInput(activeAudioInput)
     } catch let e as NSError {
       delegate?.sessionFailedWithError?(session: self, error: e)
     }
@@ -688,18 +727,24 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
   /// Prepares the imageOutput.
   private func prepareImageOutput() {
     imageOutput = AVCaptureStillImageOutput()
-    if session.canAddOutput(imageOutput) {
-      imageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
-      session.addOutput(imageOutput)
-    }
+
+    guard session.canAddOutput(imageOutput) else {
+      return
+    }
+    imageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
+    session.addOutput(imageOutput)
   }
   /// Prepares the movieOutput.
   private func prepareMovieOutput() {
     movieOutput = AVCaptureMovieFileOutput()
-    if session.canAddOutput(movieOutput) {
-      session.addOutput(movieOutput)
-    }
+
+    guard session.canAddOutput(movieOutput) else {
+      return
+    }
+    session.addOutput(movieOutput)
   }
   /**
@@ -708,7 +753,7 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
   - Returns: An AVCaptureDevice if one exists, or nil otherwise.
   */
   private func camera(at position: AVCaptureDevicePosition) -> AVCaptureDevice? {
-    let devices: Array<AVCaptureDevice> = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! Array<AVCaptureDevice>
+    let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]
     for device in devices {
       if device.position == position {
         return device
@@ -725,8 +770,10 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
     do {
       let directory = try FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true)
       let dateFormatter = DateFormatter()
+
       dateFormatter.dateStyle = .full
       dateFormatter.timeStyle = .full
+
       return directory.appendingPathComponent(dateFormatter.string(from: NSDate() as Date) + ".mov")
     } catch let e as NSError {
       delegate?.sessionCreateMovieFileFailedWithError?(session: self, error: e)
...
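CaptureSession's queued closures above now unwrap `[weak self]` with a guard instead of nesting the whole body in `if let`. A minimal sketch of the same pattern on a private dispatch queue (hypothetical Worker type, not part of Material):

```swift
import Foundation

final class Worker {
    private let queue = DispatchQueue(label: "io.example.worker") // hypothetical label
    private(set) var total = 0

    func compute() {
        queue.async { [weak self] in
            // Bail out if the owner was deallocated while the block was queued.
            guard let s = self else {
                return
            }
            s.total = (1...10).reduce(0, +)
        }
    }
}
```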
@@ -186,7 +186,7 @@ open class CollectionReusableView: UICollectionReusableView {
   /// A property that accesses the backing layer's background
   @IBInspectable open override var backgroundColor: UIColor? {
     didSet {
-      layer.backgroundColor = backgroundColor?.cgColor
+      layer.bgColor = backgroundColor
     }
   }
...
@@ -193,7 +193,7 @@ open class CollectionViewCell: UICollectionViewCell {
   @IBInspectable
   open override var backgroundColor: UIColor? {
     didSet {
-      layer.backgroundColor = backgroundColor?.cgColor
+      layer.bgColor = backgroundColor
     }
   }
...
@@ -248,13 +248,13 @@ extension CALayer {
     }
   }
-  /// A UIColor reference to the `backgroundcgColor`.
-  open var color: UIColor? {
+  /// A UIColor reference to the `backgroundColor`.
+  open var bgColor: UIColor? {
     get {
       return nil == backgroundColor ? nil : UIColor(cgColor: backgroundColor!)
     }
     set(value) {
-      backgroundColor = color?.cgColor
+      backgroundColor = bgColor?.cgColor
     }
   }
...
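For reference, a minimal sketch of a UIColor-facing wrapper over CALayer's CGColor-typed backgroundColor, written here with the setter assigning the incoming value; the property name is hypothetical and this is not the exact Material implementation:

```swift
import UIKit

extension CALayer {
    /// A UIColor view onto the layer's backgroundColor (hypothetical name).
    var bgColorSketch: UIColor? {
        get {
            guard let cg = backgroundColor else {
                return nil
            }
            return UIColor(cgColor: cg)
        }
        set {
            backgroundColor = newValue?.cgColor
        }
    }
}
```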
@@ -81,7 +81,7 @@ internal extension Animation {
     }
     pLayer.cornerRadius = n / 2
-    pLayer.backgroundColor = pulse.color.withAlphaComponent(pulse.opacity).cgColor
+    pLayer.bgColor = pulse.color.withAlphaComponent(pulse.opacity)
     pLayer.transform = CATransform3DMakeAffineTransform(CGAffineTransform(scaleX: 0, y: 0))
   })
...
@@ -78,7 +78,7 @@ open class TableViewCell: UITableViewCell {
   @IBInspectable
   open override var backgroundColor: UIColor? {
     didSet {
-      layer.backgroundColor = backgroundColor?.cgColor
+      layer.bgColor = backgroundColor
     }
   }
...
@@ -38,7 +38,7 @@ public class TextView: UITextView {
   /// A property that accesses the backing layer's background
   @IBInspectable public override var backgroundColor: UIColor? {
     didSet {
-      layer.backgroundColor = backgroundColor?.cgColor
+      layer.bgColor = backgroundColor
     }
   }
...
@@ -120,7 +120,7 @@ open class View: UIView {
   @IBInspectable
   open override var backgroundColor: UIColor? {
     didSet {
-      layer.backgroundColor = backgroundColor?.cgColor
+      layer.bgColor = backgroundColor
     }
   }
...