Commit 71b92d5c by Daniel Dahan

development: added convenience CALayer backgroundColor property called bgColor to avoid dealing with cgColor

parent 29c8b327
......@@ -110,6 +110,7 @@
TargetAttributes = {
96784F6F1D901FB90061C06C = {
CreatedOnToolsVersion = 8.0;
DevelopmentTeam = 9Z76XCNLGL;
ProvisioningStyle = Automatic;
};
};
......@@ -266,7 +267,7 @@
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
DEVELOPMENT_TEAM = "";
DEVELOPMENT_TEAM = 9Z76XCNLGL;
INFOPLIST_FILE = CaptureController/Info.plist;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = io.cosmicmind.CaptureController;
......@@ -279,7 +280,7 @@
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
DEVELOPMENT_TEAM = "";
DEVELOPMENT_TEAM = 9Z76XCNLGL;
INFOPLIST_FILE = CaptureController/Info.plist;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = io.cosmicmind.CaptureController;
......
......@@ -46,7 +46,7 @@ class AppCaptureController: CaptureController {
toolbar.detailLabel.isHidden = true
toolbar.detail = "Recording"
toolbar.detailLabel.textColor = red.accent1
toolbar.detailLabel.textColor = Color.red.accent1
toolbar.leftViews = [switchCamerasButton]
toolbar.rightViews = [flashButton]
......@@ -55,7 +55,7 @@ class AppCaptureController: CaptureController {
private func prepareCaptureButton() {
captureButton.width = 72
captureButton.height = 72
captureButton.backgroundColor = red.darken1.withAlphaComponent(0.3)
captureButton.backgroundColor = Color.red.darken1.withAlphaComponent(0.3)
captureButton.borderColor = .white
captureButton.borderWidthPreset = .border3
captureButton.depthPreset = .none
......
......@@ -49,7 +49,7 @@ class ViewController: UIViewController {
layer = Layer(frame: CGRect(x: (w - d) / 2, y: (h - d) / 2, width: d, height: d))
layer.depthPreset = .depth3
layer.shapePreset = .circle
layer.backgroundColor = Color.white.cgColor
layer.bgColor = .white
layer.image = UIImage(named: "CosmicMind")
view.layer.addSublayer(layer)
......
Pod::Spec.new do |s|
s.name = 'Material'
s.version = '2.1.2'
s.version = '2.2.0'
s.license = 'BSD-3-Clause'
s.summary = 'Material is an animation and graphics framework that is used to create beautiful applications.'
s.homepage = 'http://cosmicmind.io'
......
......@@ -1204,8 +1204,10 @@
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
......@@ -1256,8 +1258,10 @@
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
......@@ -1279,6 +1283,7 @@
MTL_ENABLE_DEBUG_INFO = NO;
PRODUCT_NAME = Material;
SDKROOT = iphoneos;
SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
SWIFT_VERSION = 3.0;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
......@@ -1293,7 +1298,7 @@
CLANG_ANALYZER_OBJC_UNUSED_IVARS = YES;
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_IDENTITY = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
DEFINES_MODULE = YES;
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
......@@ -1318,7 +1323,7 @@
CLANG_ANALYZER_OBJC_UNUSED_IVARS = YES;
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_IDENTITY = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "";
DEFINES_MODULE = YES;
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
......@@ -1382,7 +1387,7 @@
buildSettings = {
CLANG_ANALYZER_NONNULL = YES;
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_IDENTITY = "Mac Developer";
CODE_SIGN_IDENTITY = "";
COMBINE_HIDPI_IMAGES = YES;
DEBUG_INFORMATION_FORMAT = dwarf;
DEFINES_MODULE = YES;
......@@ -1407,7 +1412,7 @@
buildSettings = {
CLANG_ANALYZER_NONNULL = YES;
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_IDENTITY = "Mac Developer";
CODE_SIGN_IDENTITY = "";
COMBINE_HIDPI_IMAGES = YES;
DEFINES_MODULE = YES;
DYLIB_COMPATIBILITY_VERSION = 1;
......
......@@ -15,7 +15,7 @@
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleShortVersionString</key>
<string>2.1.2</string>
<string>2.2.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
......
......@@ -79,7 +79,7 @@ open class Button: UIButton {
@IBInspectable
open override var backgroundColor: UIColor? {
didSet {
layer.backgroundColor = backgroundColor?.cgColor
layer.bgColor = backgroundColor
}
}
......
......@@ -148,24 +148,27 @@ open class Capture: View, UIGestureRecognizerDelegate {
private var tapToResetGesture: UITapGestureRecognizer?
/// A reference to the capture mode.
open lazy var captureMode: CaptureMode = .video
open var captureMode = CaptureMode.video
/// A boolean indicating whether to enable tap to focus.
@IBInspectable
open var isTapToFocusEnabled = false {
didSet {
if isTapToFocusEnabled {
isTapToResetEnabled = true
prepareFocusLayer()
prepareTapGesture(gesture: &tapToFocusGesture, numberOfTapsRequired: 1, numberOfTouchesRequired: 1, selector: #selector(handleTapToFocusGesture))
if let v: UITapGestureRecognizer = tapToExposeGesture {
tapToFocusGesture!.require(toFail: v)
}
} else {
removeTapGesture(gesture: &tapToFocusGesture)
focusLayer?.removeFromSuperlayer()
focusLayer = nil
}
guard isTapToFocusEnabled else {
removeTapGesture(gesture: &tapToFocusGesture)
focusView?.removeFromSuperview()
focusView = nil
return
}
isTapToResetEnabled = true
prepareFocusLayer()
prepareTapGesture(gesture: &tapToFocusGesture, numberOfTapsRequired: 1, numberOfTouchesRequired: 1, selector: #selector(handleTapToFocusGesture))
if let v = tapToExposeGesture {
tapToFocusGesture!.require(toFail: v)
}
}
}
......@@ -173,18 +176,21 @@ open class Capture: View, UIGestureRecognizerDelegate {
@IBInspectable
open var isTapToExposeEnabled = false {
didSet {
if isTapToExposeEnabled {
isTapToResetEnabled = true
prepareExposureLayer()
prepareTapGesture(gesture: &tapToExposeGesture, numberOfTapsRequired: 2, numberOfTouchesRequired: 1, selector: #selector(handleTapToExposeGesture))
if let v: UITapGestureRecognizer = tapToFocusGesture {
v.require(toFail: tapToExposeGesture!)
}
} else {
removeTapGesture(gesture: &tapToExposeGesture)
exposureLayer?.removeFromSuperlayer()
exposureLayer = nil
}
guard isTapToExposeEnabled else {
removeTapGesture(gesture: &tapToExposeGesture)
exposureView?.removeFromSuperview()
exposureView = nil
return
}
isTapToResetEnabled = true
prepareExposureLayer()
prepareTapGesture(gesture: &tapToExposeGesture, numberOfTapsRequired: 2, numberOfTouchesRequired: 1, selector: #selector(handleTapToExposeGesture))
if let v = tapToFocusGesture {
v.require(toFail: tapToExposeGesture!)
}
}
}
......@@ -192,20 +198,23 @@ open class Capture: View, UIGestureRecognizerDelegate {
@IBInspectable
open var isTapToResetEnabled = false {
didSet {
if isTapToResetEnabled {
prepareResetLayer()
prepareTapGesture(gesture: &tapToResetGesture, numberOfTapsRequired: 2, numberOfTouchesRequired: 2, selector: #selector(handleTapToResetGesture))
if let v: UITapGestureRecognizer = tapToFocusGesture {
v.require(toFail: tapToResetGesture!)
}
if let v: UITapGestureRecognizer = tapToExposeGesture {
v.require(toFail: tapToResetGesture!)
}
} else {
removeTapGesture(gesture: &tapToResetGesture)
resetLayer?.removeFromSuperlayer()
resetLayer = nil
}
guard isTapToResetEnabled else {
removeTapGesture(gesture: &tapToResetGesture)
resetView?.removeFromSuperview()
resetView = nil
return
}
prepareResetLayer()
prepareTapGesture(gesture: &tapToResetGesture, numberOfTapsRequired: 2, numberOfTouchesRequired: 2, selector: #selector(handleTapToResetGesture))
if let v = tapToFocusGesture {
v.require(toFail: tapToResetGesture!)
}
if let v = tapToExposeGesture {
v.require(toFail: tapToResetGesture!)
}
}
}
......@@ -230,17 +239,17 @@ open class Capture: View, UIGestureRecognizerDelegate {
/// A reference to the CaptureSession.
open internal(set) var session: CaptureSession!
/// A reference to the focus layer used in focus animations.
open internal(set) var focusLayer: Layer?
/// A reference to the focusView used in focus animations.
open internal(set) var focusView: UIView?
/// A reference to the exposure layer used in exposure animations.
open internal(set) var exposureLayer: Layer?
/// A reference to the exposureView used in exposure animations.
open internal(set) var exposureView: UIView?
/// A reference to the reset layer used in reset animations.
open internal(set) var resetLayer: Layer?
/// A reference to the resetView used in reset animations.
open internal(set) var resetView: UIView?
/// A reference to the cameraButton.
open var cameraButton: IconButton! {
open private(set) var cameraButton: IconButton! {
didSet {
if let v = cameraButton {
v.addTarget(self, action: #selector(handleCameraButton), for: .touchUpInside)
......@@ -250,7 +259,7 @@ open class Capture: View, UIGestureRecognizerDelegate {
}
/// A reference to the captureButton.
open var captureButton: FabButton! {
open private(set) var captureButton: FabButton! {
didSet {
if let v = captureButton {
v.addTarget(self, action: #selector(handleCaptureButton), for: .touchUpInside)
......@@ -261,7 +270,7 @@ open class Capture: View, UIGestureRecognizerDelegate {
/// A reference to the videoButton.
open var videoButton: IconButton! {
open private(set) var videoButton: IconButton! {
didSet {
if let v = videoButton {
v.addTarget(self, action: #selector(handleVideoButton), for: .touchUpInside)
......@@ -271,7 +280,7 @@ open class Capture: View, UIGestureRecognizerDelegate {
}
/// A reference to the switchCameraButton.
open var switchCamerasButton: IconButton! {
open private(set) var switchCamerasButton: IconButton! {
didSet {
if let v = switchCamerasButton {
v.addTarget(self, action: #selector(handleSwitchCamerasButton), for: .touchUpInside)
......@@ -280,7 +289,7 @@ open class Capture: View, UIGestureRecognizerDelegate {
}
/// A reference to the flashButton.
open var flashButton: IconButton! {
open private(set) var flashButton: IconButton! {
didSet {
if let v = flashButton {
v.addTarget(self, action: #selector(handleFlashButton), for: .touchUpInside)
......@@ -298,18 +307,21 @@ open class Capture: View, UIGestureRecognizerDelegate {
preview.frame = bounds
if let v = cameraButton {
v.frame.origin.y = bounds.height - contentEdgeInsets.bottom - v.bounds.height
v.frame.origin.x = contentEdgeInsets.left
v.y = bounds.height - contentEdgeInsets.bottom - v.bounds.height
v.x = contentEdgeInsets.left
}
if let v = captureButton {
v.frame.origin.y = bounds.height - contentEdgeInsets.bottom - v.bounds.height
v.frame.origin.x = (bounds.width - v.bounds.width) / 2
v.y = bounds.height - contentEdgeInsets.bottom - v.bounds.height
v.x = (bounds.width - v.width) / 2
}
if let v = videoButton {
v.frame.origin.y = bounds.height - contentEdgeInsets.bottom - v.bounds.height
v.frame.origin.x = bounds.width - v.bounds.width - contentEdgeInsets.right
if let v = videoButton {
v.y = bounds.height - contentEdgeInsets.bottom - v.bounds.height
v.x = bounds.width - v.width - contentEdgeInsets.right
}
if let v: AVCaptureConnection = (preview.layer as! AVCaptureVideoPreviewLayer).connection {
if let v = (preview.layer as! AVCaptureVideoPreviewLayer).connection {
v.videoOrientation = session.videoOrientation
}
}
......@@ -324,16 +336,17 @@ open class Capture: View, UIGestureRecognizerDelegate {
open override func prepare() {
super.prepare()
backgroundColor = .black
isTapToFocusEnabled = true
isTapToExposeEnabled = true
prepareCaptureSession()
preparePreviewView()
preparePreviewView()
prepareCaptureButton()
prepareCameraButton()
prepareVideoButton()
prepareSwitchCamerasButton()
prepareFlashButton()
isTapToFocusEnabled = true
isTapToExposeEnabled = true
}
/// Reloads the view.
......@@ -363,30 +376,35 @@ open class Capture: View, UIGestureRecognizerDelegate {
internal func startTimer() {
timer?.invalidate()
timer = Timer(timeInterval: 0.5, target: self, selector: #selector(updateTimer), userInfo: nil, repeats: true)
RunLoop.main.add(timer!, forMode: .commonModes)
delegate?.captureDidStartRecordTimer?(capture: self)
RunLoop.main.add(timer!, forMode: .commonModes)
delegate?.captureDidStartRecordTimer?(capture: self)
}
/// Updates the timer when recording.
internal func updateTimer() {
let duration: CMTime = session.recordedDuration
let time: Double = CMTimeGetSeconds(duration)
let hours: Int = Int(time / 3600)
let minutes: Int = Int((time / 60).truncatingRemainder(dividingBy: 60))
let seconds: Int = Int(time.truncatingRemainder(dividingBy: 60))
delegate?.captureDidUpdateRecordTimer?(capture: self, hours: hours, minutes: minutes, seconds: seconds)
let duration = session.recordedDuration
let time = CMTimeGetSeconds(duration)
let hours = Int(time / 3600)
let minutes = Int((time / 60).truncatingRemainder(dividingBy: 60))
let seconds = Int(time.truncatingRemainder(dividingBy: 60))
delegate?.captureDidUpdateRecordTimer?(capture: self, hours: hours, minutes: minutes, seconds: seconds)
}
/// Stops the timer when recording.
internal func stopTimer() {
let duration: CMTime = session.recordedDuration
let time: Double = CMTimeGetSeconds(duration)
let hours: Int = Int(time / 3600)
let minutes: Int = Int((time / 60).truncatingRemainder(dividingBy: 60))
let seconds: Int = Int(time.truncatingRemainder(dividingBy: 60))
let duration = session.recordedDuration
let time = CMTimeGetSeconds(duration)
let hours = Int(time / 3600)
let minutes = Int((time / 60).truncatingRemainder(dividingBy: 60))
let seconds = Int(time.truncatingRemainder(dividingBy: 60))
timer?.invalidate()
timer = nil
delegate?.captureDidStopRecordTimer?(capture: self, hours: hours, minutes: minutes, seconds: seconds)
delegate?.captureDidStopRecordTimer?(capture: self, hours: hours, minutes: minutes, seconds: seconds)
}
/**
......@@ -414,17 +432,19 @@ open class Capture: View, UIGestureRecognizerDelegate {
*/
@objc
internal func handleCaptureButton(button: UIButton) {
if .photo == captureMode {
session.captureStillImage()
} else if .video == captureMode {
if session.isRecording {
session.stopRecording()
stopTimer()
} else {
session.startRecording()
startTimer()
}
}
switch captureMode {
case .photo:
session.captureStillImage()
case .video:
if session.isRecording {
session.stopRecording()
stopTimer()
} else {
session.startRecording()
startTimer()
}
}
delegate?.captureDidPressCaptureButton?(capture: self, button: button)
}
......@@ -454,12 +474,14 @@ open class Capture: View, UIGestureRecognizerDelegate {
*/
@objc
internal func handleTapToFocusGesture(recognizer: UITapGestureRecognizer) {
if isTapToFocusEnabled && session.isFocusPointOfInterestSupported {
let point: CGPoint = recognizer.location(in: self)
session.focus(at: preview.captureDevicePointOfInterestForPoint(point: point))
animateTapLayer(layer: focusLayer!, point: point)
delegate?.captureDidTapToFocusAtPoint?(capture: self, point: point)
}
guard isTapToFocusEnabled && session.isFocusPointOfInterestSupported else {
return
}
let point: CGPoint = recognizer.location(in: self)
session.focus(at: preview.captureDevicePointOfInterestForPoint(point: point))
animateTap(view: focusView!, point: point)
delegate?.captureDidTapToFocusAtPoint?(capture: self, point: point)
}
/**
......@@ -468,12 +490,14 @@ open class Capture: View, UIGestureRecognizerDelegate {
*/
@objc
internal func handleTapToExposeGesture(recognizer: UITapGestureRecognizer) {
if isTapToExposeEnabled && session.isExposurePointOfInterestSupported {
let point: CGPoint = recognizer.location(in: self)
session.expose(at: preview.captureDevicePointOfInterestForPoint(point: point))
animateTapLayer(layer: exposureLayer!, point: point)
delegate?.captureDidTapToExposeAtPoint?(capture: self, point: point)
}
guard isTapToExposeEnabled && session.isExposurePointOfInterestSupported else {
return
}
let point: CGPoint = recognizer.location(in: self)
session.expose(at: preview.captureDevicePointOfInterestForPoint(point: point))
animateTap(view: exposureView!, point: point)
delegate?.captureDidTapToExposeAtPoint?(capture: self, point: point)
}
/**
......@@ -482,12 +506,15 @@ open class Capture: View, UIGestureRecognizerDelegate {
*/
@objc
internal func handleTapToResetGesture(recognizer: UITapGestureRecognizer) {
if isTapToResetEnabled {
session.reset()
let point: CGPoint = preview.pointForCaptureDevicePointOfInterest(point: CGPoint(x: 0.5, y: 0.5))
animateTapLayer(layer: resetLayer!, point: point)
delegate?.captureDidTapToResetAtPoint?(capture: self, point: point)
}
guard isTapToResetEnabled else {
return
}
session.reset()
let point: CGPoint = preview.pointForCaptureDevicePointOfInterest(point: CGPoint(x: 0.5, y: 0.5))
animateTap(view: resetView!, point: point)
delegate?.captureDidTapToResetAtPoint?(capture: self, point: point)
}
/**
......@@ -513,10 +540,12 @@ open class Capture: View, UIGestureRecognizerDelegate {
- Parameter gesture: An optional UITapGestureRecognizer to remove.
*/
private func removeTapGesture(gesture: inout UITapGestureRecognizer?) {
if let v: UIGestureRecognizer = gesture {
removeGestureRecognizer(v)
gesture = nil
}
guard let v = gesture else {
return
}
removeGestureRecognizer(v)
gesture = nil
}
/// Prepare the session.
......@@ -559,61 +588,69 @@ open class Capture: View, UIGestureRecognizerDelegate {
/// Prepares the focusLayer.
private func prepareFocusLayer() {
if nil == focusLayer {
focusLayer = Layer(frame: CGRect(x: 0, y: 0, width: 150, height: 150))
focusLayer!.isHidden = true
focusLayer!.borderWidth = 2
focusLayer!.borderColor = Color.white.cgColor
preview.layer.addSublayer(focusLayer!)
}
guard nil == focusView else {
return
}
focusView = UIView(frame: CGRect(x: 0, y: 0, width: 150, height: 150))
focusView!.isHidden = true
focusView!.borderWidth = 2
focusView!.borderColor = .white
preview.addSubview(focusView!)
}
/// Prepares the exposureLayer.
private func prepareExposureLayer() {
if nil == exposureLayer {
exposureLayer = Layer(frame: CGRect(x: 0, y: 0, width: 150, height: 150))
exposureLayer!.isHidden = true
exposureLayer!.borderWidth = 2
exposureLayer!.borderColor = Color.yellow.darken1.cgColor
preview.layer.addSublayer(exposureLayer!)
}
guard nil == exposureView else {
return
}
exposureView = UIView(frame: CGRect(x: 0, y: 0, width: 150, height: 150))
exposureView!.isHidden = true
exposureView!.borderWidth = 2
exposureView!.borderColor = Color.yellow.darken1
preview.addSubview(exposureView!)
}
/// Prepares the resetLayer.
private func prepareResetLayer() {
if nil == resetLayer {
resetLayer = Layer(frame: CGRect(x: 0, y: 0, width: 150, height: 150))
resetLayer!.isHidden = true
resetLayer!.borderWidth = 2
resetLayer!.borderColor = Color.red.accent1.cgColor
preview.layer.addSublayer(resetLayer!)
}
guard nil == resetView else {
return
}
resetView = UIView(frame: CGRect(x: 0, y: 0, width: 150, height: 150))
resetView!.isHidden = true
resetView!.borderWidth = 2
resetView!.borderColor = Color.red.accent1
preview.addSubview(resetView!)
}
/// Animates the tap and layer.
private func animateTapLayer(layer: Layer, point: CGPoint) {
Animation.animationDisabled { [weak layer] in
guard let v = layer else {
return
}
v.transform = CATransform3DIdentity
v.position = point
v.isHidden = false
}
Animation.animateWithDuration(duration: 0.25, animations: { [weak layer] in
guard let v = layer else {
return
}
v.transform = CATransform3DMakeScale(0.5, 0.5, 1)
}) {
Animation.delay(time: 0.4) { [weak layer] in
Animation.animationDisabled { [weak layer] in
guard let v = layer else {
return
}
v.isHidden = true
}
}
}
private func animateTap(view: UIView, point: CGPoint) {
// Animation.animationDisabled { [weak layer] in
// guard let v = layer else {
// return
// }
// v.transform = CATransform3DIdentity
// v.position = point
// v.isHidden = false
// }
// Animation.animateWithDuration(duration: 0.25, animations: { [weak layer] in
// guard let v = layer else {
// return
// }
// v.transform = CATransform3DMakeScale(0.5, 0.5, 1)
// }) {
// Animation.delay(time: 0.4) { [weak layer] in
// Animation.animationDisabled { [weak layer] in
// guard let v = layer else {
// return
// }
// v.isHidden = true
// }
// }
// }
}
}
......@@ -89,11 +89,17 @@ open class CaptureController: ToolbarController, CaptureDelegate, CaptureSession
view.backgroundColor = .black
display = .full
prepareStatusBar()
prepareToolbar()
prepareCapture()
}
/// Prepares the Toolbar.
/// Prepares the statusBar.
private func prepareStatusBar() {
statusBar.backgroundColor = .clear
}
/// Prepares the toolbar.
private func prepareToolbar() {
toolbar.backgroundColor = .clear
toolbar.depthPreset = .none
......
......@@ -70,7 +70,7 @@ open class CapturePreview: View {
/// Prepares the previewLayer.
private func preparePreviewLayer() {
layer.backgroundColor = Color.black.cgColor
layer.bgColor = .black
layer.masksToBounds = true
(layer as! AVCaptureVideoPreviewLayer).videoGravity = AVLayerVideoGravityResizeAspectFill
}
......
......@@ -278,6 +278,7 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0001, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
if let e = error {
delegate?.sessionFailedWithError?(session: self, error: e)
}
......@@ -307,6 +308,7 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0002, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
if let e = error {
delegate?.sessionFailedWithError?(session: self, error: e)
}
......@@ -336,7 +338,8 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0003, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
if let e: NSError = error {
if let e = error {
delegate?.sessionFailedWithError?(session: self, error: e)
}
}
......@@ -372,6 +375,7 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
public override init() {
preset = .presetHigh
super.init()
prepareSession()
prepareSessionQueue()
prepareActiveVideoInput()
......@@ -382,45 +386,55 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
/// Starts the session.
open func startSession() {
if !isRunning {
sessionQueue.async() { [weak self] in
self?.session.startRunning()
}
}
guard !isRunning else {
return
}
sessionQueue.async() { [weak self] in
self?.session.startRunning()
}
}
/// Stops the session.
open func stopSession() {
if isRunning {
sessionQueue.async() { [weak self] in
self?.session.stopRunning()
}
}
guard isRunning else {
return
}
sessionQueue.async() { [weak self] in
self?.session.stopRunning()
}
}
/// Switches the camera if possible.
open func switchCameras() {
if canSwitchCameras {
do {
if let v: AVCaptureDevicePosition = position {
delegate?.sessionWillSwitchCameras?(session: self, position: v)
let videoInput: AVCaptureDeviceInput? = try AVCaptureDeviceInput(device: inactiveCamera!)
session.beginConfiguration()
session.removeInput(activeVideoInput)
if session.canAddInput(videoInput) {
session.addInput(videoInput)
activeVideoInput = videoInput
} else {
session.addInput(activeVideoInput)
}
session.commitConfiguration()
delegate?.sessionDidSwitchCameras?(session: self, position: position!)
}
} catch let e as NSError {
delegate?.sessionFailedWithError?(session: self, error: e)
}
}
guard canSwitchCameras else {
return
}
do {
guard let v = position else {
return
}
delegate?.sessionWillSwitchCameras?(session: self, position: v)
let videoInput: AVCaptureDeviceInput? = try AVCaptureDeviceInput(device: inactiveCamera!)
session.beginConfiguration()
session.removeInput(activeVideoInput)
if session.canAddInput(videoInput) {
session.addInput(videoInput)
activeVideoInput = videoInput
} else {
session.addInput(activeVideoInput)
}
session.commitConfiguration()
delegate?.sessionDidSwitchCameras?(session: self, position: position!)
} catch let e as NSError {
delegate?.sessionFailedWithError?(session: self, error: e)
}
}
/**
......@@ -482,6 +496,7 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0004, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
if let e = error {
delegate?.sessionFailedWithError?(session: self, error: e)
}
......@@ -513,6 +528,7 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
error = NSError(domain: "io.cosmicmind.Material.Capture", code: 0005, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
if let e = error {
delegate?.sessionFailedWithError?(session: self, error: e)
}
......@@ -548,17 +564,20 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
let canResetFocus: Bool = device.isFocusPointOfInterestSupported && device.isFocusModeSupported(.continuousAutoFocus)
let canResetExposure: Bool = device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.continuousAutoExposure)
let centerPoint: CGPoint = CGPoint(x: 0.5, y: 0.5)
do {
do {
try device.lockForConfiguration()
if canResetFocus && focus {
device.focusMode = .continuousAutoFocus
device.focusPointOfInterest = centerPoint
}
if canResetExposure && exposure {
if canResetExposure && exposure {
device.exposureMode = .continuousAutoExposure
device.exposurePointOfInterest = centerPoint
}
device.unlockForConfiguration()
device.unlockForConfiguration()
} catch let e as NSError {
delegate?.sessionFailedWithError?(session: self, error: e)
}
......@@ -567,40 +586,47 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
/// Captures a still image.
open func captureStillImage() {
sessionQueue.async() { [weak self] in
if let s: CaptureSession = self {
if let v: AVCaptureConnection = s.imageOutput.connection(withMediaType: AVMediaTypeVideo) {
v.videoOrientation = s.videoOrientation
s.imageOutput.captureStillImageAsynchronously(from: v) { [weak self] (sampleBuffer: CMSampleBuffer?, error: Error?) -> Void in
if let s = self {
var captureError = error
if nil == captureError {
let data = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)!
if let image1 = UIImage(data: data) {
if let image2 = image1.adjustOrientation() {
s.delegate?.sessionStillImageAsynchronously?(session: s, image: image2)
} else {
var userInfo = [String: Any]()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Cannot fix image orientation.]"
userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Cannot fix image orientation.]"
captureError = NSError(domain: "io.cosmicmind.Material.Capture", code: 0006, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
} else {
var userInfo = [String: Any]()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Cannot capture image from data.]"
userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Cannot capture image from data.]"
captureError = NSError(domain: "io.cosmicmind.Material.Capture", code: 0007, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
}
if let e: Error = captureError {
s.delegate?.sessionStillImageAsynchronouslyFailedWithError?(session: s, error: e)
}
}
}
}
}
guard let s = self else {
return
}
guard let v = s.imageOutput.connection(withMediaType: AVMediaTypeVideo) else {
return
}
v.videoOrientation = s.videoOrientation
s.imageOutput.captureStillImageAsynchronously(from: v) { [weak self] (sampleBuffer: CMSampleBuffer?, error: Error?) -> Void in
guard let s = self else {
return
}
var captureError = error
if nil == captureError {
let data = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)!
if let image1 = UIImage(data: data) {
if let image2 = image1.adjustOrientation() {
s.delegate?.sessionStillImageAsynchronously?(session: s, image: image2)
} else {
var userInfo = [String: Any]()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Cannot fix image orientation.]"
userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Cannot fix image orientation.]"
captureError = NSError(domain: "io.cosmicmind.Material.Capture", code: 0006, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
} else {
var userInfo = [String: Any]()
userInfo[NSLocalizedDescriptionKey] = "[Material Error: Cannot capture image from data.]"
userInfo[NSLocalizedFailureReasonErrorKey] = "[Material Error: Cannot capture image from data.]"
captureError = NSError(domain: "io.cosmicmind.Material.Capture", code: 0007, userInfo: userInfo)
userInfo[NSUnderlyingErrorKey] = error
}
}
if let e = captureError {
s.delegate?.sessionStillImageAsynchronouslyFailedWithError?(session: s, error: e)
}
}
}
}
......@@ -608,37 +634,44 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
open func startRecording() {
if !isRecording {
sessionQueue.async() { [weak self] in
if let s: CaptureSession = self {
if let v: AVCaptureConnection = s.movieOutput.connection(withMediaType: AVMediaTypeVideo) {
v.videoOrientation = s.videoOrientation
v.preferredVideoStabilizationMode = .auto
}
if let v: AVCaptureDevice = s.activeCamera {
if v.isSmoothAutoFocusSupported {
do {
try v.lockForConfiguration()
v.isSmoothAutoFocusEnabled = true
v.unlockForConfiguration()
} catch let e as NSError {
s.delegate?.sessionFailedWithError?(session: s, error: e)
}
}
s.movieOutputURL = s.uniqueURL()
if let v = s.movieOutputURL {
s.movieOutput.startRecording(toOutputFileURL: v as URL!, recordingDelegate: s)
}
}
}
guard let s = self else {
return
}
if let v = s.movieOutput.connection(withMediaType: AVMediaTypeVideo) {
v.videoOrientation = s.videoOrientation
v.preferredVideoStabilizationMode = .auto
}
guard let v = s.activeCamera else {
return
}
if v.isSmoothAutoFocusSupported {
do {
try v.lockForConfiguration()
v.isSmoothAutoFocusEnabled = true
v.unlockForConfiguration()
} catch let e as NSError {
s.delegate?.sessionFailedWithError?(session: s, error: e)
}
}
s.movieOutputURL = s.uniqueURL()
if let v = s.movieOutputURL {
s.movieOutput.startRecording(toOutputFileURL: v as URL!, recordingDelegate: s)
}
}
}
}
/// Stops recording.
open func stopRecording() {
if isRecording {
movieOutput.stopRecording()
}
guard isRecording else {
return
}
movieOutput.stopRecording()
}
public func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
......@@ -665,9 +698,12 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
private func prepareActiveVideoInput() {
do {
activeVideoInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo))
if session.canAddInput(activeVideoInput) {
session.addInput(activeVideoInput)
}
guard session.canAddInput(activeVideoInput) else {
return
}
session.addInput(activeVideoInput)
} catch let e as NSError {
delegate?.sessionFailedWithError?(session: self, error: e)
}
......@@ -677,9 +713,12 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
private func prepareActiveAudioInput() {
do {
activeAudioInput = try AVCaptureDeviceInput(device: AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio))
if session.canAddInput(activeAudioInput) {
session.addInput(activeAudioInput)
}
guard session.canAddInput(activeAudioInput) else {
return
}
session.addInput(activeAudioInput)
} catch let e as NSError {
delegate?.sessionFailedWithError?(session: self, error: e)
}
......@@ -688,18 +727,24 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
/// Prepares the imageOutput.
///
/// Configures a still-image output with JPEG encoding and attaches it to
/// the capture session, bailing out early when the session cannot accept it.
private func prepareImageOutput() {
    imageOutput = AVCaptureStillImageOutput()
    guard session.canAddOutput(imageOutput) else {
        return
    }
    imageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    session.addOutput(imageOutput)
}
/// Prepares the movieOutput.
///
/// Creates the movie-file output and attaches it to the capture session,
/// bailing out early when the session cannot accept it.
private func prepareMovieOutput() {
    movieOutput = AVCaptureMovieFileOutput()
    guard session.canAddOutput(movieOutput) else {
        return
    }
    session.addOutput(movieOutput)
}
/**
......@@ -708,7 +753,7 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
- Returns: An AVCaptureDevice if one exists, or nil otherwise.
*/
private func camera(at position: AVCaptureDevicePosition) -> AVCaptureDevice? {
let devices: Array<AVCaptureDevice> = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! Array<AVCaptureDevice>
let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]
for device in devices {
if device.position == position {
return device
......@@ -725,8 +770,10 @@ open class CaptureSession: NSObject, AVCaptureFileOutputRecordingDelegate {
do {
let directory = try FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true)
let dateFormatter = DateFormatter()
dateFormatter.dateStyle = .full
dateFormatter.dateStyle = .full
dateFormatter.timeStyle = .full
return directory.appendingPathComponent(dateFormatter.string(from: NSDate() as Date) + ".mov")
} catch let e as NSError {
delegate?.sessionCreateMovieFileFailedWithError?(session: self, error: e)
......
......@@ -186,7 +186,7 @@ open class CollectionReusableView: UICollectionReusableView {
/// A property that accesses the backing layer's background color.
@IBInspectable open override var backgroundColor: UIColor? {
    didSet {
        // Forward to the layer's UIColor-based convenience accessor;
        // the old direct cgColor assignment was replaced by bgColor.
        layer.bgColor = backgroundColor
    }
}
......
......@@ -193,7 +193,7 @@ open class CollectionViewCell: UICollectionViewCell {
/// A property that accesses the backing layer's background color.
@IBInspectable
open override var backgroundColor: UIColor? {
    didSet {
        // Forward to the layer's UIColor-based convenience accessor.
        layer.bgColor = backgroundColor
    }
}
......
......@@ -248,13 +248,13 @@ extension CALayer {
}
}
/// A UIColor convenience accessor for the layer's `backgroundColor`,
/// avoiding direct CGColor handling at call sites.
open var bgColor: UIColor? {
    get {
        // Wrap the underlying CGColor, or propagate nil.
        return nil == backgroundColor ? nil : UIColor(cgColor: backgroundColor!)
    }
    set(value) {
        // Fix: assign from the incoming `value`, not from `bgColor`
        // (reading the getter here made the setter a no-op self-assignment).
        backgroundColor = value?.cgColor
    }
}
......
......@@ -81,7 +81,7 @@ internal extension Animation {
}
pLayer.cornerRadius = n / 2
pLayer.backgroundColor = pulse.color.withAlphaComponent(pulse.opacity).cgColor
pLayer.bgColor = pulse.color.withAlphaComponent(pulse.opacity)
pLayer.transform = CATransform3DMakeAffineTransform(CGAffineTransform(scaleX: 0, y: 0))
})
......
......@@ -78,7 +78,7 @@ open class TableViewCell: UITableViewCell {
/// A property that accesses the backing layer's background color.
@IBInspectable
open override var backgroundColor: UIColor? {
    didSet {
        // Forward to the layer's UIColor-based convenience accessor.
        layer.bgColor = backgroundColor
    }
}
......
......@@ -38,7 +38,7 @@ public class TextView: UITextView {
/// A property that accesses the backing layer's background color.
@IBInspectable public override var backgroundColor: UIColor? {
    didSet {
        // Forward to the layer's UIColor-based convenience accessor.
        layer.bgColor = backgroundColor
    }
}
......
......@@ -120,7 +120,7 @@ open class View: UIView {
/// A property that accesses the backing layer's background color.
@IBInspectable
open override var backgroundColor: UIColor? {
    didSet {
        // Forward to the layer's UIColor-based convenience accessor.
        layer.bgColor = backgroundColor
    }
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment