3 changes: 3 additions & 0 deletions CoreImageVideo.xcodeproj/project.pbxproj
100644 → 100755
@@ -199,6 +199,7 @@
developmentRegion = English;
hasScannedForEncodings = 0;
knownRegions = (
English,
en,
Base,
);
@@ -368,6 +369,7 @@
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 5.0;
};
name = Debug;
};
@@ -379,6 +381,7 @@
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 5.0;
};
name = Release;
};
Empty file modified CoreImageVideo/AppDelegate.swift
100644 → 100755
Empty file.
Empty file modified CoreImageVideo/Base.lproj/Main.storyboard
100644 → 100755
Empty file.
15 changes: 8 additions & 7 deletions CoreImageVideo/CameraBufferSource.swift
100644 → 100755
@@ -24,25 +24,26 @@ struct CaptureBufferSource {
}
}

init?(device: AVCaptureDevice, transform: CGAffineTransform, callback: BufferConsumer) {
init?(device: AVCaptureDevice, transform: CGAffineTransform, callback: @escaping BufferConsumer) {
captureSession = AVCaptureSession()
if let deviceInput = AVCaptureDeviceInput(device: device, error: nil) where captureSession.canAddInput(deviceInput) {

if let deviceInput = try? AVCaptureDeviceInput(device: device), captureSession.canAddInput(deviceInput) {
captureSession.addInput(deviceInput)
let dataOutput = AVCaptureVideoDataOutput()
dataOutput.alwaysDiscardsLateVideoFrames = true
dataOutput.videoSettings = pixelBufferDict
captureDelegate = CaptureBufferDelegate { buffer in
callback(buffer, transform)
}
dataOutput.setSampleBufferDelegate(captureDelegate, queue: dispatch_get_main_queue())
dataOutput.setSampleBufferDelegate(captureDelegate, queue: DispatchQueue.main)
captureSession.addOutput(dataOutput)
captureSession.commitConfiguration()
return
}
return nil
}

init?(position: AVCaptureDevicePosition, callback: BufferConsumer) {
init?(position: AVCaptureDevice.Position, callback: @escaping BufferConsumer) {
if let camera = position.device {
self.init(device: camera, transform: position.transform, callback: callback)
return
@@ -52,13 +53,13 @@ struct CaptureBufferSource {
}

private class CaptureBufferDelegate: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
let callback: CMSampleBuffer -> ()
let callback: (CMSampleBuffer) -> ()

init(_ callback: CMSampleBuffer -> ()) {
init(_ callback: @escaping (CMSampleBuffer) -> ()) {
self.callback = callback
}

func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
callback(sampleBuffer)
}
}
}
Empty file modified CoreImageVideo/Cat.mp4
100644 → 100755
Empty file.
12 changes: 6 additions & 6 deletions CoreImageVideo/CoreImageView.swift
100644 → 100755
@@ -18,12 +18,12 @@ class CoreImageView: GLKView {
let coreImageContext: CIContext

override convenience init(frame: CGRect) {
let eaglContext = EAGLContext(API: EAGLRenderingAPI.OpenGLES2)
let eaglContext = EAGLContext(api: EAGLRenderingAPI.openGLES2)
self.init(frame: frame, context: eaglContext)
}

override init(frame: CGRect, context eaglContext: EAGLContext!) {
coreImageContext = CIContext(EAGLContext: eaglContext)
coreImageContext = CIContext(eaglContext: eaglContext)
super.init(frame: frame, context: eaglContext)
// We will be calling display() directly, hence this needs to be false
enableSetNeedsDisplay = false
@@ -33,11 +33,11 @@ class CoreImageView: GLKView {
fatalError("init(coder:) has not been implemented")
}

override func drawRect(rect: CGRect) {
override func draw(_ rect: CGRect) {
if let img = image {
let scale = self.window?.screen.scale ?? 1.0
let destRect = CGRectApplyAffineTransform(bounds, CGAffineTransformMakeScale(scale, scale))
coreImageContext.drawImage(img, inRect: destRect, fromRect: img.extent())
let destRect = bounds.applying(CGAffineTransform(scaleX: scale, y: scale))
coreImageContext.draw(img, in: destRect, from: img.extent)
}
}
}
}
26 changes: 13 additions & 13 deletions CoreImageVideo/Extensions.swift
100644 → 100755
@@ -12,27 +12,27 @@ import AVFoundation
extension CGAffineTransform {

init(rotatingWithAngle angle: CGFloat) {
let t = CGAffineTransformMakeRotation(angle)
let t = CGAffineTransform(rotationAngle: angle)
self.init(a: t.a, b: t.b, c: t.c, d: t.d, tx: t.tx, ty: t.ty)

}
init(scaleX sx: CGFloat, scaleY sy: CGFloat) {
let t = CGAffineTransformMakeScale(sx, sy)
let t = CGAffineTransform(scaleX: sx, y: sy)
self.init(a: t.a, b: t.b, c: t.c, d: t.d, tx: t.tx, ty: t.ty)

}

func scale(sx: CGFloat, sy: CGFloat) -> CGAffineTransform {
return CGAffineTransformScale(self, sx, sy)
return self.scaledBy(x: sx, y: sy)
}
func rotate(angle: CGFloat) -> CGAffineTransform {
return CGAffineTransformRotate(self, angle)
return self.rotated(by: angle)
}
}

extension CIImage {
convenience init(buffer: CMSampleBuffer) {
self.init(CVPixelBuffer: CMSampleBufferGetImageBuffer(buffer))
self.init(cvPixelBuffer: CMSampleBufferGetImageBuffer(buffer)!)
}
}

@@ -42,22 +42,22 @@ extension CGRect {
}
}

extension AVCaptureDevicePosition {
extension AVCaptureDevice.Position {
var transform: CGAffineTransform {
switch self {
case .Front:
return CGAffineTransform(rotatingWithAngle: -CGFloat(M_PI_2)).scale(1, sy: -1)
case .Back:
return CGAffineTransform(rotatingWithAngle: -CGFloat(M_PI_2))
case .front:
return CGAffineTransform(rotatingWithAngle: -CGFloat(Double.pi/2)).scale(sx: 1, sy: -1)
case .back:
return CGAffineTransform(rotatingWithAngle: -CGFloat(Double.pi/2))
default:
return CGAffineTransformIdentity
return .identity

}
}

var device: AVCaptureDevice? {
return AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo).filter {
return AVCaptureDevice.devices(for: AVMediaType.video).filter {
$0.position == self
}.first as? AVCaptureDevice
}
}
}
62 changes: 32 additions & 30 deletions CoreImageVideo/FunctionalCoreImage.swift
100644 → 100755
@@ -9,48 +9,48 @@
import Foundation
import UIKit

typealias Filter = CIImage -> CIImage
typealias Filter = ((CIImage) -> (CIImage))

func blur(radius: Double) -> Filter {
return { image in
let parameters = [
let parameters:[String:Any] = [
kCIInputRadiusKey: radius,
kCIInputImageKey: image
]
let filter = CIFilter(name: "CIGaussianBlur",
withInputParameters: parameters)
return filter.outputImage
parameters: parameters)
return filter!.outputImage!
}
}

func colorGenerator(color: UIColor) -> Filter {
func colorGenerator(_ color: UIColor) -> Filter {
return { _ in
let parameters = [kCIInputColorKey: color]
let filter = CIFilter(name: "CIConstantColorGenerator",
withInputParameters: parameters)
return filter.outputImage
parameters: parameters)
return filter!.outputImage!
}
}

func hueAdjust(angleInRadians: Float) -> Filter {
return { image in
let parameters = [
let parameters:[String : Any] = [
kCIInputAngleKey: angleInRadians,
kCIInputImageKey: image
]
]
let filter = CIFilter(name: "CIHueAdjust",
withInputParameters: parameters)
return filter.outputImage
parameters: parameters)
return filter!.outputImage!
}
}

func pixellate(scale: Float) -> Filter {
return { image in
let parameters = [
let parameters:[String : Any] = [
kCIInputImageKey:image,
kCIInputScaleKey:scale
]
return CIFilter(name: "CIPixellate", withInputParameters: parameters).outputImage
return CIFilter(name: "CIPixellate", parameters: parameters)!.outputImage!
}
}

@@ -59,44 +59,44 @@ func kaleidoscope() -> Filter {
let parameters = [
kCIInputImageKey:image,
]
return CIFilter(name: "CITriangleKaleidoscope", withInputParameters: parameters).outputImage.imageByCroppingToRect(image.extent())
return CIFilter(name: "CITriangleKaleidoscope", parameters: parameters)!.outputImage!.cropped(to: image.extent)
}
}


func vibrance(amount: Float) -> Filter {
return { image in
let parameters = [
let parameters:[String:Any] = [
kCIInputImageKey: image,
"inputAmount": amount
]
return CIFilter(name: "CIVibrance", withInputParameters: parameters).outputImage
return CIFilter(name: "CIVibrance", parameters: parameters)!.outputImage!
}
}

func compositeSourceOver(overlay: CIImage) -> Filter {
func compositeSourceOver(_ overlay: CIImage) -> Filter {
return { image in
let parameters = [
let parameters:[String:Any] = [
kCIInputBackgroundImageKey: image,
kCIInputImageKey: overlay
]
let filter = CIFilter(name: "CISourceOverCompositing",
withInputParameters: parameters)
let cropRect = image.extent()
return filter.outputImage.imageByCroppingToRect(cropRect)
parameters: parameters)
let cropRect = image.extent
return filter!.outputImage!.cropped(to:cropRect)
}
}


func radialGradient(center: CGPoint, radius: CGFloat) -> CIImage {
let params: [NSObject: AnyObject] = [
let params: [String: Any] = [
"inputColor0": CIColor(red: 1, green: 1, blue: 1),
"inputColor1": CIColor(red: 0, green: 0, blue: 0),
"inputCenter": CIVector(CGPoint: center),
"inputCenter": CIVector(cgPoint: center),
"inputRadius0": radius,
"inputRadius1": radius + 1
]
return CIFilter(name: "CIRadialGradient", withInputParameters: params).outputImage
return CIFilter(name: "CIRadialGradient", parameters: params)!.outputImage!
}

func blendWithMask(background: CIImage, mask: CIImage) -> Filter {
@@ -107,9 +107,9 @@ func blendWithMask(background: CIImage, mask: CIImage) -> Filter {
kCIInputImageKey: image
]
let filter = CIFilter(name: "CIBlendWithMask",
withInputParameters: parameters)
let cropRect = image.extent()
return filter.outputImage.imageByCroppingToRect(cropRect)
parameters: parameters)
let cropRect = image.extent
return filter!.outputImage!.cropped(to:cropRect)
}
}

@@ -120,9 +120,11 @@ func colorOverlay(color: UIColor) -> Filter {
}
}

infix operator >>>: FilterPrecedence

infix operator >>> { associativity left }

func >>> (filter1: Filter, filter2: Filter) -> Filter {
precedencegroup FilterPrecedence {
associativity: left
}
func >>> (filter1: @escaping Filter, filter2: @escaping Filter) -> Filter {
return { img in filter2(filter1(img)) }
}
Empty file modified CoreImageVideo/Images.xcassets/AppIcon.appiconset/Contents.json
100644 → 100755
Empty file.
Empty file modified CoreImageVideo/Info.plist
100644 → 100755
Empty file.
12 changes: 6 additions & 6 deletions CoreImageVideo/SimpleFilterViewController.swift
100644 → 100755
@@ -14,26 +14,26 @@ class SimpleFilterViewController: UIViewController {
var coreImageView: CoreImageView?

var angleForCurrentTime: Float {
return Float(NSDate.timeIntervalSinceReferenceDate() % M_PI*2)
return Float( Date.timeIntervalSinceReferenceDate.truncatingRemainder(dividingBy: Double.pi) * 2)
}

override func loadView() {
coreImageView = CoreImageView(frame: CGRect())
self.view = coreImageView
}

override func viewDidAppear(animated: Bool) {
override func viewDidAppear(_ animated: Bool) {
setupCameraSource()
}

override func viewDidDisappear(animated: Bool) {
override func viewDidDisappear(_ animated: Bool) {
source?.running = false
}

func setupCameraSource() {
source = CaptureBufferSource(position: AVCaptureDevicePosition.Front) { [unowned self] (buffer, transform) in
let input = CIImage(buffer: buffer).imageByApplyingTransform(transform)
let filter = hueAdjust(self.angleForCurrentTime)
source = CaptureBufferSource(position: AVCaptureDevice.Position.front) { [unowned self] (buffer, transform) in
let input = CIImage(buffer: buffer).transformed(by: transform)
let filter = hueAdjust(angleInRadians: self.angleForCurrentTime)
self.coreImageView?.image = filter(input)
}
source?.running = true
14 changes: 7 additions & 7 deletions CoreImageVideo/StaticVideoViewController.swift
100644 → 100755
@@ -14,22 +14,22 @@ class StaticVideoViewController: UIViewController {
var videoSource: VideoSampleBufferSource?

var angleForCurrentTime: Float {
return Float(NSDate.timeIntervalSinceReferenceDate() % M_PI*2)
return Float(Date.timeIntervalSinceReferenceDate.truncatingRemainder(dividingBy: TimeInterval(Double.pi)) * 2.0)
}

override func loadView() {
coreImageView = CoreImageView(frame: CGRect())
self.view = coreImageView
}

override func viewDidAppear(animated: Bool) {
let url = NSBundle.mainBundle().URLForResource("Cat", withExtension: "mp4")!
override func viewDidAppear(_ animated: Bool) {
let url = Bundle.main.url(forResource: "Cat", withExtension: "mp4")!
videoSource = VideoSampleBufferSource(url: url) { [unowned self] buffer in
let image = CIImage(CVPixelBuffer: buffer)
let image = CIImage(cvPixelBuffer: buffer)
let background = kaleidoscope()(image)
let mask = radialGradient(image.extent().center, CGFloat(self.angleForCurrentTime) * 100)
let output = blendWithMask(image, mask)(background)
let mask = radialGradient(center: image.extent.center, radius: CGFloat(self.angleForCurrentTime) * 100)
let output = blendWithMask(background: image, mask: mask)(background)
self.coreImageView?.image = output
}
}
}
}
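
A minimal usage sketch (not part of the diff above), assuming the Swift 5 Filter typealias, hueAdjust, pixellate, and the >>> operator as migrated in FunctionalCoreImage.swift; the "Cat" image name is a hypothetical asset:

import UIKit

// Compose two of the filters defined above; >>> applies filter1 first, then filter2.
let chain: Filter = hueAdjust(angleInRadians: .pi / 4) >>> pixellate(scale: 12)

// Hypothetical input image; any CIImage works, e.g. one built from a camera buffer.
if let cgImage = UIImage(named: "Cat")?.cgImage {
    let input = CIImage(cgImage: cgImage)
    let output = chain(input) // a CIImage that CoreImageView can draw
    _ = output
}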