Ilya Loshkarev loshkarev.i@gmail.com
SFEDU 2017
// All available assets: a nil options argument applies no filtering or sorting.
let fetchResult = PHAsset.fetchAssets(with: nil)
// Resolve the asset backing the tapped cell.
// NOTE(review): assumes a flat, single-section collection-view layout — confirm.
let asset = fetchResult.object(at: indexPath.item)
Assets only contain metadata and are immutable
// Fetch the system-generated smart albums in the user's library.
let collections = PHAssetCollection.fetchAssetCollections(
with: .smartAlbum, // collection type
subtype: .albumRegular, // album type
options: nil)
// all available assets in the selected collection
// Fixed: IndexPath has no `raw` member — `item` is the UIKit accessor.
let fetchResult = PHAsset.fetchAssets(
in: collections.object(at: indexPath.item),
options: nil)
Assets grouping: moments, smart albums, user-created albums
// Use the shared manager: instantiating a fresh PHImageManager per request
// (as the original did) defeats its internal caching and request batching.
PHImageManager.default().requestImage(for: asset,
targetSize: thumbnailSize, contentMode: .aspectFill,
options: nil) {
image, info in
// The handler may fire more than once (a degraded image first, then full quality).
if image != nil {
cell.imageView.image = image
}
}
The Image Manager allows you to download assets from both local and cloud storage
// Queue an album-creation change against the shared photo library.
PHPhotoLibrary.shared().performChanges({
    PHAssetCollectionChangeRequest
        .creationRequestForAssetCollection(withTitle: title)
}) { success, error in
    // Completion delivered asynchronously; failure is logged, not thrown.
    if !success { print("error creating album: \(String(describing: error))") }
}
A change request allows you to change metadata, and to create or delete elements of the photo library
// Add a newly created asset to an existing collection.
// Fixed: PHAssetChangeRequest / PHAssetCollectionChangeRequest may ONLY be
// created inside a performChanges block — creating them outside (as the
// original did) raises an exception at runtime.
PHPhotoLibrary.shared().performChanges({
let request = PHAssetChangeRequest.creationRequestForAsset(from: image)
let changeRequest = PHAssetCollectionChangeRequest(for: assetCollection)
// The placeholder stands in for the asset until the change block commits.
changeRequest?.addAssets([request.placeholderForCreatedAsset!] as NSArray)
}, completionHandler: { success, error in
if !success { print("error creating asset: \(String(describing: error))") }
})
Placeholders allow access to the metadata of objects that are not yet in the library
// Kingfisher: download `url` into the image view (result cached automatically).
imageView.kf.setImage(with: url)
// Show `image` as a placeholder while the download runs.
imageView.kf.setImage(with: url, placeholder: image)
// Observe incremental download progress.
imageView.kf.setImage(with: url, progressBlock:
{ receivedSize, totalSize in
print("downloading progress: \(receivedSize)")
})
// Inspect the final result: image, error, cache source, and resolved URL.
imageView.kf.setImage(with: url, completionHandler:
{ (image, error, cacheType, imageUrl) in
print("Downloaded Image: \(image)")
})
Supports image, UI and GIF placeholders
All downloaded images are cached by default
// Store an image in Kingfisher's default cache (memory + disk).
ImageCache.default.store(image, forKey: "key_for_image")
// Store a processed image alongside its original data, memory-only.
ImageCache.default.store(processedImage, original: imageData,
forKey: "key_for_another_image",
toDisk: false) // not persistent
// Asynchronous lookup; `cacheType` reports whether it came from memory or disk.
ImageCache.default.retrieveImage(forKey: "key_for_image", options: nil)
{ image, cacheType in
if let image = image { imageView.image = image }
}
Allows to store images on disk or in memory
// Direct download, bypassing the setImage pipeline.
// The returned task handle allows cancellation of an in-flight download.
let task = ImageDownloader.default.downloadImage(with: url,
options: [], progressBlock: nil)
{ (image, error, url, data) in
print("Downloaded Image: \(image)") // not cached
}
task.cancel()
Images downloaded directly are not cached by default
// A processor transforms the downloaded image before display/caching.
let processor = ResizingImageProcessor(targetSize: CGSize(width: 100, height: 100))
// Attach it via the options list; the processed result is what gets cached.
imageView.kf.setImage(with: url, options: [.processor(processor)])
There are several basic processors that can be combined
Both original and processed images can be cached
Lightweight, mutable object that stores input image and required parameters
CIKernel
represents a function that is executed for every single pixel on the input image
Renderable object that stores a filter graph required
to retrieve an image
CIContext
responsible for compiling and running the filters.
Represents render target for resulting image
All the built-in Core Image filters belong to one or more of 21 categories
// Enumerate every built-in blur filter by category.
// Fixed: constant was misspelled (kCICatetgoryBlur → kCICategoryBlur) and the
// Swift 2 method name was used; the modern spelling is filterNames(inCategory:).
for fName in CIFilter.filterNames(inCategory: kCICategoryBlur) {
let blur = CIFilter(name: fName)
}
Filters were designed to be used from UI applications
They rely heavily on dictionaries and descriptions
An image processor that identifies features
Barcodes, QR-codes and faces are supported
// CIDetector(ofType:context:options:) is a failable initializer — it returns
// nil if the requested detector cannot be built, so chain optionally.
let faceDetector = CIDetector(ofType: CIDetectorTypeFace,
context: nil,
options: [CIDetectorAccuracy : CIDetectorAccuracyHigh])
// Modern Swift name is features(in:); featuresInImage(_:) was the Swift 2 form.
let faces = faceDetector?.features(in: personciImage) ?? []
// compactMap replaces the original force-cast `as! [CIFaceFeature]`.
for face in faces.compactMap({ $0 as? CIFaceFeature }) { /*...*/ }
Replaced by Vision framework in iOS 11
not deprecated
Create target image with described content
// Encode a URL as a QR code. CIQRCodeGenerator expects ISO Latin-1 data.
let data = "http://mmcs.sfedu.ru".data(using: String.Encoding.isoLatin1)
let qr = CIFilter(name: "CIQRCodeGenerator")! // `let`: never reassigned
qr.setValue(data, forKey: "inputMessage")
// Fixed: the documented key is "inputCorrectionLevel" ("L"/"M"/"Q"/"H"),
// not "errorCorrection" — the original silently failed to set the level.
qr.setValue("H", forKey: "inputCorrectionLevel")
// The generator emits a tiny image (1 pt per module); scale it up for display.
let scaleUp = CGAffineTransform(scaleX: 100, y: 100)
let output = qr.outputImage?.transformed(by: scaleUp)
let blur = CIFilter(name: "CIZoomBlur")!
blur.setDefaults()
// Fixed: UIImage.ciImage is nil for images loaded from files (it is only set
// for UIImages created *from* a CIImage) — wrap with CIImage(image:) instead.
if let uiImage = UIImage(named: "my_image.jpg"),
   let input = CIImage(image: uiImage) {
blur.setValue(input, forKey: kCIInputImageKey)
}
// Fixed: outputImage is optional; UIImage(ciImage:) takes a non-optional.
if let result = blur.outputImage {
outputView.image = UIImage(ciImage: result)
}
Each time you render UIImage a new CIContext is created
To apply a filter to each frame we need the pixel buffer associated with that frame
// Attach a video output so we can pull pixel buffers per frame.
videoOutput = AVPlayerItemVideoOutput()
// Fixed: currentItem is optional, and the Swift 3 method name is add(_:).
player.currentItem?.add(videoOutput)
// create display refresh callback
// Fixed: target/selector requires #selector (the target method must be @objc);
// the original passed a method reference, which does not compile.
let displayLink = CADisplayLink(target: self,
                                selector: #selector(displayLinkDidRefresh(link:)))
// Fixed: Swift 2 spelling replaced with add(to:forMode:).
displayLink.add(to: .main, forMode: .commonModes)
// on each display refresh
// Called on every display refresh; pulls the current frame (if any) and wraps
// it in a CIImage for filtering. @objc is required for the #selector target.
@objc func displayLinkDidRefresh(link: CADisplayLink) {
// Convert the host clock time into the item's timeline.
let itemTime = videoOutput.itemTime(forHostTime: CACurrentMediaTime())
// Fixed: Swift 2 API names, and copyPixelBuffer returns an optional —
// bind it instead of using the raw value.
if videoOutput.hasNewPixelBuffer(forItemTime: itemTime),
   let pixelBuffer = videoOutput.copyPixelBuffer(forItemTime: itemTime,
                                                 itemTimeForDisplay: nil) {
let image = CIImage(cvPixelBuffer: pixelBuffer)
}
}
For better performance we can render a CIImage using OpenGL
// GPU-backed rendering: share one GL context between GLKit and Core Image.
let eaglContext = EAGLContext(api: .openGLES2)!
let glkView = GLKView(frame: self.bounds,
context: eaglContext)
// Fixed: dropped the `[unowned self]` capture list — the closure runs exactly
// once, synchronously, on first access, and never references self.
lazy var ciContext: CIContext = {
return CIContext(eaglContext: eaglContext, // GPU-based
// NSNull disables color management for speed.
options: [kCIContextWorkingColorSpace: NSNull()])
}()
Drawing CIImage to an existing context is much faster.
By associating the ciContext with a glContext we are able to store the result in a buffer and redraw it only when necessary
Several filters can be combined into a single filter
// Composite filter: boost saturation, apply bloom, then crop back to the
// input's extent (CIBloom enlarges the image's extent).
// NOTE(review): assumes `inputImage` is a non-optional CIImage property of the
// enclosing filter class — confirm; a nil value in the parameter dictionary
// would fail at runtime.
override var outputImage: CIImage? {
let glowingImage = CIFilter( name: "CIColorControls",
withInputParameters: [kCIInputImageKey: inputImage, kCIInputSaturationKey: 1.75])?
.outputImage?.applyingFilter( "CIBloom",
parameters: [ kCIInputRadiusKey: 2.5, kCIInputIntensityKey: 1.25])
.cropped(to: inputImage.extent)
return glowingImage
}
Provide generalized interface for custom filters
// Describes the filter for generic UIs and Core Image introspection:
// a display name plus, per input parameter, its type, bounds and defaults.
override var attributes: [String : Any] {
return [ kCIAttributeFilterDisplayName: "My Filter Name",
// Each input parameter gets its own attribute dictionary.
"inputParam": [ kCIAttributeIdentity: 0,
kCIAttributeDisplayName: "Parameter Name",
kCIAttributeMin: 0,
kCIAttributeDefault: 10,
// Slider bounds let host apps build a generic control.
kCIAttributeSliderMin: 0,
kCIAttributeSliderMax: 100,
kCIAttributeClass: "NSNumber",
kCIAttributeType: kCIAttributeTypeScalar ]
]
}
// Factory object Core Image calls to instantiate filters registered under
// our custom name.
class MyFilterConstructor: NSObject, CIFilterConstructor {
// Fixed: the Swift spelling of the CIFilterConstructor requirement is
// filter(withName:) — the Swift 2 form filterWithName(_:) never actually
// satisfies the protocol, so the constructor would be ignored.
func filter(withName name: String) -> CIFilter? {
if name == "My Filter Name" {
return MyFilter()
}
return nil
}
}
// Fixed: `constructor:` takes an *instance* (the original passed the type and
// had a trailing-comma syntax error), and `classAttributes:` is a required
// argument of registerName(_:constructor:classAttributes:).
CIFilter.registerName("My Filter Name",
                      constructor: MyFilterConstructor(),
                      classAttributes: [kCIAttributeFilterCategories:
                                            [kCICategoryColorEffect]])
CIFilterConstructor
an object that represents filter category
and implements filterWithName(_:)
Defines functions, data types, and keywords that you can use to specify image processing operations for custom Core Image filters that you write
// Fixed: CIWarpKernel(source:) is failable (nil when the kernel source fails
// to compile), so `apply` must be called through optional chaining.
let kernel = CIWarpKernel(source: " /* kernel */ ")
// The ROI callback answers: to render `rect` of the output, which region of
// the input is needed? Identity here — the warp maps pixels 1:1.
kernel?.apply(extent: inputImage.extent, roiCallback: {
(index, rect) in return rect
}, image: inputImage, arguments: arguments)
Changes pixel position
// Warp kernel: returns the *source* coordinate to sample for the current
// destination pixel (geometry change only, colors untouched).
kernel vec2 upsideDownWarp(vec2 extent) {
// Flip vertically: keep x, mirror y across the image height.
return vec2(destCoord().x, extent.y - destCoord().y);
}
destCoord()
returns the coordinates of the current pixel
Changes pixel color
// Color kernel: computes the output color for the current pixel.
kernel vec4 thresholdFilter(__sample pixel) {
// sqrt brightens midtones; alpha passes through unchanged.
// Fixed: original referenced undefined `image.a` — the sampled pixel is `pixel`.
return vec4(sqrt(pixel.rgb), pixel.a);
}
__sample
contains data for current pixel