iOS: Converts a list of images (or a panorama image, or GIF data) to a video in .mov format
import UIKit
import AVFoundation

extension AVAsset {

    /// Derives the interface orientation and capture device position from the
    /// video track's preferred transform.
    func videoOrientation() -> (orientation: UIInterfaceOrientation, device: AVCaptureDevice.Position) {
        var orientation: UIInterfaceOrientation = .unknown
        var device: AVCaptureDevice.Position = .unspecified

        let tracks: [AVAssetTrack] = self.tracks(withMediaType: .video)
        if let videoTrack = tracks.first {
            let t = videoTrack.preferredTransform
            if t.a == 0 && t.b == 1.0 && t.d == 0 {
                orientation = .portrait
                if t.c == 1.0 {
                    device = .front
                } else if t.c == -1.0 {
                    device = .back
                }
            } else if t.a == 0 && t.b == -1.0 && t.d == 0 {
                orientation = .portraitUpsideDown
                if t.c == -1.0 {
                    device = .front
                } else if t.c == 1.0 {
                    device = .back
                }
            } else if t.a == 1.0 && t.b == 0 && t.c == 0 {
                orientation = .landscapeRight
                if t.d == -1.0 {
                    device = .front
                } else if t.d == 1.0 {
                    device = .back
                }
            } else if t.a == -1.0 && t.b == 0 && t.c == 0 {
                orientation = .landscapeLeft
                if t.d == 1.0 {
                    device = .front
                } else if t.d == -1.0 {
                    device = .back
                }
            }
        }
        return (orientation, device)
    }

    /// Extracts the asset's audio tracks and exports them to `url` as an .m4a file.
    func writeAudioTrackToURL(url: URL, completion: @escaping (Bool, Error?) -> ()) {
        do {
            let audioAsset = try self.audioAsset()
            audioAsset.writeToURL(url: url, completion: completion)
        } catch {
            completion(false, error)
        }
    }

    func writeToURL(url: URL, completion: @escaping (Bool, Error?) -> ()) {
        guard let exportSession = AVAssetExportSession(asset: self, presetName: AVAssetExportPresetAppleM4A) else {
            completion(false, nil)
            return
        }
        exportSession.outputFileType = .m4a
        exportSession.outputURL = url
        exportSession.exportAsynchronously {
            switch exportSession.status {
            case .completed:
                completion(true, nil)
            case .unknown, .waiting, .exporting, .failed, .cancelled:
                completion(false, exportSession.error)
            }
        }
    }

    /// Builds a composition containing only this asset's audio tracks.
    func audioAsset() throws -> AVAsset {
        let composition = AVMutableComposition()
        let audioTracks = tracks(withMediaType: .audio)
        for track in audioTracks {
            let compositionTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
            try compositionTrack?.insertTimeRange(track.timeRange, of: track, at: track.timeRange.start)
            compositionTrack?.preferredTransform = track.preferredTransform
        }
        return composition
    }
}
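
// A minimal usage sketch for the AVAsset extension above (not part of the
// original gist); `demoAVAssetExtension` and the "clip.mov" path are
// hypothetical placeholders for any local video file.
func demoAVAssetExtension() {
    let videoURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("clip.mov")
    let asset = AVAsset(url: videoURL)

    // Read back how the clip was recorded.
    let (orientation, position) = asset.videoOrientation()
    print("Orientation: \(orientation.rawValue), camera position: \(position.rawValue)")

    // Pull out the audio track as a standalone .m4a file.
    let audioURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("audio.m4a")
    asset.writeAudioTrackToURL(url: audioURL) { success, error in
        print(success ? "Audio exported to \(audioURL)" : "Export failed: \(String(describing: error))")
    }
}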
import UIKit

extension UIImage {

    /// Scales the image to fit within the given size while keeping its aspect
    /// ratio, centering the result on a canvas of `newSize`.
    ///
    /// - Parameter newSize: the new size for the image
    /// - Returns: the resized image
    func scaleImageToSize(newSize: CGSize) -> UIImage {
        var scaledImageRect = CGRect.zero

        let aspectWidth: CGFloat = newSize.width / size.width
        let aspectHeight: CGFloat = newSize.height / size.height
        let aspectRatio: CGFloat = min(aspectWidth, aspectHeight)

        scaledImageRect.size.width = size.width * aspectRatio
        scaledImageRect.size.height = size.height * aspectRatio
        scaledImageRect.origin.x = (newSize.width - scaledImageRect.size.width) / 2.0
        scaledImageRect.origin.y = (newSize.height - scaledImageRect.size.height) / 2.0

        UIGraphicsBeginImageContextWithOptions(newSize, false, 0)
        defer { UIGraphicsEndImageContext() }
        draw(in: scaledImageRect)
        return UIGraphicsGetImageFromCurrentImageContext() ?? self
    }

    /// Computes a video-friendly size for the image: both dimensions are rounded
    /// to multiples of 16 (as H.264 prefers), the width is kept roughly between
    /// 800 and 1400 points, and the height is capped at 1200 points.
    ///
    /// - Returns: a size fit for video
    func getSizeForVideo() -> CGSize {
        let scale = UIScreen.main.scale
        var imageWidth = 16 * ((size.width / scale) / 16).rounded(.awayFromZero)
        var imageHeight = 16 * ((size.height / scale) / 16).rounded(.awayFromZero)

        if imageWidth > 1400 {
            let ratio = 1400 / imageWidth
            imageWidth = 16 * (imageWidth / 16).rounded(.towardZero) * ratio
            imageHeight = 16 * (imageHeight / 16).rounded(.towardZero) * ratio
        }
        if imageWidth < 800 {
            let ratio = 800 / imageWidth
            imageWidth = 16 * (imageWidth / 16).rounded(.awayFromZero) * ratio
            imageHeight = 16 * (imageHeight / 16).rounded(.awayFromZero) * ratio
        }
        if imageHeight > 1200 {
            let ratio = 1200 / imageHeight
            imageWidth = 16 * (imageWidth / 16).rounded(.towardZero) * ratio
            imageHeight = 16 * (imageHeight / 16).rounded(.towardZero) * ratio
        }
        return CGSize(width: imageWidth, height: imageHeight)
    }

    /// Redraws the image at the size returned by `getSizeForVideo()`.
    ///
    /// - Returns: the resized image, or nil if no drawing context could be created
    func resizeImageToVideoSize() -> UIImage? {
        let scale = UIScreen.main.scale
        let videoImageSize = getSizeForVideo()
        let imageRect = CGRect(x: 0, y: 0, width: videoImageSize.width * scale, height: videoImageSize.height * scale)

        UIGraphicsBeginImageContextWithOptions(imageRect.size, false, scale)
        // End the context on every exit path so early returns don't leak it.
        defer { UIGraphicsEndImageContext() }
        guard UIGraphicsGetCurrentContext() != nil else { return nil }
        draw(in: imageRect, blendMode: .normal, alpha: 1)
        return UIGraphicsGetImageFromCurrentImageContext()
    }
}
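
// A minimal usage sketch for the UIImage helpers above (not part of the
// original gist); `demoImageResizing` and the "photo" asset name are
// hypothetical.
func demoImageResizing() {
    guard let photo = UIImage(named: "photo") else { return }
    // A video-friendly size: both dimensions are multiples of 16.
    let videoSize = photo.getSizeForVideo()
    print("Video-friendly size: \(videoSize)")
    // Fit the photo into a 320x240 canvas, keeping its aspect ratio.
    let thumbnail = photo.scaleImageToSize(newSize: CGSize(width: 320, height: 240))
    // Redraw the photo at the video-friendly size.
    let videoFrame = photo.resizeImageToVideoSize()
    print(thumbnail.size, videoFrame?.size ?? .zero)
}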
import UIKit
import AVFoundation

struct ImagesToVideoExporter {

    typealias ProgressHandler = ((Progress) -> Void)?
    typealias SuccessCompletion = ((URL) -> Void)?
    typealias FailureCompletion = ((Error) -> Void)?

    static func generateVideo(withImages images: [UIImage],
                              framesPerSecond: CMTimeScale = 20,
                              configuration: Configuration = Configuration(),
                              progress: ProgressHandler = nil,
                              successCompletion: SuccessCompletion = nil,
                              failureCompletion: FailureCompletion = nil) {
        let writerQueue = DispatchQueue(label: "writer_queue")
        let adaptorQueue = DispatchQueue(label: "adaptor_queue")
        writerQueue.async {
            // Scale the images according to the size of the screen.
            // Note: `resizeImage(with:bounds:interpolationQuality:)` comes from a
            // UIImage resizing category that is not included in this gist.
            let scaledImages = images.compactMap { image -> UIImage? in
                let scale = UIScreen.main.scale
                return image.resizeImage(with: .scaleAspectFit, bounds: UIScreen.main.bounds.size.applying(CGAffineTransform(scaleX: scale, y: scale)), interpolationQuality: .medium)
            }
            // Use the smallest image as the video's frame size.
            guard let minimumSize = scaledImages.compactMap({ $0.size }).min(by: { $0.width < $1.width && $0.height < $1.height }) else {
                failureCompletion?(ExportErrors.minimumSize)
                return
            }
            let inputSize = minimumSize
            let outputSize = minimumSize

            // Create a temporary location to save the video.
            let tmpMovieURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("\(Date().timeIntervalSince1970).mov")
            // Remove any leftover file with the same name.
            if FileManager.default.fileExists(atPath: tmpMovieURL.path) {
                try? FileManager.default.removeItem(at: tmpMovieURL)
            }

            do {
                let videoWriter = try AVAssetWriter(outputURL: tmpMovieURL, fileType: .mov)
                let videoOutputSettings = videoSettings(for: outputSize)
                guard videoWriter.canApply(outputSettings: videoOutputSettings, forMediaType: .video) else {
                    failureCompletion?(ExportErrors.canNotApplyVideoOutputSettings)
                    return
                }
                let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoOutputSettings)
                guard videoWriter.canAdd(videoWriterInput) else {
                    failureCompletion?(ExportErrors.writerInput)
                    return
                }
                let bufferAttributes = sourceBufferAttributes(for: inputSize)
                let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: bufferAttributes)
                videoWriter.add(videoWriterInput)

                // Start writing and bail out if the writer cannot start.
                guard videoWriter.startWriting() else {
                    failureCompletion?(ExportErrors.videoWriterFailed)
                    return
                }
                // Assert that we have a buffer pool.
                assert(pixelBufferAdaptor.pixelBufferPool != nil)
                // Start the session.
                videoWriter.startSession(atSourceTime: kCMTimeZero)

                // Define these variables outside the callback so they are not
                // reinitialized on every invocation. They are only touched from
                // `adaptorQueue`, so no extra synchronization is needed.
                var frameCount = 0
                let numImages = scaledImages.count
                // With a timescale of `framesPerSecond`, incrementing the value
                // by 1 advances the presentation time by exactly one frame.
                var nextTime = CMTime(value: 0, timescale: framesPerSecond)
                let currentProgress = Progress(totalUnitCount: Int64(numImages))

                // Writing starts here.
                videoWriterInput.requestMediaDataWhenReady(on: adaptorQueue) {
                    while frameCount < numImages {
                        // If the input isn't ready, return; this callback fires
                        // again when it is ready to continue writing.
                        if !videoWriterInput.isReadyForMoreMediaData {
                            return
                        }
                        let imageForVideo = scaledImages[frameCount]
                        if !appendPixelBufferForImage(imageForVideo, pixelBufferAdaptor: pixelBufferAdaptor, presentationTime: nextTime, minSize: minimumSize, videoBackgroundColor: configuration.videoBackgroundColor) {
                            failureCompletion?(ExportErrors.failedToAppendPixelBuffer)
                            videoWriterInput.markAsFinished()
                            videoWriter.cancelWriting()
                            return
                        }
                        frameCount += 1
                        nextTime.value += 1
                        currentProgress.completedUnitCount = Int64(frameCount)
                        progress?(currentProgress)
                    }
                    videoWriterInput.markAsFinished()
                    if let maxLength = configuration.maxVideoLengthInSeconds {
                        videoWriter.endSession(atSourceTime: CMTime(seconds: maxLength, preferredTimescale: framesPerSecond))
                    }
                    videoWriter.finishWriting {
                        DispatchQueue.main.async {
                            successCompletion?(tmpMovieURL)
                        }
                    }
                }
            } catch {
                failureCompletion?(error)
            }
        }
    }
    static func generateVideo(panoramaImage: UIImage,
                              framesPerSecond: CMTimeScale = 25,
                              configuration: Configuration = Configuration(),
                              progress: ProgressHandler = nil,
                              successCompletion: SuccessCompletion = nil,
                              failureCompletion: FailureCompletion = nil) {
        DispatchQueue.global(qos: .default).async {
            var images: [UIImage] = []
            // Scale the panorama down so its height matches the screen.
            let resizeScale = panoramaImage.size.height / UIScreen.main.bounds.height
            let resizedImage = panoramaImage.resizeImage(with: .scaleAspectFit, bounds: CGSize(width: panoramaImage.size.width / resizeScale, height: panoramaImage.size.height / resizeScale), interpolationQuality: .high)
            // Pan across the panorama over 2.5 seconds' worth of frames,
            // cropping one screen-wide slice per frame.
            let totalNumberOfFrames = 2.5 * Double(framesPerSecond)
            let widthToBeAddedOnEachIncrement = (resizedImage.size.width - UIScreen.main.bounds.width) / CGFloat(totalNumberOfFrames - 1)
            for index in 0..<Int(totalNumberOfFrames) {
                let xPosition = widthToBeAddedOnEachIncrement * CGFloat(index)
                let frame = CGRect(x: xPosition, y: 0, width: UIScreen.main.bounds.width, height: resizedImage.size.height)
                guard let croppedCGImage = resizedImage.cgImage?.cropping(to: frame) else { break }
                images.append(UIImage(cgImage: croppedCGImage))
            }
            // Append the frames in reverse so the video pans back to the start.
            let reversedImages = images.reversed()
            images.append(contentsOf: reversedImages)
            generateVideo(withImages: images, framesPerSecond: framesPerSecond, configuration: configuration, progress: progress, successCompletion: successCompletion, failureCompletion: failureCompletion)
        }
    }
    static func generateVideo(gifData: Data,
                              configuration: Configuration = Configuration(),
                              progress: ProgressHandler = nil,
                              successCompletion: SuccessCompletion = nil,
                              failureCompletion: FailureCompletion = nil) {
        DispatchQueue.global(qos: .default).async {
            let options = [kCGImageSourceShouldCache as String: kCFBooleanFalse] as CFDictionary
            let imageSource = CGImageSourceCreateWithData(gifData as CFData, options) ?? CGImageSourceCreateIncremental(options)
            let frameCount = CGImageSourceGetCount(imageSource)
            var images: [UIImage] = []
            let screenWidth = UIScreen.main.bounds.width
            for index in 0..<frameCount {
                guard let imageRef = CGImageSourceCreateImageAtIndex(imageSource, index, nil) else { return }
                let image = UIImage(cgImage: imageRef)
                // Scale each frame so its width matches the screen width.
                let scaleFactor = screenWidth / image.size.width
                let resizedImage = image.resizeImage(with: .scaleAspectFit, bounds: image.size.applying(CGAffineTransform(scaleX: scaleFactor, y: scaleFactor)), interpolationQuality: .high)
                images.append(resizedImage)
            }
            generateVideo(withImages: images, framesPerSecond: 20, configuration: configuration, progress: progress, successCompletion: successCompletion, failureCompletion: failureCompletion)
        }
    }
    private static func videoSettings(for size: CGSize) -> [String: Any] {
        return [
            AVVideoCodecKey: AVVideoCodecH264,
            AVVideoWidthKey: size.width,
            AVVideoHeightKey: size.height,
            AVVideoCompressionPropertiesKey: [
                AVVideoProfileLevelKey: AVVideoProfileLevelH264BaselineAutoLevel,
                AVVideoAverageBitRateKey: 1_200_000
            ]
        ]
    }

    private static func sourceBufferAttributes(for size: CGSize) -> [String: Any] {
        return [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32ARGB,
            // Size the pool's buffers to the input dimensions.
            kCVPixelBufferWidthKey as String: size.width,
            kCVPixelBufferHeightKey as String: size.height,
            kCVPixelBufferCGBitmapContextCompatibilityKey as String: true
        ]
    }
    /// Grabs a pixel buffer from the adaptor's pool, fills it with the image,
    /// and appends it at the given presentation time.
    ///
    /// - parameter image: the image whose pixels will be appended to the pixel buffer
    /// - parameter pixelBufferAdaptor: the adaptor whose pool supplies the pixel buffer
    /// - parameter presentationTime: the time at which this frame is presented
    /// - returns: true if the buffer was appended successfully, false otherwise
    private static func appendPixelBufferForImage(_ image: UIImage, pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor, presentationTime: CMTime, minSize: CGSize, videoBackgroundColor: UIColor) -> Bool {
        var appendSucceeded = false
        // Do the work inside an autoreleasepool so buffer memory is released promptly.
        autoreleasepool {
            if let pixelBufferPool = pixelBufferAdaptor.pixelBufferPool {
                let pixelBufferPointer = UnsafeMutablePointer<CVPixelBuffer?>.allocate(capacity: 1)
                let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, pixelBufferPointer)
                // Proceed only if the buffer was created and can be accessed.
                if let pixelBuffer = pixelBufferPointer.pointee, status == kCVReturnSuccess {
                    // Draw the image into the buffer, then append it to the video.
                    fillPixelBufferFromImage(image, pixelBuffer: pixelBuffer, minSize: minSize, videoBackgroundColor: videoBackgroundColor)
                    appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
                    // Release the buffer reference held by the pointer.
                    pixelBufferPointer.deinitialize(count: 1)
                } else {
                    NSLog("error: Failed to allocate pixel buffer from pool")
                }
                // Free the pointer itself.
                pixelBufferPointer.deallocate()
            }
        }
        return appendSucceeded
    }
    /// Draws the image into the given pixel buffer, centered on a background
    /// filled with `videoBackgroundColor`.
    ///
    /// - parameter image: the image whose pixels will be drawn
    /// - parameter pixelBuffer: the pixel buffer (as memory) into which the image is drawn
    private static func fillPixelBufferFromImage(_ image: UIImage, pixelBuffer: CVPixelBuffer, minSize: CGSize, videoBackgroundColor: UIColor) {
        // Lock the buffer memory so nothing else touches it while we draw.
        CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
        // Unlock on every exit path, even if context creation fails.
        defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0)) }

        // Get a pointer to the buffer's pixel data.
        let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        let contextWidth = CVPixelBufferGetWidth(pixelBuffer)
        let contextHeight = CVPixelBufferGetHeight(pixelBuffer)

        // Create a context that draws directly into the pixel buffer.
        guard let context = CGContext(data: pixelData, width: contextWidth, height: contextHeight, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue) else {
            return
        }

        // Clamp the image dimensions to the context, keeping multiples of 16.
        var imageHeight = image.size.height
        var imageWidth = image.size.width
        if Int(imageHeight) > context.height {
            imageHeight = 16 * (CGFloat(context.height) / 16).rounded(.awayFromZero)
        } else if Int(imageWidth) > context.width {
            imageWidth = 16 * (CGFloat(context.width) / 16).rounded(.awayFromZero)
        }

        // Center the image inside the video frame.
        let origin = CGPoint(x: (minSize.width - imageWidth) / 2, y: (minSize.height - imageHeight) / 2)
        context.clear(CGRect(x: 0, y: 0, width: imageWidth, height: imageHeight))
        // Paint the background color over the whole frame.
        context.setFillColor(videoBackgroundColor.cgColor)
        context.fill(CGRect(x: 0, y: 0, width: CGFloat(context.width), height: CGFloat(context.height)))
        // Draw the image itself.
        if let cgImage = image.cgImage {
            context.draw(cgImage, in: CGRect(x: origin.x, y: origin.y, width: imageWidth, height: imageHeight))
        }
    }
}
// MARK: - Configuration
extension ImagesToVideoExporter {

    class Configuration {
        // Note: `scaleWidth`, `shouldOptimiseImageForVideo`, and
        // `videoImageWidthForMultipleVideoGeneration` are not currently read
        // by the exporter above.
        var scaleWidth: CGFloat?
        var maxVideoLengthInSeconds: Double?
        var videoBackgroundColor: UIColor = .black
        var shouldOptimiseImageForVideo: Bool = true
        var videoImageWidthForMultipleVideoGeneration = 750
    }
}
// MARK: - Errors
extension ImagesToVideoExporter {

    enum ExportErrors: Error, LocalizedError {
        case minimumSize
        case canNotApplyVideoOutputSettings
        case failedToAppendPixelBuffer
        case videoWriterFailed
        case writerInput

        // With LocalizedError, `errorDescription` is what surfaces through
        // `localizedDescription` on the thrown error.
        var errorDescription: String? {
            switch self {
            case .minimumSize:
                return "Minimum size could not be calculated"
            case .canNotApplyVideoOutputSettings:
                return "Could not apply video output settings"
            case .failedToAppendPixelBuffer:
                return "AVAssetWriterInputPixelBufferAdaptor failed to append pixel buffer"
            case .videoWriterFailed:
                return "Video writer failed to write pixels"
            case .writerInput:
                return "Could not add writer input"
            }
        }
    }
}
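
// A minimal end-to-end usage sketch (not part of the original gist);
// `demoVideoExport` and `slides` are hypothetical, standing in for whatever
// images you want to stitch together.
func demoVideoExport(slides: [UIImage]) {
    let configuration = ImagesToVideoExporter.Configuration()
    configuration.videoBackgroundColor = .black
    ImagesToVideoExporter.generateVideo(
        withImages: slides,
        framesPerSecond: 20,
        configuration: configuration,
        progress: { progress in
            print("Frames written: \(progress.completedUnitCount)/\(progress.totalUnitCount)")
        },
        successCompletion: { url in
            // The exporter hands back a .mov in the temporary directory.
            print("Video written to \(url)")
        },
        failureCompletion: { error in
            print("Export failed: \(error.localizedDescription)")
        })
}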