Sunday, June 26, 2016

Turn a single UIImage into a video with an arbitrary duration

I have some code (below) that works when I hand it multiple images and turn them into a video lasting some arbitrary number of seconds, but it only works when images.count > 1. If I try to use just one image with a presentation time of 10 seconds, I get black frames until the very last frame, when the picture finally appears.

I am hoping that one of you brilliant people out there can give me some guidance on making this work with images.count == 1, so I can still produce a video from this code.

This code combines a few things I have learned and adapted from the internet, plus what I have picked up from the Apple docs and numerous tutorials (you have to learn somewhere, eh?). I feel fairly confident that I understand most (if not all) of the code, which is why I am completely baffled that it stops working when I turn images = [self.image, self.image] into images = [self.image] and change time to time = CMTime(seconds: 10, preferredTimescale: 600) (the time has to change, since the number of images changes).
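To make that concrete, here is roughly the difference between the working and failing configurations. This is a paraphrase: in the actual code below the array is built in loadImages() and the time is handed to render(), and the 5-second value is just an illustration.

import UIKit
import AVFoundation

let someImage = UIImage()   // stand-in for self.image

// Works: two copies of the image
let workingImages = [someImage, someImage]
let workingTime   = CMTime(seconds: 5, preferredTimescale: 600)   // illustrative per-image time

// Fails: a single copy, asking for 10 seconds -- black until the last frame
let failingImages = [someImage]
let failingTime   = CMTime(seconds: 10, preferredTimescale: 600)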

All I want is to turn a single image into a video instead of a [UIImage]. If you want to edit my code and play with it, by all means do, but if you have new code to bring to the table, I am all ears too! Thanks!

Edit: Main goal

Turn a UIImage into a video with any presentation time >= 1 second!
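For context, here is roughly how I drive the class. This is only a sketch: someImage is a placeholder for whatever UIImage I actually have, and ImageConverter is the class defined in the full listing further down.

import UIKit
import AVFoundation

let someImage = UIImage(named: "photo")!            // placeholder image
let converter = ImageConverter(image: someImage)

// Ask for 10 seconds of video from the single image.
let tenSeconds = CMTime(seconds: 10, preferredTimescale: 600)
converter.imageAnimator.render(tenSeconds) {
    print("Finished writing to \(converter.settings.outputURL)")
}

The full code follows: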

import UIKit
import AVFoundation
import Photos

class ImageConverter: NSObject {

    var settings:RenderSettings!
    var imageAnimator:ImageAnimator!
    var image:UIImage!

    convenience init(image:UIImage) {
        self.init()
        self.image = image
        settings = RenderSettings(size: self.image.size)
        imageAnimator = ImageAnimator(renderSettings: settings, image: self.image)
    }

    class RenderSettings {

        var fps: Int32 = 2   // 2 frames per second
        var avCodecKey = AVVideoCodecH264
        var size:CGSize!

        convenience init(size:CGSize) {
            self.init()
            self.size = size
            print(size)
        }

        var outputURL: NSURL {
            let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
            let savePath = (documentDirectory as NSString).stringByAppendingPathComponent("mergeVideo-picture.mov")
            return getURL(savePath)
        }

        func getURL(path:String) -> NSURL {
            let movieDestinationUrl = NSURL(fileURLWithPath: path)
            _ = try? NSFileManager().removeItemAtURL(movieDestinationUrl)
            return NSURL(fileURLWithPath: path)
        }
    }

    class VideoWriter {

        let renderSettings: RenderSettings

        var videoWriter: AVAssetWriter!
        var videoWriterInput: AVAssetWriterInput!
        var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor!

        var isReadyForData: Bool {
            return videoWriterInput?.readyForMoreMediaData ?? false
        }

        class func pixelBufferFromImage(image: UIImage, pixelBufferPool: CVPixelBufferPool, size: CGSize) -> CVPixelBuffer {

            var pixelBufferOut: CVPixelBuffer?

            let status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &pixelBufferOut)
            if status != kCVReturnSuccess {
                fatalError("CVPixelBufferPoolCreatePixelBuffer() failed")
            }

            let pixelBuffer = pixelBufferOut!

            CVPixelBufferLockBaseAddress(pixelBuffer, 0)

            let data = CVPixelBufferGetBaseAddress(pixelBuffer)
            let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
            let context = CGBitmapContextCreate(data, Int(size.width), Int(size.height),
                                                8, CVPixelBufferGetBytesPerRow(pixelBuffer), rgbColorSpace, CGImageAlphaInfo.PremultipliedFirst.rawValue)

            CGContextClearRect(context, CGRectMake(0, 0, size.width, size.height))

            let horizontalRatio = size.width / image.size.width
            let verticalRatio = size.height / image.size.height
            let aspectRatio = max(horizontalRatio, verticalRatio) // ScaleAspectFill
//            let aspectRatio = min(horizontalRatio, verticalRatio) // ScaleAspectFit

            let newSize = CGSize(width: image.size.width * aspectRatio, height: image.size.height * aspectRatio)

            let x = newSize.width < size.width ? (size.width - newSize.width) / 2 : 0
            let y = newSize.height < size.height ? (size.height - newSize.height) / 2 : 0

            CGContextDrawImage(context, CGRectMake(x, y, newSize.width, newSize.height), image.CGImage)
            CVPixelBufferUnlockBaseAddress(pixelBuffer, 0)

            return pixelBuffer
        }

        init(renderSettings: RenderSettings) {
            self.renderSettings = renderSettings
        }

        func start() {

            let avOutputSettings: [String: AnyObject] = [
                AVVideoCodecKey: renderSettings.avCodecKey,
                AVVideoWidthKey: NSNumber(float: Float(renderSettings.size.width)),
                AVVideoHeightKey: NSNumber(float: Float(renderSettings.size.height))
            ]

            func createPixelBufferAdaptor() {
                let sourcePixelBufferAttributesDictionary = [
                    kCVPixelBufferPixelFormatTypeKey as String: NSNumber(unsignedInt: kCVPixelFormatType_32ARGB),
                    kCVPixelBufferWidthKey as String: NSNumber(float: Float(renderSettings.size.width)),
                    kCVPixelBufferHeightKey as String: NSNumber(float: Float(renderSettings.size.height))
                ]
                pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput,
                                                                          sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
            }

            func createAssetWriter(outputURL: NSURL) -> AVAssetWriter {
                guard let assetWriter = try? AVAssetWriter(URL: outputURL, fileType: AVFileTypeMPEG4) else {
                    fatalError("AVAssetWriter() failed")
                }

                guard assetWriter.canApplyOutputSettings(avOutputSettings, forMediaType: AVMediaTypeVideo) else {
                    fatalError("canApplyOutputSettings() failed")
                }

                return assetWriter
            }

            videoWriter = createAssetWriter(renderSettings.outputURL)
            videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: avOutputSettings)

            if videoWriter.canAddInput(videoWriterInput) {
                videoWriter.addInput(videoWriterInput)
            }
            else {
                fatalError("canAddInput() returned false")
            }

            // The pixel buffer adaptor must be created before we start writing.
            createPixelBufferAdaptor()

            if videoWriter.startWriting() == false {
                fatalError("startWriting() failed")
            }

            videoWriter.startSessionAtSourceTime(kCMTimeZero)

            precondition(pixelBufferAdaptor.pixelBufferPool != nil, "nil pixelBufferPool")
        }

        func render(appendPixelBuffers: (VideoWriter)->Bool, completion: ()->Void) {

            precondition(videoWriter != nil, "Call start() to initialize the writer")

            let queue = dispatch_queue_create("mediaInputQueue", nil)
            videoWriterInput.requestMediaDataWhenReadyOnQueue(queue) {
                let isFinished = appendPixelBuffers(self)
                if isFinished {
                    self.videoWriterInput.markAsFinished()
                    self.videoWriter.finishWritingWithCompletionHandler() {
                        dispatch_async(dispatch_get_main_queue()) {
                            completion()
                        }
                    }
                }
                else {
                    // Fall through. The closure will be called again when the writer is ready.
                }
            }
        }

        func addImage(image: UIImage, withPresentationTime presentationTime: CMTime) -> Bool {

            precondition(pixelBufferAdaptor != nil, "Call start() to initialize the writer")

            let pixelBuffer = VideoWriter.pixelBufferFromImage(image, pixelBufferPool: pixelBufferAdaptor.pixelBufferPool!, size: renderSettings.size)
            return pixelBufferAdaptor.appendPixelBuffer(pixelBuffer, withPresentationTime: presentationTime)
        }

    }

    class ImageAnimator {

        // Apple suggests a timescale of 600 because it's a multiple of standard video rates 24, 25, 30, 60 fps etc.
        let kTimescale: Int32 = 600

        let settings: RenderSettings
        let videoWriter: VideoWriter
        var images: [UIImage]!
        var image:UIImage!
        var time:CMTime!
        var frameNum = 0

        class func saveToLibrary(videoURL: NSURL) {
            PHPhotoLibrary.requestAuthorization { status in
                guard status == .Authorized else { return }

                PHPhotoLibrary.sharedPhotoLibrary().performChanges({
                    PHAssetChangeRequest.creationRequestForAssetFromVideoAtFileURL(videoURL)
                }) { success, error in
                    if !success {
                        print("Could not save video to photo library:", error)
                    }
                }
            }
        }

        class func removeFileAtURL(fileURL: NSURL) {
            do {
                try NSFileManager.defaultManager().removeItemAtPath(fileURL.path!)
            }
            catch _ as NSError {
                // Assume file doesn't exist.
            }
        }

        init(renderSettings: RenderSettings, image:UIImage) {
            settings = renderSettings
            videoWriter = VideoWriter(renderSettings: settings)
            self.image = image
            images = loadImages()
        }

        func render(time:CMTime, completion: ()->Void) {

            // The VideoWriter will fail if a file exists at the URL, so clear it out first.
            ImageAnimator.removeFileAtURL(settings.outputURL)

            videoWriter.start()
            self.time = time
            videoWriter.render(appendPixelBuffers) {
//                ImageAnimator.saveToLibrary(self.settings.outputURL)
                completion()
            }

        }

        // Replace this logic with your own.
        func loadImages() -> [UIImage] {
            var images = [UIImage]()

            //** If I change this to: for index in 1...1 {   (see the matching ** comment below)

            for index in 1...2 {
                images.append(self.image)
            }
            return images
        }

        // This is the callback function for VideoWriter.render()
        func appendPixelBuffers(writer: VideoWriter) -> Bool {

            let frameDuration = CMTimeMake(Int64(kTimescale / settings.fps), kTimescale)
            let frameD = CMTime(seconds: 0.5, preferredTimescale: 600)
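            // NOTE: neither frameDuration nor frameD is used below; each frame's
            // presentation time comes from self.time * frameNum instead.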

            while !images.isEmpty {

                if writer.isReadyForData == false {
                    // Inform writer we have more buffers to write.
                    return false
                }

                let image = images.removeFirst()
                let presentationTime = CMTimeMultiply(self.time, Int32(frameNum))

                //** ...AND change presentationTime to CMTime(seconds: 10, preferredTimescale: 600),
                //   then I end up with black frames for 10 seconds, and the very LAST frame is my picture.

                let success = videoWriter.addImage(image, withPresentationTime: presentationTime)
                if success == false {
                    fatalError("addImage() failed")
                }

                frameNum += 1
            }

            // Inform writer all buffers have been written.
            return true
        }

    }
}
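For anyone tracing the timing, here is a tiny standalone sketch of the presentation times that appendPixelBuffers() ends up computing. My (possibly wrong) understanding is that a frame's presentation time is where it starts displaying, which would explain why a lone frame stamped at 10 seconds only shows up at the very end:

import CoreMedia

let perImageTime = CMTime(seconds: 5, preferredTimescale: 600)   // illustrative value

// Two-image case: frames get stamped at 0 s and 5 s.
for frameNum in 0..<2 {
    let presentationTime = CMTimeMultiply(perImageTime, Int32(frameNum))
    print("frame \(frameNum) starts at \(CMTimeGetSeconds(presentationTime)) s")
}

// Single-image case: the loop stamps the only frame at CMTimeMultiply(time, 0) == 0 s,
// and if I hard-code 10 s instead I get black until the last frame, as described above.

I assume the two-image case only looks right because its first frame happens to land at 0 s.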
