Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

How can I record video using AVAssetWriter and apply filtered image in swift #6

Open
milanTbi opened this issue Jun 25, 2020 · 0 comments

Comments

@milanTbi
Copy link

I try to make Video filler functionality in live camera with recording facility

I followed the reference code of IMAGLY but it can't record video, it shows only blank video file so please give me some hint or reference code

Please see my code and give suggestions

*********************** AVAssetWriter ***********************
func setUpWriter() {
self.captureVideoOrientation = .landscapeLeft
motionManager.startAccelerometerUpdates(to: motionManagerQueue, withHandler: { accelerometerData, _ in
guard let accelerometerData = accelerometerData else {
return
}

               if abs(accelerometerData.acceleration.y) < abs(accelerometerData.acceleration.x) {
                   if accelerometerData.acceleration.x > 0 {
                       self.captureVideoOrientation = .landscapeLeft
                   } else {
                       self.captureVideoOrientation = .landscapeRight
                   }
               } else {
                   if accelerometerData.acceleration.y > 0 {
                       self.captureVideoOrientation = .portraitUpsideDown
                   } else {
                       self.captureVideoOrientation = .portrait
                   }
               }
           })
    do {
        outputFileLocation = videoFileLocation()
        
        videoWriter = try AVAssetWriter(outputURL: outputFileLocation!, fileType: AVFileType.mp4)
        
        
        // add video input
        videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: [
            AVVideoCodecKey : AVVideoCodecType.h264,
            AVVideoWidthKey : 720,
            AVVideoHeightKey : 1280,
            AVVideoCompressionPropertiesKey : [
                AVVideoAverageBitRateKey : 2300000,
            ],
        ])
        
        
        let videoCompressionSettings = self.videoDataOutput.recommendedVideoSettingsForAssetWriter(writingTo: AVFileType.mp4)
        
        self.videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoCompressionSettings as [String: AnyObject]?)
        self.videoWriterInput!.expectsMediaDataInRealTime = true
        
        var sourcePixelBufferAttributes: [String: AnyObject] = [String(kCVPixelBufferPixelFormatTypeKey): NSNumber(value: kCVPixelFormatType_32BGRA as UInt32), String(kCVPixelFormatOpenGLESCompatibility): kCFBooleanTrue]

  if let currentVideoDimensions = self.currentVideoDimensions {
sourcePixelBufferAttributes[String(kCVPixelBufferWidthKey)] = NSNumber(value: currentVideoDimensions.width as Int32)
sourcePixelBufferAttributes[String(kCVPixelBufferHeightKey)] = NSNumber(value: currentVideoDimensions.height as Int32)
}

        self.assetWriterInputPixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: self.videoWriterInput!, sourcePixelBufferAttributes: sourcePixelBufferAttributes)
        
        if let videoDevice = self.videoDeviceInput?.device, let captureVideoOrientation = self.captureVideoOrientation {
            if videoDevice.position == .front {
                self.videoWriterInput?.transform = GetTransformForDeviceOrientation(captureVideoOrientation, mirrored: true)
            } else {
                self.videoWriterInput?.transform = GetTransformForDeviceOrientation(captureVideoOrientation)
            }
        }
        
        
        videoWriterInput!.expectsMediaDataInRealTime = true
        
    
        
        if videoWriter!.canAdd(videoWriterInput!) {
            videoWriter!.add(videoWriterInput!)
            print("video input added")
        } else {
            print("no input added")
        }
        
        // add audio input
        audioWriterInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: nil)
        
        audioWriterInput!.expectsMediaDataInRealTime = true
        
        if videoWriter!.canAdd(audioWriterInput!) {
            videoWriter!.add(audioWriterInput!)
            print("audio input added")
        } else {
             print("no audio input added")
        }
        
        
        videoWriter!.startWriting()
    } catch let error {
        debugPrint(error.localizedDescription)
    }
}

********************** captureOutput ***********************

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)

    let cameraImage = CIImage(cvPixelBuffer: pixelBuffer!)
    
    let filteredImage: CIImage?
    filteredImage = cameraImage
    
    guard let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer) else {
        return
    }
    
    let mediaType = CMFormatDescriptionGetMediaType(formatDescription)
    
    if mediaType == CMMediaType(kCMMediaType_Audio) {
        self.currentAudioSampleBufferFormatDescription = formatDescription
        if let assetWriterAudioInput = self.audioWriterInput, assetWriterAudioInput.isReadyForMoreMediaData {
            let success = assetWriterAudioInput.append(sampleBuffer)
            if !success {
                print("Success")
            }
        }
        
        return
    }
    
    currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription)
    
    DispatchQueue.main.sync {
        //self.imageView.image = UIImage(ciImage: cameraImage)
        
        let writable = self.canWrite()
        
        if writable,
            self.sessionAtSourceTime == nil {
            // start writing
            self.sessionAtSourceTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
            
            self.videoWriter!.startSession(atSourceTime: self.sessionAtSourceTime as! CMTime)
            
            //print("Writing")
            
        }
        
        if let assetWriterInputPixelBufferAdaptor = self.assetWriterInputPixelBufferAdaptor, let pixelBufferPool = assetWriterInputPixelBufferAdaptor.pixelBufferPool {
            var renderedOutputPixelBuffer: CVPixelBuffer?
            let status2 = CVPixelBufferPoolCreatePixelBuffer(nil, pixelBufferPool, &renderedOutputPixelBuffer)
            if status2 != 0 {
                print("*********************** STATUS2 RETURN ***********************")
                return
            }
            
            let image = #imageLiteral(resourceName: "568557_v9_bc")
            
            let attrs = [kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue, kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue] as CFDictionary
            var pixelBuffer : CVPixelBuffer?
            
            pixelBuffer = getPixelBufferImage(from: image)
            let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(image.size.width), Int(image.size.height), kCVPixelFormatType_32ARGB, attrs, &pixelBuffer)
            guard (status == kCVReturnSuccess) else {
               
                return
            }
            
            CVPixelBufferLockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue: 0))
            let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer!)
            
            let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
            let context = CGContext(data: pixelData, width: Int(image.size.width), height: Int(image.size.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
            
            context?.translateBy(x: 0, y: image.size.height)
            context?.scaleBy(x: 1.0, y: -1.0)
            
            UIGraphicsPushContext(context!)
            image.draw(in: CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height))
            UIGraphicsPopContext()
            CVPixelBufferUnlockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue: 0))
            
            renderedOutputPixelBuffer = pixelBuffer
           
            if let filteredImage = filteredImage, let renderedOutputPixelBuffer = renderedOutputPixelBuffer {
                
                if let assetWriterVideoInput = self.videoWriterInput, assetWriterVideoInput.isReadyForMoreMediaData {
                    
                    assetWriterInputPixelBufferAdaptor.append(renderedOutputPixelBuffer, withPresentationTime: self.sessionAtSourceTime!)
                    
                }
                
            }
            
        }

        if output == self.videoDataOutput {
            connection.videoOrientation = .portrait
            
            if connection.isVideoMirroringSupported {
                connection.isVideoMirrored = false
            }
        }
        
        if writable,
            output == self.videoDataOutput,
            (self.videoWriterInput!.isReadyForMoreMediaData) {
            // write video buffer
            
            self.videoWriterInput!.append(sampleBuffer)
            //print("video buffering")
        }
        
        if writable,
            output == self.audioDataOutput,
            (self.audioWriterInput!.isReadyForMoreMediaData) {
            // write audio buffer
            self.audioWriterInput?.append(sampleBuffer)
            print("audio buffering")
        }   
    } 
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
None yet
Projects
None yet
Development

No branches or pull requests

1 participant