Skip to content

support filter effect #12

Open
Open
@fukemy

Description

@fukemy

Hi, i want to add some filter effect like tiktok, and I just make a demo, it succesfull show my custom image in other view, but logo i wrong orientation, did u ever try to work with this feature?

The step:

  1. Convert to UIImage
  2. Using VNFaceDetector to detect face boundingBox
  3. Add my filter image into camera image
  4. Convert back to CMSampleBuffer and pass to delegate

Here is my sample code:

extension WebRTCClient: AVCaptureVideoDataOutputSampleBufferDelegate{
   func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
       
       guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
           return
       }
       let ciimage = CIImage(cvPixelBuffer: imageBuffer)
//        let image = UIImage(ciImage: ciimage, scale: 1.0, orientation: imageOrientation())
       let image = convert(cmage: ciimage)
       print("image: \(image.size)")
       
       let faceDetectionRequest = VNDetectFaceLandmarksRequest(completionHandler: { (request: VNRequest, error: Error?) in
           DispatchQueue.main.async {[weak self] in
               if let observations = request.results as? [VNFaceObservation], !observations.isEmpty {
                   for observation in observations {
                       let box = observation.boundingBox
                       let boxFrame = CGRect(x: box.origin.x * image.size.width, y: box.origin.y * image.size.height, width: box.width * image.size.width, height: box.height * image.size.height)
                       print("box: \(boxFrame)")
                       let logo = UIImage(named: "dog_nose")!.rotate(radians: .pi * 2)
                       if let newImage = self?.drawImageIn(image, logo, inRect: boxFrame){
                           if let pxBuffer = self?.convertImageToBuffer(from: newImage){
                               var newSampleBuffer: CMSampleBuffer? = nil
                               var timimgInfo: CMSampleTimingInfo = .invalid
                               var videoInfo: CMVideoFormatDescription? = nil
                               CMVideoFormatDescriptionCreateForImageBuffer(allocator: nil, imageBuffer: pxBuffer, formatDescriptionOut: &videoInfo)
                               if videoInfo != nil{
                                   CMSampleBufferCreateForImageBuffer(allocator: kCFAllocatorDefault, imageBuffer: pxBuffer, dataReady: true, makeDataReadyCallback: nil, refcon: nil, formatDescription: videoInfo!, sampleTiming: &timimgInfo, sampleBufferOut: &newSampleBuffer)
                                   if newSampleBuffer != nil{
                                       self?.outputCaptureDelegate?.captureOutput!(output, didOutput: newSampleBuffer!, from: connection)
                                   }
                               }

                           }
                       }
                   }
               }
               self?.outputCaptureDelegate?.captureOutput!(output, didOutput: sampleBuffer, from: connection)
           }
       })
       let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: imageBuffer, orientation: exifOrientationForCurrentDeviceOrientation(), options: [:])
       do {
           try imageRequestHandler.perform([faceDetectionRequest])
       } catch {
           print(error.localizedDescription)
       }
   }
   
   func imageOrientation() -> UIImage.Orientation {
       let curDeviceOrientation = UIDevice.current.orientation
       var exifOrientation: UIImage.Orientation
       switch curDeviceOrientation {
           case UIDeviceOrientation.portraitUpsideDown:  // Device oriented vertically, Home button on the top
               exifOrientation = .left
           case UIDeviceOrientation.landscapeLeft:       // Device oriented horizontally, Home button on the right
               exifOrientation = .upMirrored
           case UIDeviceOrientation.landscapeRight:      // Device oriented horizontally, Home button on the left
               exifOrientation = .down
           case UIDeviceOrientation.portrait:            // Device oriented vertically, Home button on the bottom
               exifOrientation = .up
           default:
               exifOrientation = .up
       }
       return exifOrientation
   }
   
   func exifOrientationForCurrentDeviceOrientation() -> CGImagePropertyOrientation {
       return exifOrientationForDeviceOrientation(UIDevice.current.orientation)
   }
   
   func exifOrientationForDeviceOrientation(_ deviceOrientation: UIDeviceOrientation) -> CGImagePropertyOrientation {
       
       switch deviceOrientation {
       case .portraitUpsideDown:
           return .rightMirrored
           
       case .landscapeLeft:
           return .downMirrored
           
       case .landscapeRight:
           return .upMirrored
           
       default:
           return .leftMirrored
       }
   }
   
   func convert(cmage: CIImage) -> UIImage {
       let context = CIContext(options: nil)
       let cgImage = context.createCGImage(cmage, from: cmage.extent)!
       let image = UIImage(cgImage: cgImage)
       return image
   }
   
   func drawImageIn(_ image: UIImage, _ logo: UIImage, inRect: CGRect) -> UIImage {
       let renderer = UIGraphicsImageRenderer(size: image.size)
       return renderer.image { context in
           image.draw(in: CGRect(origin: CGPoint.zero, size: image.size))
           logo.draw(in: inRect)
       }
   }
   
   func convertImageToBuffer(from image: UIImage) -> CVPixelBuffer? {
       let attrs = [
           String(kCVPixelBufferCGImageCompatibilityKey) : true,
           String(kCVPixelBufferCGBitmapContextCompatibilityKey) : true
       ] as [String : Any]
       var buffer : CVPixelBuffer?
       let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(image.size.width), Int(image.size.height), kCVPixelFormatType_32ARGB, attrs as CFDictionary, &buffer)
       guard (status == kCVReturnSuccess) else {
           return nil
       }
       
       CVPixelBufferLockBaseAddress(buffer!, CVPixelBufferLockFlags(rawValue: 0))
       let pixelData = CVPixelBufferGetBaseAddress(buffer!)
       
       let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
       let context = CGContext(data: pixelData, width: Int(image.size.width), height: Int(image.size.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(buffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
       
       context?.translateBy(x: 0, y: image.size.height)
       context?.scaleBy(x: 1.0, y: -1.0)
       
       UIGraphicsPushContext(context!)
       image.draw(in: CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height))
       UIGraphicsPopContext()
       CVPixelBufferUnlockBaseAddress(buffer!, CVPixelBufferLockFlags(rawValue: 0))
       
       return buffer
   }
}
extension UIImage {
   func rotate(radians: CGFloat) -> UIImage {
       let rotatedSize = CGRect(origin: .zero, size: size)
           .applying(CGAffineTransform(rotationAngle: CGFloat(radians)))
           .integral.size
       UIGraphicsBeginImageContext(rotatedSize)
       if let context = UIGraphicsGetCurrentContext() {
           let origin = CGPoint(x: rotatedSize.width / 2.0,
                                y: rotatedSize.height / 2.0)
           context.translateBy(x: origin.x, y: origin.y)
           context.rotate(by: radians)
           draw(in: CGRect(x: -origin.y, y: -origin.x,
                           width: size.width, height: size.height))
           let rotatedImage = UIGraphicsGetImageFromCurrentImageContext()
           UIGraphicsEndImageContext()

           return rotatedImage ?? self
       }

       return self
   }
}

if you have any experience on this, please tech me the problem

Metadata

Metadata

Assignees

No one assigned

    Labels

    No labels
    No labels

    Projects

    No projects

    Milestone

    No milestone

    Relationships

    None yet

    Development

    No branches or pull requests

    Issue actions