Adding Blur Effect to Background in Swift

I have tested this code and it's working fine:

let blurEffect = UIBlurEffect(style: UIBlurEffect.Style.dark)
let blurEffectView = UIVisualEffectView(effect: blurEffect)
blurEffectView.frame = view.bounds
blurEffectView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
view.addSubview(blurEffectView)

For Swift 3.0:

let blurEffect = UIBlurEffect(style: UIBlurEffectStyle.dark)
let blurEffectView = UIVisualEffectView(effect: blurEffect)
blurEffectView.frame = view.bounds
blurEffectView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
view.addSubview(blurEffectView)

For Swift 4.0:

let blurEffect = UIBlurEffect(style: UIBlurEffect.Style.dark)
let blurEffectView = UIVisualEffectView(effect: blurEffect)
blurEffectView.frame = view.bounds
blurEffectView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
view.addSubview(blurEffectView)

Here you can see the result:

(Screenshot: blurred view)

Or you can use this library for that:

https://github.com/FlexMonkey/Blurable

Is there a method to blur a background in SwiftUI?


1. The Native SwiftUI way:

Just add the .blur() modifier to anything you want blurred, like:

Image("BG")
    .blur(radius: 20)

(Screenshot: blur demo; note the top and bottom of the view)

Note that you can Group multiple views and blur them together.
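For instance, here is a minimal sketch (the view contents and asset name are illustrative, not from the original answer):

import SwiftUI

struct BlurredGroupDemo: View {
    var body: some View {
        VStack {
            Group {
                Image("BG")             // assumes an asset named "BG"
                Text("Blurred caption")
            }
            .blur(radius: 20)           // applied to every view in the Group

            Text("Sharp caption")       // outside the Group, stays sharp
        }
    }
}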



2. The Visual Effect View:

You can bring the good old UIVisualEffectView over from UIKit:

VisualEffectView(effect: UIBlurEffect(style: .dark))

With this tiny struct:

struct VisualEffectView: UIViewRepresentable {
    var effect: UIVisualEffect?
    func makeUIView(context: UIViewRepresentableContext<Self>) -> UIVisualEffectView { UIVisualEffectView() }
    func updateUIView(_ uiView: UIVisualEffectView, context: UIViewRepresentableContext<Self>) { uiView.effect = effect }
}
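For example, a minimal sketch that uses the struct above as a full-screen backdrop (the ZStack layout and asset name are my own illustration; .ignoresSafeArea() assumes iOS 14+):

import SwiftUI
import UIKit

struct BlurredBackdropDemo: View {
    var body: some View {
        ZStack {
            Image("BG")                 // assumes an asset named "BG"
                .resizable()
                .ignoresSafeArea()

            VisualEffectView(effect: UIBlurEffect(style: .dark))
                .ignoresSafeArea()

            Text("Content above the blur")
                .foregroundColor(.white)
        }
    }
}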

(Screenshot: visual effect view demo)



3. iOS 15: Materials

You can use iOS's predefined materials with one line of code:

.background(.ultraThinMaterial)

(Screenshot: materials demo)
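A minimal usage sketch (iOS 15+; the layout and asset name are illustrative):

import SwiftUI

struct MaterialDemo: View {
    var body: some View {
        ZStack {
            Image("BG")                 // assumes an asset named "BG"
                .resizable()
                .ignoresSafeArea()

            Text("Frosted label")
                .padding()
                .background(.ultraThinMaterial, in: RoundedRectangle(cornerRadius: 12))
        }
    }
}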

Add blur to UIView in order to blur the controller below?

This is what I've done:

class ViewController: UIViewController, UIPickerViewDataSource, UIPickerViewDelegate {

    var pickerDataSource = ["White", "Red", "Green", "Blue"]
    @IBOutlet var pickerView: UIPickerView!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        let blurEffect = UIBlurEffect(style: UIBlurEffectStyle.regular)
        let blurEffectView = UIVisualEffectView(effect: blurEffect)
        blurEffectView.frame = view.bounds
        blurEffectView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
        self.pickerView.delegate = self
        self.pickerView.dataSource = self
        self.view.addSubview(blurEffectView)
        self.view.bringSubview(toFront: self.pickerView)
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    func numberOfComponents(in pickerView: UIPickerView) -> Int {
        return 1
    }

    func pickerView(_ pickerView: UIPickerView, numberOfRowsInComponent component: Int) -> Int {
        return pickerDataSource.count
    }

    func pickerView(_ pickerView: UIPickerView, titleForRow row: Int, forComponent component: Int) -> String? {
        return pickerDataSource[row]
    }
}

(Screenshot: blurred view behind the picker)

How to create a transparent view with a blur effect?

I found an excellent tutorial that might help you create a smooth transparent view with a blur effect: https://www.raywenderlich.com/167-uivisualeffectview-tutorial-getting-started

Creating a blurring overlay view

You can use UIVisualEffectView to achieve this effect. This is a native API that has been fine-tuned for performance and great battery life, plus it's easy to implement.

Swift:

// Only apply the blur if the user hasn't disabled transparency effects
if !UIAccessibility.isReduceTransparencyEnabled {
    view.backgroundColor = .clear

    let blurEffect = UIBlurEffect(style: .dark)
    let blurEffectView = UIVisualEffectView(effect: blurEffect)
    // Always fill the view
    blurEffectView.frame = self.view.bounds
    blurEffectView.autoresizingMask = [.flexibleWidth, .flexibleHeight]

    view.addSubview(blurEffectView) // If you have more UIViews, use an insertSubview API to place it where needed
} else {
    view.backgroundColor = .black
}

Objective-C:

// Only apply the blur if the user hasn't disabled transparency effects
if (!UIAccessibilityIsReduceTransparencyEnabled()) {
    self.view.backgroundColor = [UIColor clearColor];

    UIBlurEffect *blurEffect = [UIBlurEffect effectWithStyle:UIBlurEffectStyleDark];
    UIVisualEffectView *blurEffectView = [[UIVisualEffectView alloc] initWithEffect:blurEffect];
    // Always fill the view
    blurEffectView.frame = self.view.bounds;
    blurEffectView.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;

    [self.view addSubview:blurEffectView]; // If you have more UIViews, use an insertSubview API to place it where needed
} else {
    self.view.backgroundColor = [UIColor blackColor];
}

If you are presenting this view controller modally to blur the underlying content, set its modal presentation style to Over Current Context and set its background color to clear, so the underlying view controller remains visible once this one is presented on top.
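A minimal sketch of the presenting side (BlurOverlayViewController is a hypothetical name standing in for the controller that contains the code above):

// Hypothetical presenting code; `BlurOverlayViewController` stands in for your own controller.
let blurVC = BlurOverlayViewController()
blurVC.modalPresentationStyle = .overCurrentContext  // keep the presenter visible underneath
blurVC.view.backgroundColor = .clear                 // let the blur show through
present(blurVC, animated: true)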

Make ViewController background Blur

@SemarY I found your bug: you added the image view in the wrong place. Here is code that should produce the output you want:

// Table background
ChatRoomTable.backgroundColor = .clear
ChatRoomTable.tableFooterView = UIView(frame: CGRect.zero)
ChatRoomTable.sectionIndexColor = #colorLiteral(red: 0, green: 0, blue: 0, alpha: 0.6715271832)

// Background image behind everything
let backgroundImage = UIImage(named: "16.png")
let imageView = UIImageView(image: backgroundImage)
imageView.frame = self.view.bounds
view.addSubview(imageView)

// Blur layer on top of the image
let blurEffect = UIBlurEffect(style: .dark)
let blurView = UIVisualEffectView(effect: blurEffect)
blurView.frame = self.view.bounds
view.addSubview(blurView)

// Keep the table above the blur
self.view.bringSubviewToFront(ChatRoomTable)

AVFoundation - Adding blur background to video


Swift 4 - Adding blur background to video

1. Single video support ❤️

2. Multiple videos merging support ❤️

3. Support for any canvas in any ratio ❤️

4. Save the final video to the camera roll ❤️

5. Handle all video orientations ❤️

I may be late with this answer, but I still hadn't found any solution for this requirement, so I'm sharing my work:

⭐ Download Sample Code Here ⭐

Steps to add a blurred background to videos:

  1. Merge all videos without audio.

    a) You need a render-area size.

    b) You need to calculate the scale and position of each video within this area (for aspect-fill behavior).

  2. Add a blur effect to the merged video.
  3. Place the videos one by one at the center of the blurred video.

Merge Videos

func mergeVideos(_ videos: Array<AVURLAsset>, inArea area: CGSize, completion: @escaping (_ error: Error?, _ url: URL?) -> Swift.Void) {

    // Create an AVMutableComposition. This object will hold our multiple AVMutableCompositionTracks.
    let mixComposition = AVMutableComposition()

    var instructionLayers: Array<AVMutableVideoCompositionLayerInstruction> = []

    for asset in videos {

        // Create an AVMutableCompositionTrack, i.e. add a new track to our AVMutableComposition.
        let track = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)

        // Set the length of the track equal to the length of the asset, and insert the asset at the
        // composition's current duration so the videos play back to back.
        if let videoTrack = asset.tracks(withMediaType: AVMediaType.video).first {

            /// Hide time for this video's layer
            let opacityStartTime: CMTime = CMTimeMakeWithSeconds(0, asset.duration.timescale)
            let opacityEndTime: CMTime = CMTimeAdd(mixComposition.duration, asset.duration)
            let hideAfter: CMTime = CMTimeAdd(opacityStartTime, opacityEndTime)

            let timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration)
            try? track?.insertTimeRange(timeRange, of: videoTrack, at: mixComposition.duration)

            /// Layer instruction
            let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track!)
            layerInstruction.setOpacity(0.0, at: hideAfter)

            /// Scale and position for aspect-fill in the given area
            // (scaleAndPositionInAspectFillMode, getVideoOrientation, assetSize, and the `size` and
            // `videoOutputURL` properties are helpers, presumably defined in the attached sample project.)
            let properties = scaleAndPositionInAspectFillMode(forTrack: videoTrack, inArea: area)

            /// Checking for orientation
            let videoOrientation: UIImageOrientation = self.getVideoOrientation(forTrack: videoTrack)
            let assetSize = self.assetSize(forTrack: videoTrack)

            if (videoOrientation == .down) {
                /// Rotate
                let defaultTransform = asset.preferredTransform
                let rotateTransform = CGAffineTransform(rotationAngle: -CGFloat(Double.pi/2.0))

                // Scale
                let scaleTransform = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

                // Translate
                var ytranslation: CGFloat = assetSize.height
                var xtranslation: CGFloat = 0
                if properties.position.y == 0 {
                    xtranslation = -(assetSize.width - ((size.width/size.height) * assetSize.height))/2.0
                }
                else {
                    ytranslation = assetSize.height - (assetSize.height - ((size.height/size.width) * assetSize.width))/2.0
                }
                let translationTransform = CGAffineTransform(translationX: xtranslation, y: ytranslation)

                // Final transformation - concatenation
                let finalTransform = defaultTransform.concatenating(rotateTransform).concatenating(translationTransform).concatenating(scaleTransform)
                layerInstruction.setTransform(finalTransform, at: kCMTimeZero)
            }
            else if (videoOrientation == .left) {
                /// Rotate
                let defaultTransform = asset.preferredTransform
                let rotateTransform = CGAffineTransform(rotationAngle: -CGFloat(Double.pi))

                // Scale
                let scaleTransform = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

                // Translate
                var ytranslation: CGFloat = assetSize.height
                var xtranslation: CGFloat = assetSize.width
                if properties.position.y == 0 {
                    xtranslation = assetSize.width - (assetSize.width - ((size.width/size.height) * assetSize.height))/2.0
                }
                else {
                    ytranslation = assetSize.height - (assetSize.height - ((size.height/size.width) * assetSize.width))/2.0
                }
                let translationTransform = CGAffineTransform(translationX: xtranslation, y: ytranslation)

                // Final transformation - concatenation
                let finalTransform = defaultTransform.concatenating(rotateTransform).concatenating(translationTransform).concatenating(scaleTransform)
                layerInstruction.setTransform(finalTransform, at: kCMTimeZero)
            }
            else if (videoOrientation == .right) {
                /// No need to rotate
                // Scale
                let scaleTransform = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

                // Translate
                let translationTransform = CGAffineTransform(translationX: properties.position.x, y: properties.position.y)

                let finalTransform = scaleTransform.concatenating(translationTransform)
                layerInstruction.setTransform(finalTransform, at: kCMTimeZero)
            }
            else {
                /// Rotate
                let defaultTransform = asset.preferredTransform
                let rotateTransform = CGAffineTransform(rotationAngle: CGFloat(Double.pi/2.0))

                // Scale
                let scaleTransform = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

                // Translate
                var ytranslation: CGFloat = 0
                var xtranslation: CGFloat = assetSize.width
                if properties.position.y == 0 {
                    xtranslation = assetSize.width - (assetSize.width - ((size.width/size.height) * assetSize.height))/2.0
                }
                else {
                    ytranslation = -(assetSize.height - ((size.height/size.width) * assetSize.width))/2.0
                }
                let translationTransform = CGAffineTransform(translationX: xtranslation, y: ytranslation)

                // Final transformation - concatenation
                let finalTransform = defaultTransform.concatenating(rotateTransform).concatenating(translationTransform).concatenating(scaleTransform)
                layerInstruction.setTransform(finalTransform, at: kCMTimeZero)
            }

            instructionLayers.append(layerInstruction)
        }
    }

    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
    mainInstruction.layerInstructions = instructionLayers

    let mainCompositionInst = AVMutableVideoComposition()
    mainCompositionInst.instructions = [mainInstruction]
    mainCompositionInst.frameDuration = CMTimeMake(1, 30)
    mainCompositionInst.renderSize = area

    //let url = URL(fileURLWithPath: "/Users/enacteservices/Desktop/final_video.mov")
    let url = self.videoOutputURL
    try? FileManager.default.removeItem(at: url)

    let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
    exporter?.outputURL = url
    exporter?.outputFileType = .mp4
    exporter?.videoComposition = mainCompositionInst
    exporter?.shouldOptimizeForNetworkUse = true
    exporter?.exportAsynchronously(completionHandler: {
        if let anError = exporter?.error {
            completion(anError, nil)
        }
        else if exporter?.status == AVAssetExportSessionStatus.completed {
            completion(nil, url)
        }
    })
}

Adding Blur Effect

func addBlurEffect(toVideo asset: AVURLAsset, completion: @escaping (_ error: Error?, _ url: URL?) -> Swift.Void) {

    let filter = CIFilter(name: "CIGaussianBlur")
    let composition = AVVideoComposition(asset: asset, applyingCIFiltersWithHandler: { request in
        // Clamp to avoid blurring transparent pixels at the image edges
        let source: CIImage? = request.sourceImage.clampedToExtent()
        filter?.setValue(source, forKey: kCIInputImageKey)

        filter?.setValue(10.0, forKey: kCIInputRadiusKey)

        // Crop the blurred output to the bounds of the original image
        let output: CIImage? = filter?.outputImage?.cropped(to: request.sourceImage.extent)

        // Provide the filter output to the composition
        if let anOutput = output {
            request.finish(with: anOutput, context: nil)
        }
    })

    //let url = URL(fileURLWithPath: "/Users/enacteservices/Desktop/final_video.mov")
    let url = self.videoOutputURL
    // Remove any previous video at that path
    try? FileManager.default.removeItem(at: url)

    let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)

    // Assign the filter composition for the video processing
    exporter?.videoComposition = composition
    exporter?.outputFileType = .mp4
    exporter?.outputURL = url
    exporter?.exportAsynchronously(completionHandler: {
        if let anError = exporter?.error {
            completion(anError, nil)
        }
        else if exporter?.status == AVAssetExportSessionStatus.completed {
            completion(nil, url)
        }
    })
}

Place each video at the center of the blurred video

This will be your final video URL.

func addAllVideosAtCenterOfBlur(videos: Array<AVURLAsset>, blurVideo: AVURLAsset, completion: @escaping (_ error: Error?, _ url: URL?) -> Swift.Void) {

    // Create an AVMutableComposition. This object will hold our multiple AVMutableCompositionTracks.
    let mixComposition = AVMutableComposition()

    var instructionLayers: Array<AVMutableVideoCompositionLayerInstruction> = []

    // Add the blur video first
    let blurVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
    if let videoTrack = blurVideo.tracks(withMediaType: AVMediaType.video).first {
        let timeRange = CMTimeRangeMake(kCMTimeZero, blurVideo.duration)
        try? blurVideoTrack?.insertTimeRange(timeRange, of: videoTrack, at: kCMTimeZero)
    }

    /// Add the other videos at the center of the blur video
    var startAt = kCMTimeZero
    for asset in videos {

        /// Time range of the asset
        let timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration)

        // Create an AVMutableCompositionTrack, i.e. add a new track to our AVMutableComposition.
        let track = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)

        // Set the length of the track equal to the length of the asset, and insert the asset at
        // `startAt` so each video plays after the previous one ends.
        if let videoTrack = asset.tracks(withMediaType: AVMediaType.video).first {

            /// Hide time for this video's layer
            let opacityStartTime: CMTime = CMTimeMakeWithSeconds(0, asset.duration.timescale)
            let opacityEndTime: CMTime = CMTimeAdd(startAt, asset.duration)
            let hideAfter: CMTime = CMTimeAdd(opacityStartTime, opacityEndTime)

            /// Adding the video track
            try? track?.insertTimeRange(timeRange, of: videoTrack, at: startAt)

            /// Layer instruction
            let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track!)
            layerInstruction.setOpacity(0.0, at: hideAfter)

            /// Scale and position for aspect-fit in the given area
            let properties = scaleAndPositionInAspectFitMode(forTrack: videoTrack, inArea: size)

            /// Checking for orientation
            let videoOrientation: UIImageOrientation = self.getVideoOrientation(forTrack: videoTrack)
            let assetSize = self.assetSize(forTrack: videoTrack)

            if (videoOrientation == .down) {
                /// Rotate
                let defaultTransform = asset.preferredTransform
                let rotateTransform = CGAffineTransform(rotationAngle: -CGFloat(Double.pi/2.0))

                // Scale
                let scaleTransform = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

                // Translate
                var ytranslation: CGFloat = assetSize.height
                var xtranslation: CGFloat = 0
                if properties.position.y == 0 {
                    xtranslation = -(assetSize.width - ((size.width/size.height) * assetSize.height))/2.0
                }
                else {
                    ytranslation = assetSize.height - (assetSize.height - ((size.height/size.width) * assetSize.width))/2.0
                }
                let translationTransform = CGAffineTransform(translationX: xtranslation, y: ytranslation)

                // Final transformation - concatenation
                let finalTransform = defaultTransform.concatenating(rotateTransform).concatenating(translationTransform).concatenating(scaleTransform)
                layerInstruction.setTransform(finalTransform, at: kCMTimeZero)
            }
            else if (videoOrientation == .left) {
                /// Rotate
                let defaultTransform = asset.preferredTransform
                let rotateTransform = CGAffineTransform(rotationAngle: -CGFloat(Double.pi))

                // Scale
                let scaleTransform = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

                // Translate
                var ytranslation: CGFloat = assetSize.height
                var xtranslation: CGFloat = assetSize.width
                if properties.position.y == 0 {
                    xtranslation = assetSize.width - (assetSize.width - ((size.width/size.height) * assetSize.height))/2.0
                }
                else {
                    ytranslation = assetSize.height - (assetSize.height - ((size.height/size.width) * assetSize.width))/2.0
                }
                let translationTransform = CGAffineTransform(translationX: xtranslation, y: ytranslation)

                // Final transformation - concatenation
                let finalTransform = defaultTransform.concatenating(rotateTransform).concatenating(translationTransform).concatenating(scaleTransform)
                layerInstruction.setTransform(finalTransform, at: kCMTimeZero)
            }
            else if (videoOrientation == .right) {
                /// No need to rotate
                // Scale
                let scaleTransform = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

                // Translate
                let translationTransform = CGAffineTransform(translationX: properties.position.x, y: properties.position.y)

                let finalTransform = scaleTransform.concatenating(translationTransform)
                layerInstruction.setTransform(finalTransform, at: kCMTimeZero)
            }
            else {
                /// Rotate
                let defaultTransform = asset.preferredTransform
                let rotateTransform = CGAffineTransform(rotationAngle: CGFloat(Double.pi/2.0))

                // Scale
                let scaleTransform = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)

                // Translate
                var ytranslation: CGFloat = 0
                var xtranslation: CGFloat = assetSize.width
                if properties.position.y == 0 {
                    xtranslation = assetSize.width - (assetSize.width - ((size.width/size.height) * assetSize.height))/2.0
                }
                else {
                    ytranslation = -(assetSize.height - ((size.height/size.width) * assetSize.width))/2.0
                }
                let translationTransform = CGAffineTransform(translationX: xtranslation, y: ytranslation)

                // Final transformation - concatenation
                let finalTransform = defaultTransform.concatenating(rotateTransform).concatenating(translationTransform).concatenating(scaleTransform)
                layerInstruction.setTransform(finalTransform, at: kCMTimeZero)
            }

            instructionLayers.append(layerInstruction)
        }

        /// Adding the audio
        if let audioTrack = asset.tracks(withMediaType: AVMediaType.audio).first {
            let aTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
            try? aTrack?.insertTimeRange(timeRange, of: audioTrack, at: startAt)
        }

        // Advance the start time
        startAt = CMTimeAdd(startAt, asset.duration)
    }

    /// Blur layer instruction
    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: blurVideoTrack!)
    instructionLayers.append(layerInstruction)

    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, blurVideo.duration)
    mainInstruction.layerInstructions = instructionLayers

    let mainCompositionInst = AVMutableVideoComposition()
    mainCompositionInst.instructions = [mainInstruction]
    mainCompositionInst.frameDuration = CMTimeMake(1, 30)
    mainCompositionInst.renderSize = size

    //let url = URL(fileURLWithPath: "/Users/enacteservices/Desktop/final_video.mov")
    let url = self.videoOutputURL
    try? FileManager.default.removeItem(at: url)

    let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
    exporter?.outputURL = url
    exporter?.outputFileType = .mp4
    exporter?.videoComposition = mainCompositionInst
    exporter?.shouldOptimizeForNetworkUse = true
    exporter?.exportAsynchronously(completionHandler: {
        if let anError = exporter?.error {
            completion(anError, nil)
        }
        else if exporter?.status == AVAssetExportSessionStatus.completed {
            completion(nil, url)
        }
    })
}

For the helper methods used in the above code, please download the attached sample code.
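As a rough orientation only, here is my own sketch of what an aspect-fill helper along these lines might look like; it is an assumption, not the implementation from the sample code:

import AVFoundation
import CoreGraphics

// Hypothetical sketch, NOT the downloadable implementation.
func scaleAndPositionInAspectFillMode(forTrack track: AVAssetTrack, inArea area: CGSize)
    -> (scale: CGSize, position: CGPoint) {

    let trackSize = track.naturalSize
    // Aspect fill: take the larger ratio so the scaled video covers the whole area.
    let ratio = max(area.width / trackSize.width, area.height / trackSize.height)
    let scaledSize = CGSize(width: trackSize.width * ratio, height: trackSize.height * ratio)
    // Center the scaled video; the overflow is cropped by the render area.
    let position = CGPoint(x: (area.width - scaledSize.width) / 2.0,
                           y: (area.height - scaledSize.height) / 2.0)
    return (CGSize(width: ratio, height: ratio), position)
}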

I'd also welcome suggestions for a shorter way to do this, because I currently have to export the video three times to achieve it.
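For context, here is a hedged sketch of how the three exports could be chained at the call site (the `urls` array and canvas size are illustrative; the function names and signatures are the ones above):

// Hypothetical call site chaining the three exports described above.
let canvas = CGSize(width: 1280, height: 720)      // illustrative render area
let assets = urls.map { AVURLAsset(url: $0) }      // `urls`: your source file URLs

mergeVideos(assets, inArea: canvas) { _, mergedURL in
    guard let mergedURL = mergedURL else { return }
    self.addBlurEffect(toVideo: AVURLAsset(url: mergedURL)) { _, blurredURL in
        guard let blurredURL = blurredURL else { return }
        self.addAllVideosAtCenterOfBlur(videos: assets, blurVideo: AVURLAsset(url: blurredURL)) { _, finalURL in
            // finalURL is the finished video with the blurred background
        }
    }
}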

UIPickerView background blur

Try the code below:

pickerView.subviews.last?.backgroundColor = UIColor.red.withAlphaComponent(0.2)

UIPickerView consists of two subviews: the main container and the selection indicator. So we simply change the background of the second, i.e. the last, subview.
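Since the subview hierarchy is private and has changed across iOS versions, a slightly defensive sketch (the view controller context and outlet name are assumed) would be:

// Apply after layout, and only if the expected subview actually exists.
override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    if let selectionIndicator = pickerView.subviews.last, pickerView.subviews.count >= 2 {
        selectionIndicator.backgroundColor = UIColor.red.withAlphaComponent(0.2)
    }
}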


