UIColor Code in a Variable in Swift

UIColor code in a variable in swift

If hex is an Int and not a String you can use this extension:

extension UIColor {
    /// Creates a color from a packed 24-bit RGB integer such as `0xFF8800`.
    /// - Parameters:
    ///   - hex: Packed RGB value; red occupies bits 16-23, green bits 8-15,
    ///     blue bits 0-7. Bits above 23 are ignored.
    ///   - alpha: Opacity in the range 0...1.
    convenience init(hex: Int, alpha: CGFloat) {
        let r = CGFloat((hex >> 16) & 0xFF) / 255.0
        let g = CGFloat((hex >> 8) & 0xFF) / 255.0
        let b = CGFloat(hex & 0xFF) / 255.0
        self.init(red: r, green: g, blue: b, alpha: alpha)
    }
}

Used like this:

UIColor(hex: 0x00ff00, alpha: 1.0)
UIColor(hex: hex, alpha: 1.0)

How do I use variables in a UIColor?

Try this.

// A view that strokes and then fills a fixed rectangle using color
// components read from a ViewController.
// NOTE(review): Swift 2-era code — drawRect(_:) and the CGContext* C-style
// API were replaced by draw(_:) and CGContext methods in Swift 3.
class Color: UIView
{
// NOTE(review): this creates a brand-new ViewController, so red1/green1/blue1
// are whatever that type's defaults are — presumably this was meant to
// reference an existing controller instance; verify against the caller.
var colors = ViewController()

// Draws a 200x80 rectangle at (60, 170): first a 5pt stroked outline,
// then a fill, both using the same color built from colors.red1/green1/blue1.
override func drawRect(rect: CGRect)
{

// Example of building a color from 0-255 style components; note this
// local is never used by the drawing code below.
let swiftColor = UIColor(red: 1, green: 165/255, blue: 0, alpha: 1);

let context = UIGraphicsGetCurrentContext()
CGContextSetLineWidth(context, 5.0)
// CGContextSetStrokeColorWithColor expects a CGColor, hence the
// .CGColor conversion on the UIColor.
CGContextSetStrokeColorWithColor(context,
UIColor(red: CGFloat(colors.red1), green: CGFloat(colors.green1), blue: CGFloat(colors.blue1), alpha: 1.0).CGColor)
let rectangle = CGRectMake(60,170,200,80)
CGContextAddRect(context, rectangle)
CGContextStrokePath(context)
// Fill uses the same dynamically-built color as the stroke.
CGContextSetFillColorWithColor(context,
UIColor(red: CGFloat(colors.red1), green: CGFloat(colors.green1), blue: CGFloat(colors.blue1), alpha: 1.0).CGColor)
CGContextFillRect(context, rectangle)
}
}

CGContextSetStrokeColorWithColor expects a CGColor as parameter, but you are providing UIColor.

How can UIColor be a type in Swift?

If you Cmd-click the UIColor in your code you will be brought to the header-file of UIKit framework, straight to UIColor public interface, where you can see that yes, it's a class. Alternatively, you can look it up in Apple's documentation.

Reason why you can't do just

var newBackgroundColor = UIColor

... is that = is a shorthand for initialisation here. You not only indicate the type of a property/variable but also specify its initial value. I.e. for this to work you need to specify which exactly color should it be. E.g.

var newBackgroundColor = UIColor.blackColor()

Otherwise, if you really only need to declare the type, but initialisation will happen later, then you should use:

var newBackgroundColor : UIColor

How to use hex color values

#ffffff actually encodes 3 color components in hexadecimal notation - red ff, green ff and blue ff. You can write hexadecimal notation in Swift using the 0x prefix, e.g. 0xFF

To simplify the conversion, let's create an initializer that takes integer (0 - 255) values:

extension UIColor {
    /// Creates an opaque color from integer channel values, each in 0...255.
    /// Traps (in debug builds) when a component is out of range.
    convenience init(red: Int, green: Int, blue: Int) {
        assert(0...255 ~= red, "Invalid red component")
        assert(0...255 ~= green, "Invalid green component")
        assert(0...255 ~= blue, "Invalid blue component")

        self.init(red: CGFloat(red) / 255.0,
                  green: CGFloat(green) / 255.0,
                  blue: CGFloat(blue) / 255.0,
                  alpha: 1.0)
    }

    /// Creates an opaque color from a packed 24-bit RGB value, e.g. `0xFFFFFF`.
    convenience init(rgb: Int) {
        let red = (rgb >> 16) & 0xFF
        let green = (rgb >> 8) & 0xFF
        let blue = rgb & 0xFF
        self.init(red: red, green: green, blue: blue)
    }
}

Usage:

let color = UIColor(red: 0xFF, green: 0xFF, blue: 0xFF)
let color2 = UIColor(rgb: 0xFFFFFF)

How to get alpha?

Depending on your use case, you can simply use the native UIColor.withAlphaComponent method, e.g.

let semitransparentBlack = UIColor(rgb: 0x000000).withAlphaComponent(0.5)

Or you can add an additional (optional) parameter to the above methods:

/// Integer-channel initializer with an optional opacity.
/// - Parameter a: Opacity in 0...1; named `a` because `alpha` would
///   collide with UIKit's own `init(red:green:blue:alpha:)` label.
convenience init(red: Int, green: Int, blue: Int, a: CGFloat = 1.0) {
    let divisor: CGFloat = 255.0
    self.init(red: CGFloat(red) / divisor,
              green: CGFloat(green) / divisor,
              blue: CGFloat(blue) / divisor,
              alpha: a)
}

/// Packed 24-bit RGB initializer with an optional opacity (0...1).
convenience init(rgb: Int, a: CGFloat = 1.0) {
    let red = (rgb >> 16) & 0xFF
    let green = (rgb >> 8) & 0xFF
    let blue = rgb & 0xFF
    self.init(red: red, green: green, blue: blue, a: a)
}

(we cannot name the parameter alpha because of a name collision with the existing initializer).

Called as:

let color = UIColor(red: 0xFF, green: 0xFF, blue: 0xFF, a: 0.5)
let color2 = UIColor(rgb: 0xFFFFFF, a: 0.5)

To get the alpha as an integer 0-255, we can

/// Integer-channel initializer whose alpha is also an integer in 0...255
/// (0xFF means fully opaque).
convenience init(red: Int, green: Int, blue: Int, a: Int = 0xFF) {
    self.init(red: CGFloat(red) / 255.0,
              green: CGFloat(green) / 255.0,
              blue: CGFloat(blue) / 255.0,
              alpha: CGFloat(a) / 255.0)
}

// let's suppose alpha is the first component (ARGB)
convenience init(argb: Int) {
self.init(
red: (argb >> 16) & 0xFF,
green: (argb >> 8) & 0xFF,
blue: argb & 0xFF,
a: (argb >> 24) & 0xFF
)
}

Called as

let color = UIColor(red: 0xFF, green: 0xFF, blue: 0xFF, a: 0xFF)
let color2 = UIColor(argb: 0xFFFFFFFF)

Or a combination of the previous methods. There is absolutely no need to use strings.

iOS (Swift): representing scaled value as UIColor

Here's a helpful UIColor extension that lets you get a blend between two colors based on a percentage.

extension UIColor {
    /// Linearly interpolates between `self` and `to`.
    /// - A percent of 0.0 yields `self`; 1.0 yields `to`; values outside
    ///   0...1 are clamped. The returned color is always fully opaque.
    func blend(to: UIColor, percent: Double) -> UIColor {
        var srcR: CGFloat = 0.0, srcG: CGFloat = 0.0, srcB: CGFloat = 0.0
        var dstR: CGFloat = 0.0, dstG: CGFloat = 0.0, dstB: CGFloat = 0.0

        // Extract RGB components of both endpoints (alpha is ignored).
        getRed(&srcR, green: &srcG, blue: &srcB, alpha: nil)
        to.getRed(&dstR, green: &dstG, blue: &dstB, alpha: nil)

        // Clamp the interpolation factor to [0, 1] before mixing.
        let t = CGFloat(min(1.0, max(0.0, percent)))

        return UIColor(red: srcR + (dstR - srcR) * t,
                       green: srcG + (dstG - srcG) * t,
                       blue: srcB + (dstB - srcB) * t,
                       alpha: 1.0)
    }
}

Examples:

let red = UIColor.red.blend(to: .green, percent: 0)
let mix = UIColor.red.blend(to: .green, percent: 0.5)
let green = UIColor.red.blend(to: .green, percent: 1)

How can I use UIColorFromRGB in Swift?

Here's a Swift version of that function (for getting a UIColor representation of a UInt value):

/// Converts a packed 0xRRGGBB value into an opaque UIColor.
func UIColorFromRGB(rgbValue: UInt) -> UIColor {
    let red = CGFloat((rgbValue >> 16) & 0xFF) / 255.0
    let green = CGFloat((rgbValue >> 8) & 0xFF) / 255.0
    let blue = CGFloat(rgbValue & 0xFF) / 255.0
    return UIColor(red: red, green: green, blue: blue, alpha: 1.0)
}

view.backgroundColor = UIColorFromRGB(0x209624)

How to convert standard color names in string to UIColor values

There is no built in feature to make a UIColor with a name. You can write an extension like the one by Paul Hudson found here: https://www.hackingwithswift.com/example-code/uicolor/how-to-convert-a-html-name-string-into-a-uicolor

Simplified example:

extension UIColor {
    /// Looks up a color by a human-readable name, ignoring case and spaces
    /// (so "Red" and " red " both match). Returns nil for unknown names.
    /// Extend the lookup table below to support more names.
    public func named(_ name: String) -> UIColor? {
        let table: [String: UIColor] = [
            "red": .red,
        ]
        let key = name.lowercased().replacingOccurrences(of: " ", with: "")
        return table[key]
    }
}

And then use it:

let redColor = UIColor().named("red")

You could also define an xcassets like in this article: https://medium.com/bobo-shone/how-to-use-named-color-in-xcode-9-d7149d270a16

And then use UIColor(named: "red")

Strange Behavior Of UIColor As Function Parameter In Swift

You could try something like this:

// Gradient colors in draw order; populated via setColors(_:) and read
// by drawRect below.
var colors: [UIColor] = []

// Replaces the stored color list. Variadic, so callers write
// setColors(.red, .green, .blue).
func setColors(colors: UIColor...) {
self.colors = colors
}

// Draws a horizontal linear gradient (left to right, at mid-height)
// across the view using the colors stored in self.colors.
// NOTE(review): Swift 2-era CoreGraphics C API (CGGradientCreateWithColors
// etc.); Swift 3+ spells these as CGGradient/CGContext methods.
override func drawRect(rect: CGRect) {
let context = UIGraphicsGetCurrentContext()
let colorSpace: CGColorSpaceRef = CGColorSpaceCreateDeviceRGB()

var gradientColors: [CGColor] = []
var colorLocations: [CGFloat] = []
var i: CGFloat = 0.0;

// Build parallel arrays of CGColors and evenly spaced gradient stops:
// the k-th color sits at location k / colors.count.
for color in colors {
gradientColors.append(color.CGColor)
colorLocations.append(i)
i += CGFloat(1.0) / CGFloat(colors.count)
}

let gradient: CGGradientRef = CGGradientCreateWithColors(colorSpace, gradientColors, colorLocations)

// Start and end points share the same y (half the view height), so the
// gradient runs horizontally across the full width.
CGContextDrawLinearGradient(context, gradient, CGPoint(x: 0.0, y: self.frame.size.height / 2), CGPoint(x: self.frame.size.width, y: self.frame.size.height / 2), 0)
}

Side Note: You don't want to capitalize variable names. So I changed self.Colors to self.colors



Related Topics



Leave a reply



Submit