Swift: Convert String to Hex Color Code

There are far too many possible user ids to give each one a unique color. Your best option is to map each user id down to one of the available colors and accept that two users may end up with the same color.

One possible solution would be to take the hashValue of the user id string and then reduce that Int down to one of the 16,777,216 possible colors.

let userId = "XzSYoKJaqKYREkdB2dgwt0fLOPP2" // or whatever the id is
let hash = abs(userId.hashValue)
let colorNum = hash % (256*256*256)

At this point colorNum is in the range 0 to 0xFFFFFF.

You can now create a color from colorNum.

let red = colorNum >> 16
let green = (colorNum & 0x00FF00) >> 8
let blue = (colorNum & 0x0000FF)
let userColor = UIColor(red: CGFloat(red)/255.0, green: CGFloat(green)/255.0, blue: CGFloat(blue)/255.0, alpha: 1.0)

You will want to store this color in the user's profile since the hashValue isn't guaranteed to be the same each time your app runs.
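
Alternatively, if you would rather not store the color, you can replace hashValue with a hash you control. The sketch below uses an FNV-1a hash over the UTF-8 bytes of the id (the choice of hash is an assumption, not part of the original answer); it maps the same user id to the same color number on every launch.

// A minimal sketch, assuming an FNV-1a hash is acceptable here.
// Unlike String.hashValue, the result is stable across app launches.
func stableColorNumber(for id: String) -> Int {
    var hash: UInt64 = 0xcbf29ce484222325        // FNV-1a offset basis
    for byte in id.utf8 {
        hash ^= UInt64(byte)
        hash = hash &* 0x100000001b3             // FNV-1a prime, wrapping multiply
    }
    return Int(hash % (256 * 256 * 256))         // reduce to 0x000000 ... 0xFFFFFF
}

let stableColorNum = stableColorNumber(for: userId)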

How to use hex color values

#ffffff is actually three color components in hexadecimal notation: red ff, green ff and blue ff. You can write hexadecimal literals in Swift using the 0x prefix, e.g. 0xFF.

To simplify the conversion, let's create an initializer that takes integer (0 - 255) values:

extension UIColor {
    convenience init(red: Int, green: Int, blue: Int) {
        assert(red >= 0 && red <= 255, "Invalid red component")
        assert(green >= 0 && green <= 255, "Invalid green component")
        assert(blue >= 0 && blue <= 255, "Invalid blue component")

        self.init(red: CGFloat(red) / 255.0, green: CGFloat(green) / 255.0, blue: CGFloat(blue) / 255.0, alpha: 1.0)
    }

    convenience init(rgb: Int) {
        self.init(
            red: (rgb >> 16) & 0xFF,
            green: (rgb >> 8) & 0xFF,
            blue: rgb & 0xFF
        )
    }
}

Usage:

let color = UIColor(red: 0xFF, green: 0xFF, blue: 0xFF)
let color2 = UIColor(rgb: 0xFFFFFF)

How to get alpha?

Depending on your use case, you can simply use the native UIColor.withAlphaComponent method, e.g.

let semitransparentBlack = UIColor(rgb: 0x000000).withAlphaComponent(0.5)

Or you can add an additional (optional) parameter to the above methods:

convenience init(red: Int, green: Int, blue: Int, a: CGFloat = 1.0) {
    self.init(
        red: CGFloat(red) / 255.0,
        green: CGFloat(green) / 255.0,
        blue: CGFloat(blue) / 255.0,
        alpha: a
    )
}

convenience init(rgb: Int, a: CGFloat = 1.0) {
    self.init(
        red: (rgb >> 16) & 0xFF,
        green: (rgb >> 8) & 0xFF,
        blue: rgb & 0xFF,
        a: a
    )
}

(We cannot name the parameter alpha because of a name collision with the existing initializer.)

Called as:

let color = UIColor(red: 0xFF, green: 0xFF, blue: 0xFF, a: 0.5)
let color2 = UIColor(rgb: 0xFFFFFF, a: 0.5)

To get the alpha as an integer (0 - 255), we can write:

convenience init(red: Int, green: Int, blue: Int, a: Int = 0xFF) {
    self.init(
        red: CGFloat(red) / 255.0,
        green: CGFloat(green) / 255.0,
        blue: CGFloat(blue) / 255.0,
        alpha: CGFloat(a) / 255.0
    )
}

// let's suppose alpha is the first component (ARGB)
convenience init(argb: Int) {
    self.init(
        red: (argb >> 16) & 0xFF,
        green: (argb >> 8) & 0xFF,
        blue: argb & 0xFF,
        a: (argb >> 24) & 0xFF
    )
}

Called as:

let color = UIColor(red: 0xFF, green: 0xFF, blue: 0xFF, a: 0xFF)
let color2 = UIColor(argb: 0xFFFFFFFF)

Or a combination of the previous methods. There is absolutely no need to use strings.
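
For instance, one possible combination (a sketch, not part of the original answer, built on the integer-alpha initializer above) is an RGBA layout where the alpha byte comes last:

// Sketch: alpha as the last component (RGBA).
convenience init(rgba: Int) {
    self.init(
        red: (rgba >> 24) & 0xFF,
        green: (rgba >> 16) & 0xFF,
        blue: (rgba >> 8) & 0xFF,
        a: rgba & 0xFF
    )
}

let color3 = UIColor(rgba: 0x0000FF7F) // blue at roughly 50% alpha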

How to create a hex color string UIColor initializer in Swift?

Xcode 9 • Swift 4 or later

extension UIColor {
    convenience init?(hexaRGB: String, alpha: CGFloat = 1) {
        var chars = Array(hexaRGB.hasPrefix("#") ? hexaRGB.dropFirst() : hexaRGB[...])
        switch chars.count {
        case 3: chars = chars.flatMap { [$0, $0] }
        case 6: break
        default: return nil
        }
        self.init(red: .init(strtoul(String(chars[0...1]), nil, 16)) / 255,
                  green: .init(strtoul(String(chars[2...3]), nil, 16)) / 255,
                  blue: .init(strtoul(String(chars[4...5]), nil, 16)) / 255,
                  alpha: alpha)
    }

    convenience init?(hexaRGBA: String) {
        var chars = Array(hexaRGBA.hasPrefix("#") ? hexaRGBA.dropFirst() : hexaRGBA[...])
        switch chars.count {
        case 3: chars = chars.flatMap { [$0, $0] }; fallthrough
        case 6: chars.append(contentsOf: ["F", "F"])
        case 8: break
        default: return nil
        }
        self.init(red: .init(strtoul(String(chars[0...1]), nil, 16)) / 255,
                  green: .init(strtoul(String(chars[2...3]), nil, 16)) / 255,
                  blue: .init(strtoul(String(chars[4...5]), nil, 16)) / 255,
                  alpha: .init(strtoul(String(chars[6...7]), nil, 16)) / 255)
    }

    convenience init?(hexaARGB: String) {
        var chars = Array(hexaARGB.hasPrefix("#") ? hexaARGB.dropFirst() : hexaARGB[...])
        switch chars.count {
        case 3: chars = chars.flatMap { [$0, $0] }; fallthrough
        case 6: chars.append(contentsOf: ["F", "F"])
        case 8: break
        default: return nil
        }
        self.init(red: .init(strtoul(String(chars[2...3]), nil, 16)) / 255,
                  green: .init(strtoul(String(chars[4...5]), nil, 16)) / 255,
                  blue: .init(strtoul(String(chars[6...7]), nil, 16)) / 255,
                  alpha: .init(strtoul(String(chars[0...1]), nil, 16)) / 255)
    }
}

if let textColor = UIColor(hexaRGB: "00F") {
    print(textColor) // r 0.0 g 0.0 b 1.0 a 1.0
}

UIColor(hexaRGB: "#00F") // r 0.0 g 0.0 b 1.0 a 1.0
UIColor(hexaRGB: "#00F", alpha: 0.5) // r 0.0 g 0.0 b 1.0 a 0.5

UIColor(hexaRGB: "#0000FF") // r 0.0 g 0.0 b 1.0 a 1.0
UIColor(hexaRGB: "#0000FF", alpha: 0.5) // r 0.0 g 0.0 b 1.0 a 0.5

UIColor(hexaRGBA: "#0000FFFF") // r 0.0 g 0.0 b 1.0 a 1.0
UIColor(hexaRGBA: "#0000FF7F") // r 0.0 g 0.0 b 1.0 a 0.498

UIColor(hexaARGB: "#FF0000FF") // r 0.0 g 0.0 b 1.0 a 1.0
UIColor(hexaARGB: "#7F0000FF") // r 0.0 g 0.0 b 1.0 a 0.498

Can't convert hex string into Int and then into UIColor

You have to remove the 0x prefix and then specify the radix 16:

let s = "0xe7c79d"
print(Int(s)) // nil

let value = s.hasPrefix("0x")
? String(s.dropFirst(2))
: s
print(Int(value, radix: 16)) // Optional(15189917)
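
The unwrapped value can then be passed to an Int-based initializer such as the UIColor(rgb:) extension from earlier. A short sketch (assuming that extension is in scope):

if let rgb = Int(value, radix: 16) {
    let color = UIColor(rgb: rgb)   // equivalent to UIColor(rgb: 0xE7C79D)
    print(color)
}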

Use Hex color in SwiftUI

You're almost there; you were just using the wrong initializer parameter:

extension Color {
    init(hex: String) {
        let hex = hex.trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
        var int: UInt64 = 0
        Scanner(string: hex).scanHexInt64(&int)
        let a, r, g, b: UInt64
        switch hex.count {
        case 3: // RGB (12-bit)
            (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
        case 6: // RGB (24-bit)
            (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
        case 8: // ARGB (32-bit)
            (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
        default:
            (a, r, g, b) = (1, 1, 1, 0)
        }

        self.init(
            .sRGB,
            red: Double(r) / 255,
            green: Double(g) / 255,
            blue: Double(b) / 255,
            opacity: Double(a) / 255
        )
    }
}
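
Hypothetical usage in a view (the file containing the extension needs import SwiftUI; ContentView and the hex values below are made-up examples):

import SwiftUI

struct ContentView: View {
    var body: some View {
        Text("Hello")
            .foregroundColor(Color(hex: "#e7c79d"))   // 6-digit RGB with "#"
            .background(Color(hex: "FF336699"))       // 8-digit ARGB without "#"
    }
}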

How to convert RGB values to a hex string in iOS Swift

let rgbRedValue = 200
let rgbGreenValue = 13
let rgbBlueValue = 45

let hexValue = String(format: "%02X%02X%02X", rgbRedValue, rgbGreenValue, rgbBlueValue) // "C80D2D"

Another workaround is to convert the RGB values to a UIColor and then read the hex string back from the UIColor, as sketched below.
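
A minimal sketch of that workaround (the hexString property name is made up here, and alpha is ignored):

import UIKit

extension UIColor {
    // Read the color's RGB components back out and format them as "RRGGBB".
    var hexString: String? {
        var red: CGFloat = 0, green: CGFloat = 0, blue: CGFloat = 0, alpha: CGFloat = 0
        guard getRed(&red, green: &green, blue: &blue, alpha: &alpha) else { return nil }
        return String(format: "%02X%02X%02X",
                      Int((red * 255).rounded()),
                      Int((green * 255).rounded()),
                      Int((blue * 255).rounded()))
    }
}

let color = UIColor(red: 200/255, green: 13/255, blue: 45/255, alpha: 1)
print(color.hexString ?? "n/a") // "C80D2D"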


