How to Convert an Array of Bytes [UInt8] into a Hex String in Swift

How to convert hexadecimal string to an array of UInt8 bytes in Swift?

You can convert your hex String back to a [UInt8] array by iterating over every two hex characters and initializing a UInt8 with its string radix initializer. The following implementation assumes the hex string is well formed:


Edit/update: Xcode 11 • Swift 5.1

extension StringProtocol {
    var hexaData: Data { .init(hexa) }
    var hexaBytes: [UInt8] { .init(hexa) }
    private var hexa: UnfoldSequence<UInt8, Index> {
        sequence(state: startIndex) { startIndex in
            guard startIndex < self.endIndex else { return nil }
            let endIndex = self.index(startIndex, offsetBy: 2, limitedBy: self.endIndex) ?? self.endIndex
            defer { startIndex = endIndex }
            return UInt8(self[startIndex..<endIndex], radix: 16)
        }
    }
}


let string = "e0696349774606f1b5602ffa6c2d953f"
let data = string.hexaData // 16 bytes
let bytes = string.hexaBytes // [224, 105, 99, 73, 119, 70, 6, 241, 181, 96, 47, 250, 108, 45, 149, 63]

If you would like to handle malformed hex strings as well, you can make it a throwing method:

extension String {
    enum DecodingError: Error {
        case invalidHexaCharacter(Character), oddNumberOfCharacters
    }
}


extension Collection {
    func unfoldSubSequences(limitedTo maxLength: Int) -> UnfoldSequence<SubSequence, Index> {
        sequence(state: startIndex) { lowerBound in
            guard lowerBound < endIndex else { return nil }
            let upperBound = index(lowerBound,
                                   offsetBy: maxLength,
                                   limitedBy: endIndex) ?? endIndex
            defer { lowerBound = upperBound }
            return self[lowerBound..<upperBound]
        }
    }
}


extension StringProtocol {
    func hexa<D>() throws -> D where D: DataProtocol & RangeReplaceableCollection {
        try .init(self)
    }
}


extension DataProtocol where Self: RangeReplaceableCollection {
    init<S: StringProtocol>(_ hexa: S) throws {
        guard hexa.count.isMultiple(of: 2) else {
            throw String.DecodingError.oddNumberOfCharacters
        }
        self = .init()
        reserveCapacity(hexa.utf8.count / 2)
        for pair in hexa.unfoldSubSequences(limitedTo: 2) {
            guard let byte = UInt8(pair, radix: 16) else {
                for character in pair where !character.isHexDigit {
                    throw String.DecodingError.invalidHexaCharacter(character)
                }
                continue
            }
            append(byte)
        }
    }
}

Usage:

let hexaString = "e0696349774606f1b5602ffa6c2d953f"
do {
    let bytes: [UInt8] = try hexaString.hexa()
    print(bytes)
    let data: Data = try hexaString.hexa()
    print(data)
} catch {
    print(error)
}

This will print

[224, 105, 99, 73, 119, 70, 6, 241, 181, 96, 47, 250, 108, 45, 149, 63]

16 bytes

How to convert Data to a hex string in Swift

A simple implementation (taken from How to hash NSString with SHA1 in Swift?, with an additional option for uppercase output) would be

extension Data {
    struct HexEncodingOptions: OptionSet {
        let rawValue: Int
        static let upperCase = HexEncodingOptions(rawValue: 1 << 0)
    }

    func hexEncodedString(options: HexEncodingOptions = []) -> String {
        let format = options.contains(.upperCase) ? "%02hhX" : "%02hhx"
        return self.map { String(format: format, $0) }.joined()
    }
}

I chose a hexEncodedString(options:) method in the style of the existing method base64EncodedString(options:).

Data conforms to the Collection protocol, therefore one can use map() to map each byte to the corresponding hex string. The %02x format prints the argument in base 16, padded to two digits with a leading zero if necessary. The hh modifier causes the argument (which is passed as an integer on the stack) to be treated as a one-byte quantity. One could omit the modifier here because $0 is an unsigned number (UInt8) and no sign-extension will occur, but it does no harm to leave it in.

The results are then joined into a single string.

Example:

let data = Data([0, 1, 127, 128, 255])
// For Swift < 4.2 use:
// let data = Data(bytes: [0, 1, 127, 128, 255])
print(data.hexEncodedString()) // 00017f80ff
print(data.hexEncodedString(options: .upperCase)) // 00017F80FF

The following implementation is faster by a factor of about 50
(tested with 1000 random bytes). It is inspired by
RenniePet's solution
and Nick Moore's solution, but takes advantage of
String(unsafeUninitializedCapacity:initializingUTF8With:),
which was introduced with Swift 5.3/Xcode 12 and is available on macOS 11 and iOS 14 or newer.

This method makes it possible to create a Swift string from UTF-8 units efficiently, without unnecessary copying or reallocations.

An alternative implementation for older macOS/iOS versions is also provided.

extension Data {
    struct HexEncodingOptions: OptionSet {
        let rawValue: Int
        static let upperCase = HexEncodingOptions(rawValue: 1 << 0)
    }

    func hexEncodedString(options: HexEncodingOptions = []) -> String {
        let hexDigits = options.contains(.upperCase) ? "0123456789ABCDEF" : "0123456789abcdef"
        if #available(macOS 11.0, iOS 14.0, watchOS 7.0, tvOS 14.0, *) {
            let utf8Digits = Array(hexDigits.utf8)
            return String(unsafeUninitializedCapacity: 2 * self.count) { (ptr) -> Int in
                var p = ptr.baseAddress!
                for byte in self {
                    p[0] = utf8Digits[Int(byte / 16)]
                    p[1] = utf8Digits[Int(byte % 16)]
                    p += 2
                }
                return 2 * self.count
            }
        } else {
            let utf16Digits = Array(hexDigits.utf16)
            var chars: [unichar] = []
            chars.reserveCapacity(2 * self.count)
            for byte in self {
                chars.append(utf16Digits[Int(byte / 16)])
                chars.append(utf16Digits[Int(byte % 16)])
            }
            return String(utf16CodeUnits: chars, count: chars.count)
        }
    }
}
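
For example, encoding 1000 random bytes (the same size used for the timing above):

import Foundation

// The byte values vary per run, but the output always has two hex digits per byte.
let random = Data((0..<1000).map { _ in UInt8.random(in: 0...255) })
let hex = random.hexEncodedString()
print(hex.count) // 2000 (two hex digits per byte)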

Int8 array to signed hex string doesn't work when first byte is negative

I finally wrote my own solution, iterating over the bytes and building up the string. It was tested with different arrays and works for both positive and negative hex values.

extension Data {

    func toSignedHexString() -> String {
        // Create an empty string
        var result = ""
        var first: Int8 = 0

        // Iterate over the bytes, reinterpreted as Int8
        var bytes = map { byte in
            return Int8(bitPattern: byte)
        }
        while !bytes.isEmpty {
            // Get and remove the first byte
            let byte = bytes.removeFirst()

            // Check if this byte is the first byte
            if result.isEmpty && first == 0 {
                // Save the first byte
                first = byte
            } else if result.isEmpty && first != 0 {
                // Convert the first two bytes to hex
                result.append(String(Int32(first + 1) * 256 + Int32(byte) + (first < 0 ? 1 : 0), radix: 16, uppercase: false))
            } else {
                // Convert it to hex
                result.append(String(format: "%02hhx", first < 0 ? (Int32(bytes.isEmpty ? 256 : 255) - Int32(byte)) % 256 : byte))
            }
        }

        // Return the final result
        return result
    }

}

Test code:

let bytes = Data([-100, -21, -46, 47, -99, 39, 67, 53, 62, -2, -23, 104, -15, 117, -9, 40, -31, 70, 4, 28].map({ UInt8(bitPattern: $0) }))
print(bytes.toSignedHexString() == "-63142dd062d8bccac10116970e8a08d71eb9fbe4")
// true

let bytes2 = Data([112, -84, -89, 120, -123, 118, -50, -7, -115, -97, -127, 41, -71, 52, -4, 105, -5, -80, 115, 86].map({ UInt8(bitPattern: $0) }))
print(bytes2.toSignedHexString() == "70aca7788576cef98d9f8129b934fc69fbb07356")
// true

How to convert UInt8 byte array to string in Swift

Update for Swift 3/Xcode 8:

String from bytes: [UInt8]:

if let string = String(bytes: bytes, encoding: .utf8) {
    print(string)
} else {
    print("not a valid UTF-8 sequence")
}

String from data: Data:

let data: Data = ...
if let string = String(data: data, encoding: .utf8) {
    print(string)
} else {
    print("not a valid UTF-8 sequence")
}
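
For example, a minimal round trip using the UTF-8 bytes of "Hello":

import Foundation

let bytes: [UInt8] = [72, 101, 108, 108, 111] // "Hello" in UTF-8
if let string = String(bytes: bytes, encoding: .utf8) {
    print(string) // Hello
}

let data = Data([72, 101, 108, 108, 111])
print(String(data: data, encoding: .utf8) ?? "not a valid UTF-8 sequence") // Hello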

Update for Swift 2/Xcode 7:

String from bytes: [UInt8]:

if let string = String(bytes: bytes, encoding: NSUTF8StringEncoding) {
    print(string)
} else {
    print("not a valid UTF-8 sequence")
}

String from data: NSData:

let data: NSData = ...
if let str = String(data: data, encoding: NSUTF8StringEncoding) {
    print(str)
} else {
    print("not a valid UTF-8 sequence")
}

Previous answer:

String does not have a stringWithBytes() method.
NSString has a

    NSString(bytes:length:encoding:)

initializer which you could use, but you can create the string directly from NSData, without the need for a UInt8 array:

if let str = NSString(data: data, encoding: NSUTF8StringEncoding) as? String {
    println(str)
} else {
    println("not a valid UTF-8 sequence")
}

How do I convert a hex string to a byte array in Swift 3

This code generates the same output as your Swift 2 code.

func stringToBytes(_ string: String) -> [UInt8]? {
    let length = string.characters.count
    if length & 1 != 0 {
        return nil
    }
    var bytes = [UInt8]()
    bytes.reserveCapacity(length/2)
    var index = string.startIndex
    for _ in 0..<length/2 {
        let nextIndex = string.index(index, offsetBy: 2)
        if let b = UInt8(string[index..<nextIndex], radix: 16) {
            bytes.append(b)
        } else {
            return nil
        }
        index = nextIndex
    }
    return bytes
}

let bytes = stringToBytes("7661706f72")
print(String(bytes: bytes!, encoding: .utf8)) //->Optional("vapor")
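
Malformed input returns nil, for example:

print(stringToBytes("76617"))  //-> nil (odd number of characters)
print(stringToBytes("76zz6f")) //-> nil ("zz" is not a valid hex byte)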

Splitting a UInt16 into 2 UInt8 bytes and getting the hex string of both in Swift

From your test cases, it seems like your values are 7 bits per byte.

You want 8192 to convert to 4000.
You want 16383 to convert to 7F7F.

Note that:

(0x7f << 7) + 0x7f == 16383

Given that:

let a = UInt8((int16 >> 7) & 0x7f)
let b = UInt8(int16 & 0x7f)
let result = String(format: "%02X%02X", a, b)

This gives:

"4000" for 8192
"7F7F" for 16383


To reverse the process:

let str = "7F7F"
let value = Int(str, radix: 16)!
let result = ((value >> 8) & 0x7f) << 7 + (value & 0x7f)

print(result) // 16383
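
Putting both directions together, a minimal sketch (the helper names split7Bit and join7Bit are illustrative, not part of the original code):

import Foundation

// Assumes 7 significant bits per output byte, as described above.
func split7Bit(_ value: UInt16) -> String {
    let a = UInt8((value >> 7) & 0x7f)
    let b = UInt8(value & 0x7f)
    return String(format: "%02X%02X", a, b)
}

func join7Bit(_ hex: String) -> Int? {
    guard let value = Int(hex, radix: 16) else { return nil }
    return ((value >> 8) & 0x7f) << 7 + (value & 0x7f)
}

print(split7Bit(8192))   // "4000"
print(split7Bit(16383))  // "7F7F"
print(join7Bit("7F7F")!) // 16383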

Swift Hex string Decoding

The Java byte primitive type

is an 8-bit signed two's complement integer. It has a minimum value of -128 and a maximum value of 127 (inclusive).

(Java, in general, does not have unsigned primitive types, only signed ones.)

In your Java output, the -25 value corresponds to hex E7 in your string, whose decimal value is E * 16 + 7 = 14 * 16 + 7 = 231; 231 is outside of the [-128, 127] range, and wraps around to -25. (More precisely the bit pattern of unsigned 8-bit 231 corresponds to the bit pattern of signed 8-bit -25 in two's-complement.)

In Swift, you're using a UInt8 to represent results (both explicitly, and implicitly in a Data value), and the range of UInt8 is [0, 255]; 231 fits within this range, and is what you see in your first Swift code snippet. The results are bitwise equivalent, but if you need results which are type equivalent to what you're seeing in Java, you'll need to work in terms of Int8 instead of UInt8.
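
For example, a minimal sketch of reinterpreting the unsigned bytes as Java-style signed bytes:

let unsigned: [UInt8] = [0xE7]                      // 231
let signed = unsigned.map { Int8(bitPattern: $0) }  // same bit pattern, reinterpreted as signed
print(signed)                                       // [-25]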

Swift: Create arbitrary base string from byte array

I had already written this for base 10, so here is a general version for any base from 2...36:

func bytesToRadix<C: RangeReplaceableCollection>(_ bytes: C, radix: Int, isUppercase: Bool = false, isBigEndian: Bool = true) -> String where C.Element == UInt8 {

    // Nothing to process or radix outside of 2...36, return an empty string.
    guard !bytes.isEmpty, 2...36 ~= radix else { return "" }

    let bytes = isBigEndian ? bytes : C(bytes.reversed())

    // For efficiency in calculation, combine 7 bytes into one Int.
    let chunk = 7
    let numvalues = bytes.count
    var ints = Array(repeating: 0, count: (numvalues + chunk - 1)/chunk)
    var rem = numvalues % chunk == 0 ? chunk : numvalues % chunk
    var index = 0
    var accum = 0

    for value in bytes {
        accum = (accum << 8) + Int(value)
        rem -= 1
        if rem == 0 {
            rem = chunk
            ints[index] = accum
            index += 1
            accum = 0
        }
    }

    // Array to hold the result, in reverse order
    var digits = [Int]()

    // Repeatedly divide value by radix, accumulating the remainders.
    // Repeat until original number is zero
    while !ints.isEmpty {
        var carry = 0
        for (index, value) in ints.enumerated() {
            var total = (carry << (8 * chunk)) + value
            carry = total % radix
            total /= radix
            ints[index] = total
        }

        digits.append(carry)

        // Remove leading Ints that have become zero.
        ints = .init(ints.drop { $0 == 0 })
    }

    // Create mapping of digit Int to String
    let letterOffset = Int(UnicodeScalar(isUppercase ? "A" : "a").value - 10)
    let letters = (0 ..< radix).map { d in d < 10 ? "\(d)" : String(UnicodeScalar(letterOffset + d)!) }

    // Reverse the digits array, convert them to String, and join them
    return digits.reversed().map { letters[$0] }.joined()
}

Examples:

let face: [UInt8] = [0xFA, 0xCE]

print(bytesToRadix(face, radix: 16)) // "face"
print(bytesToRadix(face, radix: 16, isUppercase: true)) // "FACE"
print(bytesToRadix(face, radix: 16, isBigEndian: false)) // "cefa"
print(bytesToRadix(face, radix: 16, isUppercase: true, isBigEndian: false)) // "CEFA"
print(bytesToRadix(face, radix: 10)) // "64206"
print(bytesToRadix(face, radix: 2)) // "111101011001110"
print(bytesToRadix(face, radix: 36)) // "1dji"

// also works with Data
let faceData = Data([0xFA, 0xCE])
print(bytesToRadix(faceData, radix: 16)) // "face"

Some edge cases:

print(bytesToRadix([9], radix: 16))   // "9"
print(bytesToRadix([10], radix: 16)) // "a"
print(bytesToRadix([15], radix: 16)) // "f"
print(bytesToRadix([16], radix: 16)) // "10"
print(bytesToRadix([35], radix: 36)) // "z"
print(bytesToRadix([36], radix: 36)) // "10"

Big test:

let bArray = (0...255).map(UInt8.init)
print(bytesToRadix(bArray, radix: 16, isBigEndian: false))

fffefdfcfbfaf9f8f7f6f5f4f3f2f1f0efeeedecebeae9e8e7e6e5e4e3e2e1e0dfdedddcdbdad9d8d7d6d5d4d3d2d1d0cfcecdcccbcac9c8c7c6c5c4c3c2c1c0bfbebdbcbbbab9b8b7b6b5b4b3b2b1b0afaeadacabaaa9a8a7a6a5a4a3a2a1a09f9e9d9c9b9a999897969594939291908f8e8d8c8b8a898887868584838281807f7e7d7c7b7a797877767574737271706f6e6d6c6b6a696867666564636261605f5e5d5c5b5a595857565554535251504f4e4d4c4b4a494847464544434241403f3e3d3c3b3a393837363534333231302f2e2d2c2b2a292827262524232221201f1e1d1c1b1a191817161514131211100f0e0d0c0b0a09080706050403020100

print(bytesToRadix(bArray, radix: 36))

168swoi6iuzj4fbwknlnh695zl88v65qcfgnwrwepqcxb9dysmluowqahvt3r9gsc1v47ssxdivjda3nttl6r044pzz7zwhtgu2mkow5ts28x2mbwenh3wfz4s1sarspfhlrakvqrgpmzb66sgtz2lzbotl7r28wcq8925c747b44l60vrk3scrin4zvnwn7pdsukgo6lgjhu1nuwj7yt1h9ujpe3os17onsk7sp4ysmytu568do2tqetwnrmbxb2dtd8kqorcoakaizlm9svr8axe1acxfursz11nubrhighfd64yhmp99ucvzr944n8co01o4x64cmbd8be0hqbm2zy5uwe4uplc4sa50xajel4bkkxb1kh21pisna37eqwpbpq11ypr


Test with your sample data:

let bArray: [UInt8] = [169, 74, 143, 229, 204, 177, 155, 166, 28, 76, 8, 115, 211, 145, 233, 135, 152, 47, 187, 211]

print(bytesToRadix(bArray, radix: 16))

a94a8fe5ccb19ba61c4c0873d391e987982fbbd3

print(bytesToRadix(bArray, radix: 36))

jrwjerxiekdtj9k82lg930wpkr6tq6r


Reverse function: radixToBytes

Here is a quick version of the reverse function. It doesn't yet handle uppercase digits or endianness (big-endian is assumed).

func radixToBytes(_ radixString: String, radix: Int) -> [UInt8] {

    let digitMap: [Character: Int] = [
        "0": 0, "1": 1, "2": 2, "3": 3, "4": 4, "5": 5,
        "6": 6, "7": 7, "8": 8, "9": 9, "a": 10, "b": 11,
        "c": 12, "d": 13, "e": 14, "f": 15, "g": 16, "h": 17,
        "i": 18, "j": 19, "k": 20, "l": 21, "m": 22, "n": 23,
        "o": 24, "p": 25, "q": 26, "r": 27, "s": 28, "t": 29,
        "u": 30, "v": 31, "w": 32, "x": 33, "y": 34, "z": 35
    ]

    // Convert input string into array of Int digits
    let digits = Array(radixString).compactMap { digitMap[$0] }

    // Nothing to process? Return an empty array.
    guard digits.count > 0 else { return [] }

    let numdigits = digits.count

    // Array to hold the result, in reverse order
    var bytes = [UInt8]()

    // Convert array of digits into array of Int values each
    // representing 6 digits of the original number. Six digits
    // was chosen to work on 32-bit and 64-bit systems.
    // Compute length of first number. It will be less than 6 if
    // there isn't a multiple of 6 digits in the number.
    let chunk = 6
    var ints = Array(repeating: 0, count: (numdigits + chunk - 1)/chunk)
    var rem = numdigits % chunk
    if rem == 0 {
        rem = chunk
    }
    var index = 0
    var accum = 0
    for digit in digits {
        accum = accum * radix + digit
        rem -= 1
        if rem == 0 {
            rem = chunk
            ints[index] = accum
            index += 1
            accum = 0
        }
    }

    // Repeatedly divide value by 256, accumulating the remainders.
    // Repeat until original number is zero
    var mult = 1
    for _ in 1...chunk {
        mult *= radix
    }

    while ints.count > 0 {
        var carry = 0
        for (index, value) in ints.enumerated() {
            var total = carry * mult + value
            carry = total % 256
            total /= 256
            ints[index] = total
        }

        bytes.append(UInt8(truncatingIfNeeded: carry))

        // Remove leading Ints that have become zero
        ints = .init(ints.drop { $0 == 0 })
    }

    // Reverse the array and return it
    return bytes.reversed()
}
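
A quick round-trip check using the face example from above:

let face: [UInt8] = [0xFA, 0xCE]
print(radixToBytes("face", radix: 16) == face)                        // true
print(radixToBytes(bytesToRadix(face, radix: 36), radix: 36) == face) // true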

