How do I convert a hex string to a byte array in Swift 3
This code can generate the same output as your swift 2 code.
/// Decodes a hexadecimal string into its bytes.
///
/// - Parameter string: A string of hex digit pairs (e.g. "7661706f72").
/// - Returns: The decoded bytes, or `nil` if the string has an odd number of
///   characters or contains a pair with non-hex characters.
func stringToBytes(_ string: String) -> [UInt8]? {
    // Fix: `string.characters` was the Swift 3 spelling; it was deprecated in
    // Swift 4 and later removed — `count` now lives directly on String.
    let length = string.count
    // Hex encoding requires an even number of digits (two per byte).
    if length & 1 != 0 {
        return nil
    }
    var bytes = [UInt8]()
    bytes.reserveCapacity(length / 2)
    var index = string.startIndex
    for _ in 0..<length / 2 {
        let nextIndex = string.index(index, offsetBy: 2)
        // UInt8(_:radix:) returns nil for a pair containing non-hex characters.
        if let b = UInt8(string[index..<nextIndex], radix: 16) {
            bytes.append(b)
        } else {
            return nil
        }
        index = nextIndex
    }
    return bytes
}
// Example: decode "vapor" from its hex representation.
let bytes = stringToBytes("7661706f72")
// Force-unwrap is acceptable here only because the literal above is known-valid hex.
print(String(bytes: bytes!, encoding: .utf8)) //->Optional("vapor")
How to convert hexadecimal string to an array of UInt8 bytes in Swift?
You can convert your hex string
back to an array of [UInt8]
by iterating over every two hex characters and initializing a UInt8
using its string radix initializer. The following implementation assumes the hex string is well formed:
Edit/update: Xcode 11 • Swift 5.1
extension StringProtocol {
// Decodes the receiver as a hex string into `Data`. Assumes well-formed input.
var hexaData: Data { .init(hexa) }
// Decodes the receiver as a hex string into a byte array. Assumes well-formed input.
var hexaBytes: [UInt8] { .init(hexa) }
// Lazily yields one UInt8 per pair of hex characters.
// NOTE(review): when `UInt8(_:radix:)` fails on a non-hex pair it returns nil,
// which ENDS the sequence early — malformed input is silently truncated, not
// reported. The prose above says the input is assumed to be well formed.
private var hexa: UnfoldSequence<UInt8, Index> {
sequence(state: startIndex) { startIndex in
guard startIndex < self.endIndex else { return nil }
// A trailing odd character decodes as a single hex digit (e.g. "f" -> 15),
// because the offset is clamped to endIndex.
let endIndex = self.index(startIndex, offsetBy: 2, limitedBy: self.endIndex) ?? self.endIndex
defer { startIndex = endIndex }
return UInt8(self[startIndex..<endIndex], radix: 16)
}
}
}
// Example: the same 32-character hex string decoded both ways.
let string = "e0696349774606f1b5602ffa6c2d953f"
let data = string.hexaData // 16 bytes
let bytes = string.hexaBytes // [224, 105, 99, 73, 119, 70, 6, 241, 181, 96, 47, 250, 108, 45, 149, 63]
If you would like to handle malformed hexa strings as well you can make it a throwing method:
extension String {
// Errors thrown when decoding a malformed hex string.
enum DecodingError: Error {
// invalidHexaCharacter carries the first offending character found.
case invalidHexaCharacter(Character), oddNumberOfCharacters
}
}
extension Collection {
    /// Lazily yields consecutive subsequences of at most `maxLength` elements,
    /// walking the collection from start to end; the final chunk may be shorter.
    func unfoldSubSequences(limitedTo maxLength: Int) -> UnfoldSequence<SubSequence, Index> {
        sequence(state: startIndex) { cursor in
            guard cursor < endIndex else { return nil }
            // Advance by maxLength, clamping at the end of the collection.
            let next: Index
            if let advanced = index(cursor, offsetBy: maxLength, limitedBy: endIndex) {
                next = advanced
            } else {
                next = endIndex
            }
            let chunk = self[cursor..<next]
            cursor = next
            return chunk
        }
    }
}
extension StringProtocol {
// Decodes the hex string into any byte collection that is both a DataProtocol
// and RangeReplaceableCollection (e.g. [UInt8] or Data); the concrete type is
// chosen by the caller's type context. Throws String.DecodingError on
// malformed input (see the DataProtocol initializer below, declared in this file).
func hexa<D>() throws -> D where D: DataProtocol & RangeReplaceableCollection {
try .init(self)
}
}
extension DataProtocol where Self: RangeReplaceableCollection {
// Throwing hex decoder: two hex characters per byte.
// - Throws: `String.DecodingError.oddNumberOfCharacters` if the length is odd,
//   `.invalidHexaCharacter` for the first non-hex character encountered.
init<S: StringProtocol>(_ hexa: S) throws {
guard hexa.count.isMultiple(of: 2) else {
throw String.DecodingError.oddNumberOfCharacters
}
self = .init()
// One byte per two UTF-8 units; counting utf8 avoids a second grapheme walk.
reserveCapacity(hexa.utf8.count/2)
for pair in hexa.unfoldSubSequences(limitedTo: 2) {
guard let byte = UInt8(pair, radix: 16) else {
// Find and report the offending character.
for character in pair where !character.isHexDigit {
throw String.DecodingError.invalidHexaCharacter(character)
}
// NOTE(review): reached only if UInt8 parsing failed yet every character
// passes `isHexDigit` (e.g. non-ASCII digits); the pair is then silently
// skipped, shortening the output — confirm this is intended.
continue
}
append(byte)
}
}
}
Usage:
// Usage: the same hex string decoded into [UInt8] and into Data.
let hexaString = "e0696349774606f1b5602ffa6c2d953f"
do {
let bytes: [UInt8] = try hexaString.hexa()
print(bytes)
let data: Data = try hexaString.hexa()
print(data)
} catch {
// Prints the DecodingError for malformed input instead of crashing.
print(error)
}
This will print
[224, 105, 99, 73, 119, 70, 6, 241, 181, 96, 47, 250, 108, 45, 149, 63]
16 bytes
Swift Hex to byte (iOS)
You can convert your hex string back to an array of UInt8 using this extension:
extension StringProtocol {
    /// Decodes the receiver as a hex string into a byte array.
    ///
    /// Bug fix: the original sliced `hexa[$0..<$0.advanced(by: 2)]`, which
    /// trapped with "index out of range" on odd-length input; the upper bound
    /// is now clamped, so a trailing lone digit decodes as a single hex digit.
    /// Pairs containing non-hex characters are dropped by `compactMap`.
    var hexa2Bytes: [UInt8] {
        let chars = Array(self)
        return stride(from: 0, to: chars.count, by: 2).compactMap {
            UInt8(String(chars[$0..<Swift.min($0 + 2, chars.count)]), radix: 16)
        }
    }
}
How to convert Data to hex string in swift
A simple implementation (taken from How to hash NSString with SHA1 in Swift?, with an additional option for uppercase output) would be
extension Data {
    /// Option flags for `hexEncodedString(options:)`.
    struct HexEncodingOptions: OptionSet {
        let rawValue: Int
        /// Emit uppercase hex digits (default is lowercase).
        static let upperCase = HexEncodingOptions(rawValue: 1 << 0)
    }

    /// Returns the hexadecimal representation of the bytes, two digits per byte.
    func hexEncodedString(options: HexEncodingOptions = []) -> String {
        let format = options.contains(.upperCase) ? "%02hhX" : "%02hhx"
        var encoded = ""
        encoded.reserveCapacity(2 * count)
        for byte in self {
            encoded += String(format: format, byte)
        }
        return encoded
    }
}
I chose a hexEncodedString(options:)
method in the style of the existing method base64EncodedString(options:)
.
Data
conforms to the Collection
protocol, therefore one can use map()
to map each byte to the corresponding hex string.
The %02x
format prints the argument in base 16, filled up to two digits
with a leading zero if necessary. The hh
modifier causes the argument
(which is passed as an integer on the stack) to be treated as a one byte
quantity. One could omit the modifier here because $0
is an unsigned
number (UInt8
) and no sign-extension will occur, but it does no harm leaving
it in.
The result is then joined to a single string.
Example:
// Example: five bytes covering the boundary values 0, 127, 128 and 255.
let data = Data([0, 1, 127, 128, 255])
// For Swift < 4.2 use:
// let data = Data(bytes: [0, 1, 127, 128, 255])
print(data.hexEncodedString()) // 00017f80ff
print(data.hexEncodedString(options: .upperCase)) // 00017F80FF
The following implementation is faster by a factor about 50
(tested with 1000 random bytes). It is inspired by
RenniePet's solution
and Nick Moore's solution, but takes advantage of String(unsafeUninitializedCapacity:initializingUTF8With:)
which was introduced with Swift 5.3/Xcode 12 and is available on macOS 11 and iOS 14 or newer.
This method allows to create a Swift string from UTF-8 units efficiently, without unnecessary copying or reallocations.
An alternative implementation for older macOS/iOS versions is also provided.
extension Data {
// Option flags for hexEncodedString(options:).
struct HexEncodingOptions: OptionSet {
let rawValue: Int
static let upperCase = HexEncodingOptions(rawValue: 1 << 0)
}
// Fast hex encoding: where available, writes UTF-8 digit bytes straight into
// the string's uninitialized buffer, avoiding per-byte String(format:) calls.
func hexEncodedString(options: HexEncodingOptions = []) -> String {
// Lookup table of the 16 hex digits in the requested case.
let hexDigits = options.contains(.upperCase) ? "0123456789ABCDEF" : "0123456789abcdef"
if #available(macOS 11.0, iOS 14.0, watchOS 7.0, tvOS 14.0, *) {
let utf8Digits = Array(hexDigits.utf8)
return String(unsafeUninitializedCapacity: 2 * self.count) { (ptr) -> Int in
// NOTE(review): baseAddress is force-unwrapped before the loop; for an
// empty Data (capacity 0) this relies on the buffer pointer being
// non-nil even at zero capacity — confirm on the deployment targets.
var p = ptr.baseAddress!
for byte in self {
// High nibble, then low nibble.
p[0] = utf8Digits[Int(byte / 16)]
p[1] = utf8Digits[Int(byte % 16)]
p += 2
}
// Every byte produced exactly two UTF-8 code units.
return 2 * self.count
}
} else {
// Fallback for older OS versions: collect UTF-16 units, one String init at the end.
let utf16Digits = Array(hexDigits.utf16)
var chars: [unichar] = []
chars.reserveCapacity(2 * self.count)
for byte in self {
chars.append(utf16Digits[Int(byte / 16)])
chars.append(utf16Digits[Int(byte % 16)])
}
return String(utf16CodeUnits: chars, count: chars.count)
}
}
}
How to convert a hexadecimal string to bytes
To answer your immediate question: UInt32(intValue)
creates a new (constant) value whose address cannot be taken with &
. So
var intValue: UInt
scanner.scanHexInt32(&UInt32(intValue))
should be
var intValue: UInt32 = 0
scanner.scanHexInt32(&intValue)
And
dataBytes.append(&intValue, count: 1)
does not compile because &intValue
is a pointer to an integer, not to an UInt8
. Here you can do
dataBytes.append(UInt8(intValue))
because the value is known to fit in a single byte.
Having said that, all the conversions from String
to NSString
are not needed. A more “Swifty” translation of that Objective-C code to Swift would be
/// Decodes a hex string into `Data`, two characters per byte.
///
/// Fixes over the original: `Scanner.scanHexInt32` is deprecated, and its
/// Bool result was ignored — so any malformed pair left `intValue` at 0 and
/// silently appended a spurious 0x00 byte. Each two-character slice is now
/// parsed directly with `UInt8(_:radix:)`, and malformed pairs are skipped.
/// A trailing odd character is still dropped, as before, because
/// `index(_:offsetBy:limitedBy:)` returns nil when fewer than two characters
/// remain.
///
/// - Parameter hexStr: The hexadecimal string to decode.
/// - Returns: The decoded bytes.
func convertHex(toDataBytes hexStr: String) -> Data {
    var dataBytes = Data()
    var startPos = hexStr.startIndex
    while let endPos = hexStr.index(startPos, offsetBy: 2, limitedBy: hexStr.endIndex) {
        if let byte = UInt8(hexStr[startPos..<endPos], radix: 16) {
            dataBytes.append(byte)
        }
        startPos = endPos
    }
    return dataBytes
}
For an alternative approach (which includes error checking) see for example
hex/binary string conversion in Swift
Hex string to text conversion - swift 3
You probably can use something like this:
// Converts a hex string (pairs optionally prefixed with "0x") to the text whose
// Unicode scalars those pairs encode. Non-matching characters are ignored.
func hexToStr(text: String) -> String {
// Group 1 is an optional "0x" prefix; group 2 is exactly two hex digits.
let regex = try! NSRegularExpression(pattern: "(0x)?([0-9A-Fa-f]{2})", options: .caseInsensitive)
let textNS = text as NSString
let matchesArray = regex.matches(in: textNS as String, options: [], range: NSMakeRange(0, textNS.length))
let characters = matchesArray.map {
// The force-unwraps are safe: group 2 is always two hex digits, so the
// parsed value is <= 0xFF, which is a valid Unicode scalar.
// NOTE(review): `rangeAt(_:)` is the Swift 3 spelling; Swift 4+ renamed it
// to `range(at:)` — this snippet will not compile on modern Swift as-is.
Character(UnicodeScalar(UInt32(textNS.substring(with: $0.rangeAt(2)), radix: 16)!)!)
}
return String(characters)
}
Converting Hex String to NSData in Swift
This is my hex string to Data
routine:
extension String {
/// Create `Data` from hexadecimal string representation
///
/// This creates a `Data` object from hex string. Note, if the string has any spaces or non-hex characters (e.g. starts with '<' and ends with a '>'), those are ignored and only hex characters are processed.
///
/// - returns: Data represented by this hexadecimal string, or `nil` if the string contains no hex digits at all.
var hexadecimal: Data? {
var data = Data(capacity: count / 2)
// {1,2} also matches a lone hex digit, which becomes its own byte
// (e.g. "f0f" decodes to [0xf0, 0x0f]) — a consequence of the skip-junk design.
let regex = try! NSRegularExpression(pattern: "[0-9a-f]{1,2}", options: .caseInsensitive)
regex.enumerateMatches(in: self, range: NSRange(startIndex..., in: self)) { match, _, _ in
let byteString = (self as NSString).substring(with: match!.range)
// Force-unwrap is safe: the regex guarantees 1-2 hex digits, which always fit a UInt8.
let num = UInt8(byteString, radix: 16)!
data.append(num)
}
guard data.count > 0 else { return nil }
return data
}
}
And for the sake of completeness, this is my Data
to hex string routine:
extension Data {
    /// Hexadecimal string representation of `Data` object.
    var hexadecimal: String {
        // Fold every byte into the accumulator as a two-digit lowercase pair.
        return reduce(into: "") { result, byte in
            result += String(format: "%02x", byte)
        }
    }
}
Note, as shown in the above, I generally only convert between hexadecimal representations and NSData
instances (because if the information could have been represented as a string you probably wouldn't have created a hexadecimal representation in the first place). But your original question wanted to convert between hexadecimal representations and String
objects, and that might look like so:
extension String {
    /// Create `String` representation of `Data` created from hexadecimal string representation
    ///
    /// This takes a hexadecimal representation and creates a String object from that. Note, if the string has any spaces, those are removed. Also if the string started with a `<` or ended with a `>`, those are removed, too.
    ///
    /// For example,
    ///
    ///     String(hexadecimal: "<666f6f>")
    ///
    /// is
    ///
    ///     Optional("foo")
    ///
    /// - parameter encoding: The `String.Encoding` used to interpret the decoded bytes as text.
    ///
    /// - returns: `String` represented by this hexadecimal string.
    init?(hexadecimal string: String, encoding: String.Encoding = .utf8) {
        // Bug fix: `hexadecimal` is declared earlier in this file as a computed
        // *property* (`var hexadecimal: Data?`), so the original call form
        // `string.hexadecimal()` did not compile. Access it without parentheses.
        guard let data = string.hexadecimal else {
            return nil
        }
        self.init(data: data, encoding: encoding)
    }

    /// Create hexadecimal string representation of `String` object.
    ///
    /// For example,
    ///
    ///     "foo".hexadecimalString()
    ///
    /// is
    ///
    ///     Optional("666f6f")
    ///
    /// - parameter encoding: The `String.Encoding` that indicates how the string should be converted to `Data` before performing the hexadecimal conversion.
    ///
    /// - returns: `String` representation of this String object.
    func hexadecimalString(encoding: String.Encoding = .utf8) -> String? {
        return data(using: encoding)?
            .hexadecimal
    }
}
You could then use the above like so:
let hexString = "68656c6c 6f2c2077 6f726c64"
print(String(hexadecimal: hexString))
Or,
let originalString = "hello, world"
print(originalString.hexadecimalString())
For permutations of the above for earlier Swift versions, see the revision history of this question.
Swift convert HexString to Integer
Hope this will help you
// NOTE(review): despite the question title ("hex string to integer"), this
// snippet does NOT parse "72AE" as a hex value. It copies the UTF-8 code units
// of the characters '7','2','A','E' (i.e. [0x37, 0x32, 0x41, 0x45]) into the
// byte array, then rebuilds the original string from those same bytes.
let hexaString = "72AE"
var byteArray = [UInt8]()
byteArray += hexaString.utf8 // Convert into byte array
// Retain the original string from byte array
// NOTE(review): `NSUTF8StringEncoding` is the Swift 2 spelling; Swift 3+ uses
// `String.Encoding.utf8` (passing its rawValue to NSString(bytes:length:encoding:)).
let stringFromByteArray = NSString(bytes: byteArray, length: byteArray.count, encoding: NSUTF8StringEncoding)
Related Topics
How to Use Trailing Closure in If Condition
Swiftui Coordinator Not Updating the Containing View's Property
Turn for in Loops Local Variables into Mutable Variables
What Are the Advantages Swift Deprecates C-Style for Statement
Incorrect String to Date Conversion Swift 3.0
Type of Expression Is Ambiguous Without More Context Swift
How to Change the Z Index or Stack Order of Uiview
How to Destroy a Singleton in Swift
How to Listen to Global Hotkeys With Swift in a Macos App
How to Create Instance Variable and Class Variable of the Same Name
Flip Arfaceanchor from Left-Handed to Right-Handed Coordinate System
Enumerate Is Unavailable Call the Enumerate Method on the Sequence
How to Decode JSON Data That Could Be an Array or a Single Element in Swift
Many Ways of Defining a Swift Dictionary