Hex
References
Many examples come from cplusplus.com and stackoverflow.com.
Shell
See also Linux Commands.
Convert 66 6f 6f → "foo"
hexdump -e '"%2x"' <myfile>           # Convert myfile into a long hexadecimal string - NOTE the format must be DOUBLE-QUOTED inside the quotes
hexdump -C <myfile> # Canonical hex + ascii display (16 bytes)
hd <myfile> # (idem)
echo 202122 | xxd -r -p # Convert hexdump to a binary string
echo -e '\x66\x6f\x6f' # 'foo'
Or to convert hex to decimal [1]
echo $((0x15a))
printf '%d\n' 0x15a
perl -e 'printf ("%d\n", 0x15a)'
echo 'ibase=16;obase=A;15A' | bc
Convert "foo" → 66 6f 6f
xxd -g8 -c32 <file>        # Output 32 bytes per line, grouped in 8-byte columns
xxd -p -c64 <file> # Output 64 bytes per line, plain (postscript) mode
echo -n ' !"' | xxd -p # Convert binary string to hexdump - DON'T FORGET -n
Or to convert decimal to hex
printf '%x\n' 346
perl -e 'printf ("%x\n", 346)'
echo 'ibase=10;obase=16;346' | bc
hex editors
hex diffs
diff -u <(hexdump -v -C file1) <(hexdump -v -C file2)
C / C++
Convert an integer to hex string
C++
This looks simple but is in fact extremely treacherous because of persistent iomanip flags (a flag-restoring sketch follows the snippet):
#include <iostream>
#include <iomanip>
using namespace std;
int main()
{
unsigned value = 10;
// 'internal' to make sure we use correct alignment (because 'left' is persistent)
cout << "0x" << setfill('0') << setw(2) << internal << hex << value << endl;   // prints "0x0a"
return 0;
}
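Because setfill, hex and friends stick to the stream, one way to keep them from leaking into later output is to save and restore the formatting state. A minimal sketch (not from the original page):

#include <iostream>
#include <iomanip>
using namespace std;

int main()
{
    ios::fmtflags saved_flags = cout.flags();   // remember the current formatting state
    char saved_fill = cout.fill();
    cout << "0x" << setfill('0') << setw(2) << internal << hex << 10 << endl;   // "0x0a"
    cout.flags(saved_flags);                    // restore, so later output stays decimal
    cout.fill(saved_fill);
    cout << 10 << endl;                         // prints "10" again, not "a"
    return 0;
}

In C++20, std::format("0x{:02x}", 10) sidesteps the stream state entirely.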
C char2hex
A complete C solution, char2hex:
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

// ############################################################################################################
static uint32_t _char2hex(char *s, const uint8_t data[], size_t data_n, size_t n)
{
uint32_t o = 0;
#define out(f, a ...) { \
o += (uint32_t)snprintf(s + o, n - (uint32_t)o, f, ## a); \
o = o <= n ? o : n; \
}
if (n > 0) {
s[0] = 0;
}
for (size_t i = 0; i < data_n; i++) {
out("%.2x", data[i]);
}
return o;
#undef out
}
/**
* Convert an array of char into a list of hexadecimal values.
* For instance, "\xca\xfe" is converted into "cafe".
*/
char * char2hex(char *s, const uint8_t data[], size_t data_n, size_t n)
{
uint32_t o = _char2hex(s, data, data_n, n);
assert(o < n); // Check that the given buffer was big enough
return s;
}
Read an integer from a hex string
Homebrew
#include <string>
using namespace std;

const char _hex[] = "0123456789ABCDEF";
// Change a 0x0A into 'A'
char itoh(int nibble)
{
return _hex[nibble];
}
// Change a 'A' (or 'a') into 0x0A
// (for letters, hex >> 6 is 1, so the expression adds 1 + (1 << 3) = 9 to the low nibble; for digits it adds 0)
int htoi(char hex)
{
return ( hex >> 6 ) + (( hex >> 6 ) << 3 ) + ( hex & 0x0F );
}
// Change "A010" into 0xA010
template<class T>
T hextoint_BE(string h)
{
T i=0;
for(string::const_iterator it=h.begin(); it!=h.end(); ++it)
{
i=(i<<4)|htoi(*it);
}
return i;
}
// Change "A010" into "\xA0\x10"
string hextostring_BE(const string& h)
{
string x;
for(unsigned int i=0; (i+1)<h.length(); i+=2) {
x += (htoi(h[i]) << 4)+ htoi(h[i+1]);
}
return x;
}
// Change 0xA010 into "A010"
template<class T>
string inttohex_BE(T i, unsigned int size)
{
string sh;
while( i > 0 )
{
sh = itoh(i&0xF) + sh;
i>>=4;
}
if(!size)
return sh;
if(size<sh.length())
return string(sh,sh.length()-size,size);
else
return string(size-sh.length(),'0')+sh;
}
// Change "\xA0\x10" into "A010"
string stringtohex_BE(const string& s, unsigned int size)
{
string h;
for(string::const_iterator it=s.begin();it!=s.end();++it)
{
h += itoh((*it & 0xF0) >> 4);
h += itoh((*it & 0x0F) >> 0);
}
if(!size)
return h;
if(size<h.length())
return string(h,h.length()-size,size);
else
return string(size-h.length(),'0')+h;
}
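A quick usage sketch for the helpers above (hypothetical main(), assuming the definitions are pasted into the same translation unit; the test values are illustrative):

#include <cassert>

int main()
{
    assert( hextoint_BE<unsigned>("A010") == 0xA010 );
    assert( inttohex_BE<unsigned>(0xA010, 4) == "A010" );
    assert( hextostring_BE("A010") == "\xA0\x10" );
    assert( stringtohex_BE("\xA0\x10", 4) == "A010" );
    return 0;
}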
Using atol
/* atol example */
#include <stdio.h>
#include <stdlib.h>
int main ()
{
long int li;
char szInput [256];
printf ("Enter a long number: ");
fgets ( szInput, 256, stdin );   /* gets() is unsafe and was removed in C11 */
li = atol (szInput);
printf ("The value entered is %ld. The double is %ld.\n",li,li*2);
return 0;
}
Using strtol, strtoll
strtol (resp. strtoll) returns LONG_MIN or LONG_MAX (resp. LLONG_MIN or LLONG_MAX) in case of under/overflow (an errno-based detection sketch follows the first example below).
/* strtol example */
#include <stdio.h>
#include <stdlib.h>
int main ()
{
char szNumbers[] = "2001 60c0c0 -1101110100110100100000 0x6fffff";
char * pEnd;
long int li1, li2, li3, li4;
li1 = strtol (szNumbers,&pEnd,10);
li2 = strtol (pEnd,&pEnd,16);
li3 = strtol (pEnd,&pEnd,2);
li4 = strtol (pEnd,NULL,0);
printf ("The decimal equivalents are: %ld, %ld, %ld and %ld.\n",
li1, li2, li3, li4);
return 0;
}
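As noted above, strtol saturates to LONG_MIN/LONG_MAX on overflow. A minimal sketch of detecting this with errno (not from the original page; the input value is deliberately too large):

#include <errno.h>
#include <limits.h>
#include <stdio.h>
#include <stdlib.h>

int main ()
{
    errno = 0;
    long li = strtol ("123456789123456789123456789", NULL, 10);   /* deliberately overflows */
    if (errno == ERANGE && (li == LONG_MAX || li == LONG_MIN)) {
        printf ("overflow or underflow detected\n");
    } else {
        printf ("value: %ld\n", li);
    }
    return 0;
}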
#include <cstdlib>
#include <iostream>
#include <string>
using namespace std;
int main() {
string s = "abcd";
char * p;
long n = strtol( s.c_str(), & p, 16 );
if ( * p != 0 ) {
cout << "not a number" << endl;
}
else {
cout << n << endl;
}
}
Using strtoul, strtoull
Use strtoul (resp. strtoull) when the value to read is greater than numeric_limits<long>::max() (resp. numeric_limits<long long>::max()); a strtoull sketch follows the examples.
/* strtoul example */
#include <stdio.h>
#include <stdlib.h>
int main ()
{
char szInput [256];
unsigned long ul;
printf ("Enter an unsigned number: ");
fgets (szInput,256,stdin);
ul = strtoul (szInput,NULL,0);
printf ("Value entered: %lu. Its double: %lu\n",ul,ul*2);
return 0;
}
#include <cstdlib>
#include <iostream>
#include <string>
using namespace std;
int main() {
string s = "fffefffe";
char * p;
unsigned long n = strtoul( s.c_str(), & p, 16 );   // unsigned, so 0xfffefffe is not misread as negative on 32-bit
if ( * p != 0 ) {
cout << "not a number" << endl;
} else {
cout << n << endl;
}
}
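For values that do not fit even in unsigned long (e.g. on 32-bit platforms), the same pattern works with strtoull. A minimal sketch (not from the original page; the input value is illustrative):

#include <stdio.h>
#include <stdlib.h>

int main ()
{
    unsigned long long ull = strtoull ("fffefffefffefffe", NULL, 16);
    printf ("%llu\n", ull);   /* prints the full 64-bit value without truncation */
    return 0;
}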
Using sscanf
#include <stdio.h>
int main()
{
char s[] = "fffffffe";
unsigned int x;
sscanf(s, "%x", &x);   /* %x expects an unsigned int * */
printf("%u\n", x);
}
Using stringstream
#include <sstream>
#include <iostream>
int main() {
unsigned int x;
std::stringstream ss;
ss << std::hex << "fffefffe";
ss >> x;
// output it as a signed type
std::cout << static_cast<int>(x) << std::endl;
}
C - hex2char
#include <stdint.h>

// ############################################################################################################
// Convert a single hex digit ('0'-'9', 'a'-'f', 'A'-'F') into its value 0-15:
// for letters the top bits contribute 1 + (1 << 3) = 9, for digits they contribute 0.
static uint8_t _htoi(char hex)
{
return ((uint8_t)hex >> 6) + (((uint8_t)hex >> 6) << 3) + ((uint8_t)hex & 0x0F);
}
// ############################################################################################################
/** Convert an hex null-terminated string ("cafe") into a byte array ({0xca, 0xfe}).
*
* Convert at most `n` bytes from an hexadecimal null-terminated string into a byte array.
* For instance, it converts "cafe" to {0xca, 0xfe}.
*
* @param[out] dst pointer to destination buffer (at least #n-byte long).
* @param[in] src pointer to source string.
* @param[in] n maximum number of bytes to write in #dst.
*
* @return Number of bytes converted.
*/
uint32_t hex2char(void *dst, const char *src, uint32_t n)
{
uint8_t * p;
uint8_t * _dst = dst;
const char *_src = src;
for (p = _dst; (p < _dst + n) && _src[0] && _src[1]; p++, _src += 2) {
*p = (_htoi(_src[0]) << 4) + _htoi(_src[1]);
}
return p - _dst;
}
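A shorter (though slower) alternative to the loop above is the standard %2hhx conversion of sscanf. A minimal sketch (not from the original page; buffer and input chosen for illustration):

#include <stdint.h>
#include <stdio.h>

int main ()
{
    const char *src = "cafe";
    uint8_t dst[2];
    for (size_t i = 0; i < sizeof(dst) && src[2*i] && src[2*i + 1]; i++) {
        sscanf (src + 2*i, "%2hhx", &dst[i]);   /* read exactly two hex digits into one byte */
    }
    printf ("%02x %02x\n", dst[0], dst[1]);     /* prints "ca fe" */
    return 0;
}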
C - hex2char - fastest and lowercase/uppercase
Inspired by SO (https://stackoverflow.com/questions/10324/convert-a-hexadecimal-string-to-an-integer-efficiently-in-c):
#include <stdint.h>

// ############################################################################################################
// 74ns on i686 (32-bit), vs 150ns for version above
uint32_t hex2char(void *dst, const char *src, uint32_t n)
{
uint8_t * p;
uint8_t * _dst = dst;
const char *_src = src;
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Winitializer-overrides"
static const long hextable[] = { // Reduce to signed char for space saving
[0 ... 255] = -1,
['0'] = 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
['A'] = 10, 11, 12, 13, 14, 15,
['a'] = 10, 11, 12, 13, 14, 15,
};
#pragma clang diagnostic pop
for (p = _dst; (p < _dst + n) && _src[0] && _src[1]; p++) {
*p = hextable[*_src++];
*p = (*p << 4) | hextable[*_src++];
}
return p - _dst;
}
Python
- Integer
- References: https://stackoverflow.com/questions/21017698/converting-int-to-bytes-in-python-3
'4d2' == '%x' % 1234 # to hex
'0x4d2' == hex(1234) # to hex (with '0x' prefix)
'0x4d2' == '0x%x' % 1234
'000004d2' == '%08x' % 1234 # to hex (padded)
'000004D2' == '%08X' % 1234 # to hex (padded,caps)
'\x41' == chr(0x41) # to hex char
b'\x04\xd2'== (1234).to_bytes(2,'big') # to bytes
b'\xd2\x04'== (1234).to_bytes(2,'little') # to bytes (little endian)
'04d2' == (1234).to_bytes(2,'big').hex() # to hex
'd204' == (1234).to_bytes(2,'little').hex() # to hex (little endian)
# Use ((1234).bit_length() + 7)//8 to get length
'0000010011010010' == f"{1234:016b}" # to bin
'10011010010' == f"{1234:11b}" # to bin
'10011010010' == bin(1234)[2:] # to bin
'10011010010' == bin(1234).removeprefix('0b') # to bin (Python 3.9+)
'0b10011010010' == bin(1234) # to bin
1234 == int('04d2',16) # from hex
1234 == int('0x04d2',0) # from hex - base 0, autodetect
1234 == 0x04d2 # from hex
1234 == int.from_bytes(b'\x04\xd2','big') # from hex
1234 == int.from_bytes(b'\xd2\x04','little') # from hex (little endian)
22 == int('10110',2) # from binary
'1234' == '%d' % 0x04d2 # from hex
'0x1234' == '0x%d' % 0x04d2 # careful: decimal digits with a literal '0x' prefix, not a hex representation
# Some functions
def int_to_bytes(x: int) -> bytes:
return x.to_bytes((x.bit_length() + 7) // 8, 'big')
def int_from_bytes(xbytes: bytes) -> int:
return int.from_bytes(xbytes, 'big')
- Strings, arrays or lists - Python 2
import binascii
'deadbeef' == '\xde\xad\xbe\xef'.encode("hex")
'deadbeef' == binascii.b2a_hex('\xde\xad\xbe\xef')
'deadbeef' == binascii.hexlify('\xde\xad\xbe\xef')
'deadbeef' == str(bytearray([0xde,0xad,0xbe,0xef])).encode('hex')
'deadbeef' == ''.join(map(chr,[0xde,0xad,0xbe,0xef])).encode('hex') # Likely slow, but benchmarks needed on 2.x / 3.x
'deadbeef' == str(bytearray([222, 173, 190, 239])).encode('hex')
'\xde\xad\xbe\xef' == 'deadbeef'.decode("hex")
'\xde\xad\xbe\xef' == binascii.a2b_hex('deadbeef')
'\xde\xad\xbe\xef' == binascii.unhexlify('deadbeef')
'\xde\xad\xbe\xef' == bytearray.fromhex('deadbeef') # IGNORE SPACES, IF ANY
'\xde\xad\xbe\xef' == bytearray('\xde\xad\xbe\xef')
'\xde\xad\xbe\xef' == bytearray([0xde,0xad,0xbe,0xef])
'\xde\xad\xbe\xef' == str(bytearray([0xde,0xad,0xbe,0xef]))
[0xde,0xad,0xbe,0xef] == map(ord,'\xde\xad\xbe\xef')
[0xde,0xad,0xbe,0xef] == list(bytearray.fromhex('deadbeef')) # IGNORE SPACES, IF ANY
- Strings, arrays or lists - Python 3
import binascii
'deadbeef' == (b'\xde\xad\xbe\xef').hex()
'deadbeef' == binascii.b2a_hex(b'\xde\xad\xbe\xef').decode()
'deadbeef' == binascii.hexlify(b'\xde\xad\xbe\xef').decode()
'deadbeef' == binascii.b2a_hex(bytes([0xde,0xad,0xbe,0xef])).decode()
'deadbeef' == binascii.hexlify(bytes([0xde,0xad,0xbe,0xef])).decode()
'10110' == bytes([x + 48 for x in [1,0,1,1,0]]).decode()
# These are not recommended, since they produce bytes(), which could be mistaken for an array of actual bytes
b'deadbeef' == binascii.b2a_hex(b'\xde\xad\xbe\xef')
b'deadbeef' == binascii.hexlify(b'\xde\xad\xbe\xef')
b'deadbeef' == binascii.b2a_hex(bytes([0xde,0xad,0xbe,0xef]))
b'deadbeef' == binascii.hexlify(bytes([0xde,0xad,0xbe,0xef]))
b'10110' == bytes([x + 48 for x in [1,0,1,1,0]])
b'\xde\xad\xbe\xef' == bytes.fromhex('deadbeef') # IGNORE SPACES, IF ANY
b'\xde\xad\xbe\xef' == binascii.a2b_hex('deadbeef')
b'\xde\xad\xbe\xef' == binascii.unhexlify('deadbeef')
b'\xde\xad\xbe\xef' == binascii.a2b_hex(b'deadbeef')
b'\xde\xad\xbe\xef' == binascii.unhexlify(b'deadbeef')
b'\xde\xad\xbe\xef' == bytes([0xde,0xad,0xbe,0xef])
b'\x01\x00\x01\x01\x00' == bytes([1,0,1,1,0])
from bitstring import BitArray
# BitArray is a Swiss-army knife for bytes <-> bin <-> hex <-> (u)int conversion
# Conversion to any format:
'aaaa' == BitArray(hex="aaaa").hex
b'\xaa\xaa' == BitArray(hex="aaaa").bytes
-21846 == BitArray(hex="aaaa").int
43690 == BitArray(hex="aaaa").uint
'1010101010101010' == BitArray(hex="aaaa").bin
# Conversion from any format:
'aaaa' == BitArray(hex="aaaa").hex
'aaaa' == BitArray(bytes=b'\xaa\xaa').hex
'aaaa' == BitArray(int=-21846,length=16).hex
'aaaa' == BitArray(uint=43690,length=16).hex
'aaaa' == BitArray(bin='1010101010101010').hex
[0xde,0xad,0xbe,0xef] == list(b'\xde\xad\xbe\xef')
[0xde,0xad,0xbe,0xef] == list(bytes.fromhex('deadbeef')) # IGNORE SPACES, IF ANY
[0xde,0xad,0xbe,0xef] == list(bytearray.fromhex('deadbeef')) # IGNORE SPACES, IF ANY
# Note that in Python 3, bytearray is a different type from bytes (mutable vs. immutable), but they compare equal for the same content
bytearray(b'\xde\xad\xbe\xef') == bytearray([0xde,0xad,0xbe,0xef])