Fix endianness issue when reading uint32

In the tests, the uint32 is sent as a big-endian hex stream; however, the
char buffer that collected the stream was used as-is, without any
byte-order conversion. Add a temporary buffer that collects the eight
`greentea_getc()` calls, then reorder the bytes into the endianness
expected by `unhexify()`.
diff --git a/tests/suites/target_test.function b/tests/suites/target_test.function
index 35b311c..4d03c3b 100644
--- a/tests/suites/target_test.function
+++ b/tests/suites/target_test.function
@@ -75,7 +75,7 @@
     c[1] = greentea_getc();
     c[2] = '\0';
 
-    assert( unhexify( &byte, c ) != 2 );
+    TEST_HELPER_ASSERT( unhexify( &byte, c ) != 2 );
     return( byte );
 }
 
@@ -90,18 +90,19 @@
 uint32_t receive_uint32()
 {
     uint32_t value;
-    const uint8_t c[9] = { greentea_getc(),
-                           greentea_getc(),
-                           greentea_getc(),
-                           greentea_getc(),
-                           greentea_getc(),
-                           greentea_getc(),
-                           greentea_getc(),
-                           greentea_getc(),
-                           '\0'
-                         };
-    assert( unhexify( &value, c ) != 8 );
-    return( (uint32_t)value );
+    uint8_t c_be[8] = { greentea_getc(),
+                        greentea_getc(),
+                        greentea_getc(),
+                        greentea_getc(),
+                        greentea_getc(),
+                        greentea_getc(),
+                        greentea_getc(),
+                        greentea_getc()
+                      };
+    const uint8_t c[9] = { c_be[6], c_be[7], c_be[4], c_be[5], c_be[2],
+                           c_be[3], c_be[0], c_be[1], '\0' };
+    TEST_HELPER_ASSERT( unhexify( (uint8_t*)&value, c ) != 8 );
+    return( value );
 }
 
 /**