Chromium Code Reviews

Unified Diff: src/trusted/validator_ragel/gen/decoder_x86_64.c

Issue 11000033: Move validator_x86_XX.rl out of unreviewed. (Closed) Base URL: svn://svn.chromium.org/native_client/trunk/src/native_client/
Patch Set: Created 8 years, 2 months ago
Index: src/trusted/validator_ragel/gen/decoder_x86_64.c
===================================================================
--- src/trusted/validator_ragel/gen/decoder_x86_64.c (revision 9996)
+++ src/trusted/validator_ragel/gen/decoder_x86_64.c (working copy)
@@ -11,7 +11,7 @@
#include "native_client/src/include/elf32.h"
#include "native_client/src/shared/utils/types.h"
-#include "native_client/src/trusted/validator_ragel/unreviewed/decoding.h"
+#include "native_client/src/trusted/validator_ragel/decoder_internal.h"
#include "native_client/src/trusted/validator_ragel/gen/decoder_x86_64_instruction_consts.h"
@@ -12356,7 +12356,7 @@
136u, 143u, 144u, 151u, 152u, 159u, 160u, 167u,
168u, 175u, 176u, 183u, 184u, 191u, 192u, 199u,
200u, 207u, 208u, 215u, 216u, 223u, 224u, 231u,
- 232u, 239u, 248u, 255u, 5u, 13u, 21u, 29u,
+ 240u, 247u, 248u, 255u, 5u, 13u, 21u, 29u,
37u, 45u, 53u, 61u, 69u, 77u, 85u, 93u,
101u, 109u, 117u, 125u, 133u, 141u, 149u, 157u,
165u, 173u, 181u, 189u, 197u, 205u, 213u, 221u,
@@ -12372,7 +12372,7 @@
144u, 151u, 152u, 159u, 160u, 167u, 168u, 175u,
176u, 183u, 184u, 191u, 192u, 199u, 200u, 207u,
208u, 215u, 216u, 223u, 224u, 231u, 232u, 239u,
- 248u, 255u, 4u, 5u, 12u, 13u, 20u, 21u,
+ 240u, 247u, 4u, 5u, 12u, 13u, 20u, 21u,
28u, 29u, 36u, 37u, 44u, 45u, 68u, 76u,
84u, 92u, 100u, 108u, 132u, 140u, 148u, 156u,
164u, 172u, 0u, 47u, 64u, 111u, 128u, 175u,
@@ -12481,7 +12481,7 @@
136u, 143u, 144u, 151u, 152u, 159u, 160u, 167u,
168u, 175u, 176u, 183u, 184u, 191u, 192u, 199u,
200u, 207u, 208u, 215u, 216u, 223u, 224u, 231u,
- 240u, 247u, 248u, 255u, 4u, 5u, 20u, 21u,
+ 232u, 239u, 240u, 247u, 4u, 5u, 20u, 21u,
28u, 29u, 36u, 37u, 44u, 45u, 52u, 53u,
60u, 61u, 68u, 84u, 92u, 100u, 108u, 116u,
124u, 132u, 148u, 156u, 164u, 172u, 180u, 188u,
@@ -12697,7 +12697,7 @@
111u, 112u, 119u, 120u, 127u, 128u, 135u, 136u,
143u, 144u, 151u, 152u, 159u, 160u, 167u, 168u,
175u, 176u, 183u, 184u, 191u, 192u, 199u, 200u,
- 207u, 208u, 215u, 224u, 231u, 232u, 239u, 240u,
+ 207u, 216u, 223u, 224u, 231u, 232u, 239u, 240u,
247u, 248u, 255u, 4u, 5u, 12u, 13u, 20u,
21u, 28u, 29u, 36u, 37u, 44u, 45u, 52u,
53u, 60u, 61u, 68u, 76u, 84u, 92u, 100u,
@@ -12708,7 +12708,7 @@
87u, 88u, 95u, 96u, 103u, 104u, 111u, 112u,
119u, 120u, 127u, 128u, 135u, 136u, 143u, 144u,
151u, 152u, 159u, 160u, 167u, 168u, 175u, 176u,
- 183u, 184u, 191u, 200u, 207u, 208u, 215u, 216u,
+ 183u, 184u, 191u, 192u, 199u, 200u, 207u, 216u,
223u, 224u, 231u, 232u, 239u, 240u, 247u, 248u,
255u, 4u, 5u, 12u, 13u, 20u, 21u, 28u,
29u, 36u, 37u, 44u, 45u, 68u, 76u, 84u,
@@ -13712,7 +13712,7 @@
111u, 112u, 119u, 120u, 127u, 128u, 135u, 136u,
143u, 144u, 151u, 152u, 159u, 160u, 167u, 168u,
175u, 176u, 183u, 184u, 191u, 192u, 199u, 200u,
- 207u, 216u, 223u, 224u, 231u, 232u, 239u, 240u,
+ 207u, 208u, 215u, 216u, 223u, 232u, 239u, 240u,
247u, 248u, 255u, 192u, 239u, 4u, 5u, 68u,
132u, 0u, 7u, 64u, 71u, 128u, 135u, 192u,
199u, 4u, 5u, 12u, 13u, 20u, 21u, 28u,
@@ -13848,7 +13848,7 @@
104u, 111u, 112u, 119u, 120u, 127u, 128u, 135u,
136u, 143u, 144u, 151u, 152u, 159u, 160u, 167u,
168u, 175u, 176u, 183u, 184u, 191u, 192u, 199u,
- 200u, 207u, 208u, 215u, 216u, 223u, 232u, 239u,
+ 200u, 207u, 216u, 223u, 224u, 231u, 232u, 239u,
240u, 247u, 248u, 255u, 4u, 5u, 12u, 13u,
20u, 21u, 28u, 29u, 36u, 37u, 44u, 45u,
52u, 53u, 60u, 61u, 68u, 76u, 84u, 92u,
@@ -13860,7 +13860,7 @@
112u, 119u, 120u, 127u, 128u, 135u, 136u, 143u,
144u, 151u, 152u, 159u, 160u, 167u, 168u, 175u,
176u, 183u, 184u, 191u, 192u, 199u, 200u, 207u,
- 208u, 215u, 216u, 223u, 224u, 231u, 240u, 247u,
+ 208u, 215u, 224u, 231u, 232u, 239u, 240u, 247u,
248u, 255u, 192u, 239u, 4u, 5u, 12u, 13u,
20u, 21u, 28u, 29u, 36u, 37u, 44u, 45u,
60u, 61u, 68u, 76u, 84u, 92u, 100u, 108u,
@@ -26869,7 +26869,7 @@
1965, 1968, 1971, 1974, 1977, 1980, 1982, 1984,
1986, 1988, 1990, 1992, 1994, 1996, 1998, 2000,
2002, 2004, 2006, 2008, 2010, 2012, 2013, 2014,
- 2015, 2016, 2017, 2019, 2018, 2021, 2021, 2021,
+ 2015, 2016, 2018, 2019, 2017, 2021, 2021, 2021,
2021, 2021, 2021, 2021, 2021, 2021, 2021, 2021,
2021, 2021, 2021, 2021, 2021, 2021, 2021, 2021,
2021, 2021, 2021, 2021, 2021, 2021, 2021, 2021,
@@ -26881,7 +26881,7 @@
2028, 2031, 2034, 2037, 2040, 2043, 2046, 2048,
2050, 2052, 2054, 2056, 2058, 2060, 2062, 2064,
2066, 2068, 2070, 2072, 2074, 2076, 2078, 2079,
- 2080, 2081, 2082, 2083, 2085, 2084, 2087, 2088,
+ 2080, 2081, 2082, 2083, 2084, 2085, 2087, 2088,
2087, 2088, 2087, 2088, 2087, 2088, 2087, 2088,
2087, 2088, 2090, 2090, 2090, 2090, 2090, 2090,
2092, 2092, 2092, 2092, 2092, 2092, 2086, 2089,
@@ -26965,8 +26965,8 @@
2529, 2474, 2477, 2480, 2483, 2486, 2489, 2492,
2495, 2498, 2500, 2502, 2504, 2506, 2508, 2510,
2512, 2514, 2516, 2518, 2520, 2522, 2524, 2526,
- 2528, 2530, 2531, 2532, 2533, 2534, 2536, 2537,
- 2535, 2539, 2540, 2542, 2543, 2545, 2546, 2548,
+ 2528, 2530, 2531, 2532, 2533, 2534, 2535, 2536,
+ 2537, 2539, 2540, 2542, 2543, 2545, 2546, 2548,
2549, 2551, 2552, 2554, 2555, 2557, 2558, 2560,
2562, 2564, 2566, 2568, 2570, 2572, 2574, 2576,
2578, 2580, 2582, 2584, 2586, 2589, 2590, 2591,
@@ -27133,16 +27133,16 @@
3399, 3401, 3403, 3348, 3351, 3354, 3357, 3360,
3363, 3366, 3369, 3372, 3374, 3376, 3378, 3380,
3382, 3384, 3386, 3388, 3390, 3392, 3394, 3396,
- 3398, 3400, 3402, 3404, 3405, 3406, 3408, 3409,
- 3410, 3411, 3407, 3413, 3414, 3416, 3417, 3419,
+ 3398, 3400, 3402, 3404, 3405, 3407, 3408, 3409,
+ 3410, 3411, 3406, 3413, 3414, 3416, 3417, 3419,
3420, 3422, 3423, 3425, 3426, 3428, 3429, 3431,
3432, 3434, 3435, 3437, 3439, 3441, 3443, 3445,
3447, 3449, 3451, 3453, 3455, 3457, 3459, 3461,
3463, 3465, 3467, 3412, 3415, 3418, 3421, 3424,
3427, 3430, 3433, 3436, 3438, 3440, 3442, 3444,
3446, 3448, 3450, 3452, 3454, 3456, 3458, 3460,
- 3462, 3464, 3466, 3469, 3470, 3471, 3472, 3473,
- 3474, 3475, 3468, 2087, 2088, 2087, 2088, 2087,
+ 3462, 3464, 3466, 3468, 3469, 3471, 3472, 3473,
+ 3474, 3475, 3470, 2087, 2088, 2087, 2088, 2087,
2088, 2087, 2088, 2087, 2088, 2087, 2088, 2090,
2090, 2090, 2090, 2090, 2090, 2092, 2092, 2092,
2092, 2092, 2092, 2086, 2089, 2091, 3476, 52,
@@ -28084,8 +28084,8 @@
5670, 5615, 5618, 5621, 5624, 5627, 5630, 5633,
5636, 5639, 5641, 5643, 5645, 5647, 5649, 5651,
5653, 5655, 5657, 5659, 5661, 5663, 5665, 5667,
- 5669, 5671, 5672, 5674, 5675, 5676, 5677, 5678,
- 5673, 5679, 52, 5681, 5682, 5684, 5686, 5680,
+ 5669, 5671, 5672, 5673, 5674, 5676, 5677, 5678,
+ 5675, 5679, 52, 5681, 5682, 5684, 5686, 5680,
5683, 5685, 5687, 52, 5689, 5690, 5692, 5693,
5695, 5696, 5698, 5699, 5701, 5702, 5704, 5705,
5707, 5708, 5710, 5712, 5714, 5716, 5718, 5720,
@@ -28195,7 +28195,7 @@
6176, 6179, 6182, 6185, 6188, 6191, 6194, 6196,
6198, 6200, 6202, 6204, 6206, 6208, 6210, 6212,
6214, 6216, 6218, 6220, 6222, 6224, 6226, 6227,
- 6228, 6229, 6231, 6232, 6233, 6230, 6235, 6236,
+ 6229, 6230, 6231, 6232, 6233, 6228, 6235, 6236,
6238, 6239, 6241, 6242, 6244, 6245, 6247, 6248,
6250, 6251, 6253, 6254, 6256, 6257, 6259, 6261,
6263, 6265, 6267, 6269, 6271, 6273, 6275, 6277,
@@ -28203,7 +28203,7 @@
6240, 6243, 6246, 6249, 6252, 6255, 6258, 6260,
6262, 6264, 6266, 6268, 6270, 6272, 6274, 6276,
6278, 6280, 6282, 6284, 6286, 6288, 6290, 6291,
- 6292, 6293, 6294, 6296, 6297, 6295, 6298, 52,
+ 6292, 6294, 6295, 6296, 6297, 6293, 6298, 52,
6300, 6301, 6303, 6304, 6306, 6307, 6309, 6310,
6312, 6313, 6315, 6316, 6318, 6319, 6321, 6323,
6325, 6327, 6329, 6331, 6333, 6335, 6337, 6339,
@@ -43405,74 +43405,17 @@
-#define GET_REX_PREFIX() instruction.prefix.rex
-#define SET_REX_PREFIX(P) instruction.prefix.rex = (P)
-#define GET_VEX_PREFIX2() vex_prefix2
-#define SET_VEX_PREFIX2(P) vex_prefix2 = (P)
-#define GET_VEX_PREFIX3() vex_prefix3
-#define SET_VEX_PREFIX3(P) vex_prefix3 = (P)
-#define SET_DATA16_PREFIX(S) instruction.prefix.data16 = (S)
-#define SET_LOCK_PREFIX(S) instruction.prefix.lock = (S)
-#define SET_REPZ_PREFIX(S) instruction.prefix.repz = (S)
-#define SET_REPNZ_PREFIX(S) instruction.prefix.repnz = (S)
-#define SET_BRANCH_TAKEN(S) instruction.prefix.branch_taken = (S)
-#define SET_BRANCH_NOT_TAKEN(S) instruction.prefix.branch_not_taken = (S)
-#define SET_INSTRUCTION_NAME(N) instruction.name = (N)
-#define GET_OPERAND_NAME(N) instruction.operands[(N)].name
-#define SET_OPERAND_NAME(N, S) instruction.operands[(N)].name = (S)
-#define SET_OPERAND_TYPE(N, S) instruction.operands[(N)].type = (S)
-#define SET_OPERANDS_COUNT(N) instruction.operands_count = (N)
-#define SET_MODRM_BASE(N) instruction.rm.base = (N)
-#define SET_MODRM_INDEX(N) instruction.rm.index = (N)
-#define SET_MODRM_SCALE(S) instruction.rm.scale = (S)
-#define SET_DISP_TYPE(T) instruction.rm.disp_type = (T)
-#define SET_DISP_PTR(P) disp = (P)
-#define SET_IMM_TYPE(T) imm_operand = (T)
-#define SET_IMM_PTR(P) imm = (P)
-#define SET_IMM2_TYPE(T) imm2_operand = (T)
-#define SET_IMM2_PTR(P) imm2 = (P)
-#define SET_CPU_FEATURE(F)
-#define SET_ATT_INSTRUCTION_SUFFIX(S) instruction.att_instruction_suffix = (S)
-#define SET_SPURIOUS_DATA16() instruction.prefix.data16_spurious = TRUE;
-#define SET_SPURIOUS_REX_B() \
- if (GET_REX_PREFIX() & REX_B) instruction.prefix.rex_b_spurious = TRUE;
-#define SET_SPURIOUS_REX_X() \
- if (GET_REX_PREFIX() & REX_X) instruction.prefix.rex_x_spurious = TRUE;
-#define SET_SPURIOUS_REX_R() \
- if (GET_REX_PREFIX() & REX_R) instruction.prefix.rex_r_spurious = TRUE;
-#define SET_SPURIOUS_REX_W() \
- if (GET_REX_PREFIX() & REX_W) instruction.prefix.rex_w_spurious = TRUE;
-
-enum {
- REX_B = 1,
- REX_X = 2,
- REX_R = 4,
- REX_W = 8
-};
-
-enum imm_mode {
- IMMNONE,
- IMM2,
- IMM8,
- IMM16,
- IMM32,
- IMM64
-};
-
int DecodeChunkAMD64(const uint8_t *data, size_t size,
ProcessInstructionFunc process_instruction,
ProcessDecodingErrorFunc process_error,
void *userdata) {
const uint8_t *current_position = data;
const uint8_t *end_of_data = data + size;
- const uint8_t *disp = NULL;
- const uint8_t *imm = NULL;
- const uint8_t *imm2 = NULL;
const uint8_t *instruction_start = current_position;
uint8_t vex_prefix2 = 0xe0;
uint8_t vex_prefix3 = 0x00;
- enum imm_mode imm_operand = IMMNONE;
- enum imm_mode imm2_operand = IMMNONE;
+ enum ImmediateMode imm_operand = IMMNONE;
+ enum ImmediateMode imm2_operand = IMMNONE;
struct Instruction instruction;
int result = TRUE;
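
Note: the prefix-access macros and the REX_B/REX_X/REX_R/REX_W flag values are removed from the generated file (presumably now provided by decoder_internal.h, per the changed include). For orientation, a minimal sketch of how those flag values pick apart a REX prefix byte (0100WRXB); the helper name is illustrative and not part of the patch:

  #include <stdint.h>

  /* Flag values as defined in the removed enum. */
  enum { REX_B = 1, REX_X = 2, REX_R = 4, REX_W = 8 };

  /* A REX prefix byte is 0100WRXB, so its low four bits map directly
     onto the flag values above; this is the test the SET_SPURIOUS_REX_*
     macros perform via GET_REX_PREFIX(). */
  static int rex_has(uint8_t rex_prefix, int flag) {
    return (rex_prefix & flag) != 0;
  }
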
@@ -48001,52 +47944,6 @@
break;
case 1413:
{
- switch (instruction.rm.disp_type) {
- case DISPNONE: instruction.rm.offset = 0; break;
- case DISP8: instruction.rm.offset = (int8_t) *disp; break;
- case DISP16: instruction.rm.offset =
- (uint16_t) (disp[0] + 256U * disp[1]);
- break;
- case DISP32: instruction.rm.offset = (int32_t)
- (disp[0] + 256U * (disp[1] + 256U * (disp[2] + 256U * (disp[3]))));
- break;
- case DISP64: instruction.rm.offset = (int64_t)
- (*disp + 256ULL * (disp[1] + 256ULL * (disp[2] + 256ULL * (disp[3] +
- 256ULL * (disp[4] + 256ULL * (disp[5] + 256ULL * (disp[6] + 256ULL *
- disp[7])))))));
- break;
- }
- switch (imm_operand) {
- case IMMNONE: instruction.imm[0] = 0; break;
- case IMM2: instruction.imm[0] = imm[0] & 0x03; break;
- case IMM8: instruction.imm[0] = imm[0]; break;
- case IMM16: instruction.imm[0] = (uint64_t) (*imm + 256U * (imm[1]));
- break;
- case IMM32: instruction.imm[0] = (uint64_t)
- (imm[0] + 256U * (imm[1] + 256U * (imm[2] + 256U * (imm[3]))));
- break;
- case IMM64: instruction.imm[0] = (uint64_t)
- (imm[0] + 256LL * (imm[1] + 256ULL * (imm[2] + 256ULL * (imm[3] +
- 256ULL * (imm[4] + 256ULL * (imm[5] + 256ULL * (imm[6] + 256ULL *
- imm[7])))))));
- break;
- }
- switch (imm2_operand) {
- case IMMNONE: instruction.imm[1] = 0; break;
- case IMM2: instruction.imm[1] = imm2[0] & 0x03; break;
- case IMM8: instruction.imm[1] = imm2[0]; break;
- case IMM16: instruction.imm[1] = (uint64_t)
- (imm2[0] + 256U * (imm2[1]));
- break;
- case IMM32: instruction.imm[1] = (uint64_t)
- (imm2[0] + 256U * (imm2[1] + 256U * (imm2[2] + 256U * (imm2[3]))));
- break;
- case IMM64: instruction.imm[1] = (uint64_t)
- (*imm2 + 256ULL * (imm2[1] + 256ULL * (imm2[2] + 256ULL * (imm2[3] +
- 256ULL * (imm2[4] + 256ULL * (imm2[5] + 256ULL * (imm2[6] + 256ULL *
- imm2[7])))))));
- break;
- }
process_instruction(instruction_start, current_position+1, &instruction,
userdata);
instruction_start = current_position + 1;
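
Note: the block removed above assembled multi-byte displacements and immediates in little-endian byte order. A standalone sketch of the same 32-bit assembly, mirroring the deleted DISP32/IMM32 cases (the function name is illustrative, not part of the patch):

  #include <stdint.h>

  /* Combine four little-endian bytes into one 32-bit value, using the
     same arithmetic as the removed DISP32/IMM32 cases. */
  static uint32_t read_le32(const uint8_t *p) {
    return p[0] + 256U * (p[1] + 256U * (p[2] + 256U * p[3]));
  }
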
@@ -48060,14 +47957,15 @@
SET_REPZ_PREFIX(FALSE);
SET_BRANCH_NOT_TAKEN(FALSE);
SET_BRANCH_TAKEN(FALSE);
+ /* Top three bits of VEX2 are inverted: see AMD/Intel manual. */
SET_VEX_PREFIX2(0xe0);
SET_VEX_PREFIX3(0x00);
SET_ATT_INSTRUCTION_SUFFIX(NULL);
- instruction.prefix.data16_spurious = FALSE;
- instruction.prefix.rex_b_spurious = FALSE;
- instruction.prefix.rex_x_spurious = FALSE;
- instruction.prefix.rex_r_spurious = FALSE;
- instruction.prefix.rex_w_spurious = FALSE;
+ CLEAR_SPURIOUS_DATA16();
+ CLEAR_SPURIOUS_REX_B();
+ CLEAR_SPURIOUS_REX_X();
+ CLEAR_SPURIOUS_REX_R();
+ CLEAR_SPURIOUS_REX_W();
}
break;
case 1414:
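
Note: the new comment explains why the decoder resets vex_prefix2 to 0xe0 rather than 0: the top three bits of the VEX prefix byte are stored inverted. A hedged sketch, assuming vex_prefix2 holds the second byte of the three-byte (C4) VEX form, as its 0xe0 reset value suggests; the helpers are illustrative only:

  #include <stdint.h>

  /* Second byte of a three-byte VEX prefix: ~R ~X ~B m-mmmm.
     Because the top three bits are stored inverted, the neutral value
     0xe0 decodes to REX.R = REX.X = REX.B = 0. */
  static int vex2_rex_r(uint8_t vex2) { return !(vex2 & 0x80); }
  static int vex2_rex_x(uint8_t vex2) { return !(vex2 & 0x40); }
  static int vex2_rex_b(uint8_t vex2) { return !(vex2 & 0x20); }
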
