diff -c /Users/john/Development/gtk-sources/libgcrypt-1.5.3/cipher/rijndael.c\~ /Users/john/Development/gtk-sources/libgcrypt-1.5.3/cipher/rijndael.c
--- a/cipher/rijndael.c Tue Oct 15 11:09:37 2013
+++ b/cipher/rijndael.c Tue Oct 15 11:53:22 2013
@@ -725,13 +725,13 @@
"movdqa 0x90(%%esi), %%xmm1\n\t"
aesenc_xmm1_xmm0
"movdqa 0xa0(%%esi), %%xmm1\n\t"
- "cmp $10, %[rounds]\n\t"
+ "cmpl $10, %[rounds]\n\t"
"jz .Lenclast%=\n\t"
aesenc_xmm1_xmm0
"movdqa 0xb0(%%esi), %%xmm1\n\t"
aesenc_xmm1_xmm0
"movdqa 0xc0(%%esi), %%xmm1\n\t"
- "cmp $12, %[rounds]\n\t"
+ "cmpl $12, %[rounds]\n\t"
"jz .Lenclast%=\n\t"
aesenc_xmm1_xmm0
"movdqa 0xd0(%%esi), %%xmm1\n\t"
@@ -780,13 +780,13 @@
"movdqa 0x90(%%esi), %%xmm1\n\t"
aesdec_xmm1_xmm0
"movdqa 0xa0(%%esi), %%xmm1\n\t"
- "cmp $10, %[rounds]\n\t"
+ "cmpl $10, %[rounds]\n\t"
"jz .Ldeclast%=\n\t"
aesdec_xmm1_xmm0
"movdqa 0xb0(%%esi), %%xmm1\n\t"
aesdec_xmm1_xmm0
"movdqa 0xc0(%%esi), %%xmm1\n\t"
- "cmp $12, %[rounds]\n\t"
+ "cmpl $12, %[rounds]\n\t"
"jz .Ldeclast%=\n\t"
aesdec_xmm1_xmm0
"movdqa 0xd0(%%esi), %%xmm1\n\t"
@@ -839,13 +839,13 @@
"movdqa 0x90(%%esi), %%xmm1\n\t"
aesenc_xmm1_xmm0
"movdqa 0xa0(%%esi), %%xmm1\n\t"
- "cmp $10, %[rounds]\n\t"
+ "cmpl $10, %[rounds]\n\t"
"jz .Lenclast%=\n\t"
aesenc_xmm1_xmm0
"movdqa 0xb0(%%esi), %%xmm1\n\t"
aesenc_xmm1_xmm0
"movdqa 0xc0(%%esi), %%xmm1\n\t"
- "cmp $12, %[rounds]\n\t"
+ "cmpl $12, %[rounds]\n\t"
"jz .Lenclast%=\n\t"
aesenc_xmm1_xmm0
"movdqa 0xd0(%%esi), %%xmm1\n\t"
@@ -857,7 +857,7 @@
"movdqu %[src], %%xmm1\n\t" /* Save input. */
"pxor %%xmm1, %%xmm0\n\t" /* xmm0 = input ^ IV */
- "cmp $1, %[decrypt]\n\t"
+ "cmpl $1, %[decrypt]\n\t"
"jz .Ldecrypt_%=\n\t"
"movdqa %%xmm0, %[iv]\n\t" /* [encrypt] Store IV. */
"jmp .Lleave_%=\n"
@@ -918,13 +918,13 @@
"movdqa 0x90(%%esi), %%xmm1\n\t"
aesenc_xmm1_xmm0
"movdqa 0xa0(%%esi), %%xmm1\n\t"
- "cmp $10, %[rounds]\n\t"
+ "cmpl $10, %[rounds]\n\t"
"jz .Lenclast%=\n\t"
aesenc_xmm1_xmm0
"movdqa 0xb0(%%esi), %%xmm1\n\t"
aesenc_xmm1_xmm0
"movdqa 0xc0(%%esi), %%xmm1\n\t"
- "cmp $12, %[rounds]\n\t"
+ "cmpl $12, %[rounds]\n\t"
"jz .Lenclast%=\n\t"
aesenc_xmm1_xmm0
"movdqa 0xd0(%%esi), %%xmm1\n\t"
@@ -1045,7 +1045,7 @@
aesenc_xmm1_xmm3
aesenc_xmm1_xmm4
"movdqa 0xa0(%%esi), %%xmm1\n\t"
- "cmp $10, %[rounds]\n\t"
+ "cmpl $10, %[rounds]\n\t"
"jz .Lenclast%=\n\t"
aesenc_xmm1_xmm0
aesenc_xmm1_xmm2
@@ -1057,7 +1057,7 @@
aesenc_xmm1_xmm3
aesenc_xmm1_xmm4
"movdqa 0xc0(%%esi), %%xmm1\n\t"
- "cmp $12, %[rounds]\n\t"
+ "cmpl $12, %[rounds]\n\t"
"jz .Lenclast%=\n\t"
aesenc_xmm1_xmm0
aesenc_xmm1_xmm2
Diff finished. Tue Oct 15 11:53:31 2013
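
For context, the change only adds an explicit operand-size suffix: with an immediate compared against a memory operand, plain "cmp" is ambiguous in AT&T syntax, and stricter assemblers such as clang's integrated assembler refuse to guess, while "cmpl" pins the compare to 32 bits. The stand-alone sketch below (an illustration of the same pattern, not code taken from libgcrypt) shows the suffixed compare in isolation; the variable name "rounds" is only illustrative.

/*
 * Minimal sketch of the pattern the patch adopts: compare an immediate
 * against a 32-bit memory operand with an explicit "l" size suffix.
 */
#include <stdio.h>

int
main (void)
{
  unsigned int rounds = 10;
  unsigned int is_ten;

  __asm__ ("cmpl $10, %[rounds]\n\t"      /* explicit 32-bit compare      */
           "sete %b[out]\n\t"             /* out = (rounds == 10)         */
           "movzbl %b[out], %[out]\n\t"   /* zero-extend result to 32 bit */
           : [out] "=&q" (is_ten)
           : [rounds] "m" (rounds)
           : "cc");

  printf ("rounds == 10: %u\n", is_ten);
  return 0;
}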