VirtualBox

source: vbox/trunk/src/libs/openssl-1.1.1l/crypto/genasm-elf/ghash-x86.S@ 91977

Last change on this file since 91977 was 83531, checked in by vboxsync, 5 years ago

setting svn:sync-process=export for openssl-1.1.1f, all files except tests

File size: 12.9 KB
 
.text
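# gcm_gmult_4bit_x86(Xi, Htable): plain IA-32 integer path.
# Multiplies the 16-byte accumulator Xi by the hash key H in GF(2^128)
# using the 4-bit (Shoup) lookup table Htable; the rem_4bit reduction
# constants are staged on the stack at 16(%esp).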
.globl gcm_gmult_4bit_x86
.type gcm_gmult_4bit_x86,@function
.align 16
gcm_gmult_4bit_x86:
.L_gcm_gmult_4bit_x86_begin:
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi
	subl $84,%esp
	movl 104(%esp),%edi
	movl 108(%esp),%esi
	movl (%edi),%ebp
	movl 4(%edi),%edx
	movl 8(%edi),%ecx
	movl 12(%edi),%ebx
	movl $0,16(%esp)
	movl $471859200,20(%esp)
	movl $943718400,24(%esp)
	movl $610271232,28(%esp)
	movl $1887436800,32(%esp)
	movl $1822425088,36(%esp)
	movl $1220542464,40(%esp)
	movl $1423966208,44(%esp)
	movl $3774873600,48(%esp)
	movl $4246732800,52(%esp)
	movl $3644850176,56(%esp)
	movl $3311403008,60(%esp)
	movl $2441084928,64(%esp)
	movl $2376073216,68(%esp)
	movl $2847932416,72(%esp)
	movl $3051356160,76(%esp)
	movl %ebp,(%esp)
	movl %edx,4(%esp)
	movl %ecx,8(%esp)
	movl %ebx,12(%esp)
	shrl $20,%ebx
	andl $240,%ebx
	movl 4(%esi,%ebx,1),%ebp
	movl (%esi,%ebx,1),%edx
	movl 12(%esi,%ebx,1),%ecx
	movl 8(%esi,%ebx,1),%ebx
	xorl %eax,%eax
	movl $15,%edi
	jmp .L000x86_loop
.align 16
.L000x86_loop:
	movb %bl,%al
	shrdl $4,%ecx,%ebx
	andb $15,%al
	shrdl $4,%edx,%ecx
	shrdl $4,%ebp,%edx
	shrl $4,%ebp
	xorl 16(%esp,%eax,4),%ebp
	movb (%esp,%edi,1),%al
	andb $240,%al
	xorl 8(%esi,%eax,1),%ebx
	xorl 12(%esi,%eax,1),%ecx
	xorl (%esi,%eax,1),%edx
	xorl 4(%esi,%eax,1),%ebp
	decl %edi
	js .L001x86_break
	movb %bl,%al
	shrdl $4,%ecx,%ebx
	andb $15,%al
	shrdl $4,%edx,%ecx
	shrdl $4,%ebp,%edx
	shrl $4,%ebp
	xorl 16(%esp,%eax,4),%ebp
	movb (%esp,%edi,1),%al
	shlb $4,%al
	xorl 8(%esi,%eax,1),%ebx
	xorl 12(%esi,%eax,1),%ecx
	xorl (%esi,%eax,1),%edx
	xorl 4(%esi,%eax,1),%ebp
	jmp .L000x86_loop
.align 16
.L001x86_break:
	bswap %ebx
	bswap %ecx
	bswap %edx
	bswap %ebp
	movl 104(%esp),%edi
	movl %ebx,12(%edi)
	movl %ecx,8(%edi)
	movl %edx,4(%edi)
	movl %ebp,(%edi)
	addl $84,%esp
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
.size gcm_gmult_4bit_x86,.-.L_gcm_gmult_4bit_x86_begin
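# gcm_ghash_4bit_x86(Xi, Htable, inp, len): plain IA-32 integer path.
# For each 16-byte block of inp (len is a multiple of 16):
# Xi ^= block, then Xi = Xi * H via the same 4-bit table algorithm.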
.globl gcm_ghash_4bit_x86
.type gcm_ghash_4bit_x86,@function
.align 16
gcm_ghash_4bit_x86:
.L_gcm_ghash_4bit_x86_begin:
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi
	subl $84,%esp
	movl 104(%esp),%ebx
	movl 108(%esp),%esi
	movl 112(%esp),%edi
	movl 116(%esp),%ecx
	addl %edi,%ecx
	movl %ecx,116(%esp)
	movl (%ebx),%ebp
	movl 4(%ebx),%edx
	movl 8(%ebx),%ecx
	movl 12(%ebx),%ebx
	movl $0,16(%esp)
	movl $471859200,20(%esp)
	movl $943718400,24(%esp)
	movl $610271232,28(%esp)
	movl $1887436800,32(%esp)
	movl $1822425088,36(%esp)
	movl $1220542464,40(%esp)
	movl $1423966208,44(%esp)
	movl $3774873600,48(%esp)
	movl $4246732800,52(%esp)
	movl $3644850176,56(%esp)
	movl $3311403008,60(%esp)
	movl $2441084928,64(%esp)
	movl $2376073216,68(%esp)
	movl $2847932416,72(%esp)
	movl $3051356160,76(%esp)
.align 16
.L002x86_outer_loop:
	xorl 12(%edi),%ebx
	xorl 8(%edi),%ecx
	xorl 4(%edi),%edx
	xorl (%edi),%ebp
	movl %ebx,12(%esp)
	movl %ecx,8(%esp)
	movl %edx,4(%esp)
	movl %ebp,(%esp)
	shrl $20,%ebx
	andl $240,%ebx
	movl 4(%esi,%ebx,1),%ebp
	movl (%esi,%ebx,1),%edx
	movl 12(%esi,%ebx,1),%ecx
	movl 8(%esi,%ebx,1),%ebx
	xorl %eax,%eax
	movl $15,%edi
	jmp .L003x86_loop
.align 16
.L003x86_loop:
	movb %bl,%al
	shrdl $4,%ecx,%ebx
	andb $15,%al
	shrdl $4,%edx,%ecx
	shrdl $4,%ebp,%edx
	shrl $4,%ebp
	xorl 16(%esp,%eax,4),%ebp
	movb (%esp,%edi,1),%al
	andb $240,%al
	xorl 8(%esi,%eax,1),%ebx
	xorl 12(%esi,%eax,1),%ecx
	xorl (%esi,%eax,1),%edx
	xorl 4(%esi,%eax,1),%ebp
	decl %edi
	js .L004x86_break
	movb %bl,%al
	shrdl $4,%ecx,%ebx
	andb $15,%al
	shrdl $4,%edx,%ecx
	shrdl $4,%ebp,%edx
	shrl $4,%ebp
	xorl 16(%esp,%eax,4),%ebp
	movb (%esp,%edi,1),%al
	shlb $4,%al
	xorl 8(%esi,%eax,1),%ebx
	xorl 12(%esi,%eax,1),%ecx
	xorl (%esi,%eax,1),%edx
	xorl 4(%esi,%eax,1),%ebp
	jmp .L003x86_loop
.align 16
.L004x86_break:
	bswap %ebx
	bswap %ecx
	bswap %edx
	bswap %ebp
	movl 112(%esp),%edi
	leal 16(%edi),%edi
	cmpl 116(%esp),%edi
	movl %edi,112(%esp)
	jb .L002x86_outer_loop
	movl 104(%esp),%edi
	movl %ebx,12(%edi)
	movl %ecx,8(%edi)
	movl %edx,4(%edi)
	movl %ebp,(%edi)
	addl $84,%esp
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
.size gcm_ghash_4bit_x86,.-.L_gcm_ghash_4bit_x86_begin
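# _mmx_gmult_4bit_inner: internal helper with a non-standard calling
# convention. Expects %esi = Htable, %edi = the 16-byte block to multiply,
# %eax = &.Lrem_4bit, %ebx = last byte of the block; returns the updated
# block in %ebp/%edx/%ecx/%ebx (bytes 0-3/4-7/8-11/12-15, already
# byte-swapped) and leaves MMX state dirty, so callers execute emms.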
.type _mmx_gmult_4bit_inner,@function
.align 16
_mmx_gmult_4bit_inner:
	xorl %ecx,%ecx
	movl %ebx,%edx
	movb %dl,%cl
	shlb $4,%cl
	andl $240,%edx
	movq 8(%esi,%ecx,1),%mm0
	movq (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 14(%edi),%cl
	psllq $60,%mm2
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 13(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 12(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 11(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 10(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 9(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 8(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 7(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 6(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 5(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 4(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 3(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 2(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb 1(%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	movb (%edi),%cl
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movl %ecx,%edx
	movd %mm0,%ebx
	pxor %mm2,%mm0
	shlb $4,%cl
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%ecx,1),%mm0
	psllq $60,%mm2
	andl $240,%edx
	pxor (%eax,%ebp,8),%mm1
	andl $15,%ebx
	pxor (%esi,%ecx,1),%mm1
	movd %mm0,%ebp
	pxor %mm2,%mm0
	psrlq $4,%mm0
	movq %mm1,%mm2
	psrlq $4,%mm1
	pxor 8(%esi,%edx,1),%mm0
	psllq $60,%mm2
	pxor (%eax,%ebx,8),%mm1
	andl $15,%ebp
	pxor (%esi,%edx,1),%mm1
	movd %mm0,%ebx
	pxor %mm2,%mm0
	movl 4(%eax,%ebp,8),%edi
	psrlq $32,%mm0
	movd %mm1,%edx
	psrlq $32,%mm1
	movd %mm0,%ecx
	movd %mm1,%ebp
	shll $4,%edi
	bswap %ebx
	bswap %edx
	bswap %ecx
	xorl %edi,%ebp
	bswap %ebp
	ret
.size _mmx_gmult_4bit_inner,.-_mmx_gmult_4bit_inner
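# gcm_gmult_4bit_mmx(Xi, Htable): MMX single-block multiply.
# Locates .Lrem_4bit position-independently via call/pop, delegates to
# _mmx_gmult_4bit_inner, stores the result back into Xi and runs emms.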
.globl gcm_gmult_4bit_mmx
.type gcm_gmult_4bit_mmx,@function
.align 16
gcm_gmult_4bit_mmx:
.L_gcm_gmult_4bit_mmx_begin:
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi
	movl 20(%esp),%edi
	movl 24(%esp),%esi
	call .L005pic_point
.L005pic_point:
	popl %eax
	leal .Lrem_4bit-.L005pic_point(%eax),%eax
	movzbl 15(%edi),%ebx
	call _mmx_gmult_4bit_inner
	movl 20(%esp),%edi
	emms
	movl %ebx,12(%edi)
	movl %edx,4(%edi)
	movl %ecx,8(%edi)
	movl %ebp,(%edi)
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
.size gcm_gmult_4bit_mmx,.-.L_gcm_gmult_4bit_mmx_begin
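# gcm_ghash_4bit_mmx(Xi, Htable, inp, len): MMX bulk hash.
# XORs each 16-byte input block into Xi and multiplies by H via
# _mmx_gmult_4bit_inner until inp reaches inp+len.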
.globl gcm_ghash_4bit_mmx
.type gcm_ghash_4bit_mmx,@function
.align 16
gcm_ghash_4bit_mmx:
.L_gcm_ghash_4bit_mmx_begin:
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi
	movl 20(%esp),%ebp
	movl 24(%esp),%esi
	movl 28(%esp),%edi
	movl 32(%esp),%ecx
	call .L006pic_point
.L006pic_point:
	popl %eax
	leal .Lrem_4bit-.L006pic_point(%eax),%eax
	addl %edi,%ecx
	movl %ecx,32(%esp)
	subl $20,%esp
	movl 12(%ebp),%ebx
	movl 4(%ebp),%edx
	movl 8(%ebp),%ecx
	movl (%ebp),%ebp
	jmp .L007mmx_outer_loop
.align 16
.L007mmx_outer_loop:
	xorl 12(%edi),%ebx
	xorl 4(%edi),%edx
	xorl 8(%edi),%ecx
	xorl (%edi),%ebp
	movl %edi,48(%esp)
	movl %ebx,12(%esp)
	movl %edx,4(%esp)
	movl %ecx,8(%esp)
	movl %ebp,(%esp)
	movl %esp,%edi
	shrl $24,%ebx
	call _mmx_gmult_4bit_inner
	movl 48(%esp),%edi
	leal 16(%edi),%edi
	cmpl 52(%esp),%edi
	jb .L007mmx_outer_loop
	movl 40(%esp),%edi
	emms
	movl %ebx,12(%edi)
	movl %edx,4(%edi)
	movl %ecx,8(%edi)
	movl %ebp,(%edi)
	addl $20,%esp
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
.size gcm_ghash_4bit_mmx,.-.L_gcm_ghash_4bit_mmx_begin
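# .Lrem_4bit: sixteen 64-bit reduction constants, indexed by the nibble
# shifted out at each step of the MMX path. The trailing .byte data is the
# string "GHASH for x86, CRYPTOGAMS by <appro@openssl.org>".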
.align 64
.Lrem_4bit:
.long 0,0,0,29491200,0,58982400,0,38141952
.long 0,117964800,0,113901568,0,76283904,0,88997888
.long 0,235929600,0,265420800,0,227803136,0,206962688
.long 0,152567808,0,148504576,0,177995776,0,190709760
.byte 71,72,65,83,72,32,102,111,114,32,120,56,54,44,32,67
.byte 82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112
.byte 112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62
.byte 0
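For reference, the exported routines follow the calling convention of the 4-bit GHASH hooks in OpenSSL's crypto/modes/gcm128.c: Xi is the 16-byte accumulator, Htable holds the 16 precomputed multiples of the hash key H, and the ghash variants fold len bytes (a multiple of 16) from inp into Xi. The C sketch below is an illustrative caller under those assumptions, not code from this tree; the u128 typedef and the ghash_update/have_mmx helper are hypothetical.

/* Hypothetical C-side view of the routines defined in ghash-x86.S.
 * Prototypes mirror the 4-bit GHASH hooks used by OpenSSL's gcm128.c;
 * they are assumptions for illustration, not copied from a header. */
#include <stddef.h>
#include <stdint.h>

typedef struct { uint64_t hi, lo; } u128;   /* one precomputed multiple of H */

/* Plain IA-32 integer versions. */
void gcm_gmult_4bit_x86(uint64_t Xi[2], const u128 Htable[16]);
void gcm_ghash_4bit_x86(uint64_t Xi[2], const u128 Htable[16],
                        const uint8_t *inp, size_t len);

/* MMX versions; they execute EMMS before returning, but the caller must
 * ensure MMX is actually available. */
void gcm_gmult_4bit_mmx(uint64_t Xi[2], const u128 Htable[16]);
void gcm_ghash_4bit_mmx(uint64_t Xi[2], const u128 Htable[16],
                        const uint8_t *inp, size_t len);

/* Example: absorb len bytes (a multiple of 16) into the accumulator Xi,
 * picking the MMX path when the caller has detected MMX support. */
void ghash_update(uint64_t Xi[2], const u128 Htable[16],
                  const uint8_t *inp, size_t len, int have_mmx)
{
    if (have_mmx)
        gcm_ghash_4bit_mmx(Xi, Htable, inp, len);
    else
        gcm_ghash_4bit_x86(Xi, Htable, inp, len);
}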