Annotation of sys/lib/libkern/arch/hppa64/milli.S, Revision 1.1.1.1
1.1 nbrk 1: ; $OpenBSD: milli.S,v 1.1 2005/04/01 10:45:29 mickey Exp $
2: ;
3: ; (c) Copyright 1986 HEWLETT-PACKARD COMPANY
4: ;
5: ; To anyone who acknowledges that this file is provided "AS IS"
6: ; without any express or implied warranty:
7: ; permission to use, copy, modify, and distribute this file
8: ; for any purpose is hereby granted without fee, provided that
9: ; the above copyright notice and this notice appears in all
10: ; copies, and that the name of Hewlett-Packard Company not be
11: ; used in advertising or publicity pertaining to distribution
12: ; of the software without specific, written prior permission.
13: ; Hewlett-Packard Company makes no representations about the
14: ; suitability of this software for any purpose.
15: ;
16:
;-----------------------------------------------------------------------
; $$remI -- 32-bit signed remainder millicode.
; In:   %arg0 = dividend, %arg1 = divisor
; Out:  %ret1 = %arg0 rem %arg1 (sign follows the dividend)
; Uses: %r1 as the running partial remainder; conditionally traps
;       when the divisor is zero (ADDIT below).
; NOTE(review): classic PA-RISC non-restoring divide built from 32
; DS (divide step) instructions; nullification details in the fix-up
; tail should be verified against the PA-RISC architecture manual
; before any change.
;-----------------------------------------------------------------------
17: .text
18: .EXPORT $$remI,millicode
19: $$remI:
20: .PROC
21: .CALLINFO NO_CALLS
22: .ENTRY
23: addit,= 0,%arg1,%r0 ; conditional trap: divisor == 0
24: add,>= %r0,%arg0,%ret1 ; %ret1 = dividend; skip negate when >= 0
25: sub %r0,%ret1,%ret1 ; %ret1 = |dividend|
26: sub %r0,%arg1,%r1 ; %r1 = -divisor (also clears carry)
27: ds %r0,%r1,%r0 ; prime the V (divide-step) bit
28: or %r0,%r0,%r1 ; %r1 = 0: running partial remainder
29: add %ret1,%ret1,%ret1 ; shift |dividend| msb into carry
; 32 divide steps follow: each DS advances the partial remainder in
; %r1, each ADDC shifts the next dividend bit in through the carry.
30: ds %r1,%arg1,%r1
31: addc %ret1,%ret1,%ret1
32: ds %r1,%arg1,%r1
33: addc %ret1,%ret1,%ret1
34: ds %r1,%arg1,%r1
35: addc %ret1,%ret1,%ret1
36: ds %r1,%arg1,%r1
37: addc %ret1,%ret1,%ret1
38: ds %r1,%arg1,%r1
39: addc %ret1,%ret1,%ret1
40: ds %r1,%arg1,%r1
41: addc %ret1,%ret1,%ret1
42: ds %r1,%arg1,%r1
43: addc %ret1,%ret1,%ret1
44: ds %r1,%arg1,%r1
45: addc %ret1,%ret1,%ret1
46: ds %r1,%arg1,%r1
47: addc %ret1,%ret1,%ret1
48: ds %r1,%arg1,%r1
49: addc %ret1,%ret1,%ret1
50: ds %r1,%arg1,%r1
51: addc %ret1,%ret1,%ret1
52: ds %r1,%arg1,%r1
53: addc %ret1,%ret1,%ret1
54: ds %r1,%arg1,%r1
55: addc %ret1,%ret1,%ret1
56: ds %r1,%arg1,%r1
57: addc %ret1,%ret1,%ret1
58: ds %r1,%arg1,%r1
59: addc %ret1,%ret1,%ret1
60: ds %r1,%arg1,%r1
61: addc %ret1,%ret1,%ret1
62: ds %r1,%arg1,%r1
63: addc %ret1,%ret1,%ret1
64: ds %r1,%arg1,%r1
65: addc %ret1,%ret1,%ret1
66: ds %r1,%arg1,%r1
67: addc %ret1,%ret1,%ret1
68: ds %r1,%arg1,%r1
69: addc %ret1,%ret1,%ret1
70: ds %r1,%arg1,%r1
71: addc %ret1,%ret1,%ret1
72: ds %r1,%arg1,%r1
73: addc %ret1,%ret1,%ret1
74: ds %r1,%arg1,%r1
75: addc %ret1,%ret1,%ret1
76: ds %r1,%arg1,%r1
77: addc %ret1,%ret1,%ret1
78: ds %r1,%arg1,%r1
79: addc %ret1,%ret1,%ret1
80: ds %r1,%arg1,%r1
81: addc %ret1,%ret1,%ret1
82: ds %r1,%arg1,%r1
83: addc %ret1,%ret1,%ret1
84: ds %r1,%arg1,%r1
85: addc %ret1,%ret1,%ret1
86: ds %r1,%arg1,%r1
87: addc %ret1,%ret1,%ret1
88: ds %r1,%arg1,%r1
89: addc %ret1,%ret1,%ret1
90: ds %r1,%arg1,%r1
91: addc %ret1,%ret1,%ret1
92: ds %r1,%arg1,%r1
93: addc %ret1,%ret1,%ret1
; fix-up: bring the remainder into range for either divisor sign,
; then restore the sign of the original dividend.
94: movb,>=,n %r1,%ret1,remI300 ; remainder already non-negative: done correcting
95: add,< %arg1,%r0,%r0 ; divisor < 0? nullify the add below
96: add,tr %r1,%arg1,%ret1 ; correction for positive divisor (,tr skips the sub)
97: sub %r1,%arg1,%ret1 ; correction for negative divisor
98: remI300: add,>= %arg0,%r0,%r0 ; dividend was non-negative? skip the final negate
99:
100: bv %r0(%rp) ; return to caller
101: sub %r0,%ret1,%ret1 ; (delay slot) negate remainder for negative dividends
102: .EXIT
103: .PROCEND
104:
;-----------------------------------------------------------------------
; $$divU -- 32-bit unsigned divide millicode.
; In:   %arg0 = dividend, %arg1 = divisor
; Out:  %ret1 = quotient
; Divisors 0..15 are dispatched through a BLR branch table to the
; constant-divisor millicode routines; divisors with the high bit set
; take big_divisor; everything else runs 32 DS divide steps.
; Traps on divide-by-zero (ADDIT in the dispatch table).
;-----------------------------------------------------------------------
105: .export $$divU,millicode
106: .import $$divU_3,millicode
107: .import $$divU_5,millicode
108: .import $$divU_6,millicode
109: .import $$divU_7,millicode
110: .import $$divU_9,millicode
111: .import $$divU_10,millicode
112: .import $$divU_12,millicode
113: .import $$divU_14,millicode
114: .import $$divU_15,millicode
115: $$divU:
116: .proc
117: .callinfo NO_CALLS
118: ; The subtract is not nullified since it does no harm and can be used
119: ; by the two cases that branch back to "normal".
120: comib,>= 15,%arg1,special_divisor ; small divisor (<= 15)? pick a special case
121: sub %r0,%arg1,%r1 ; clear carry, negate the divisor
122: ds %r0,%r1,%r0 ; set V-bit to 1
123: normal:
124: add %arg0,%arg0,%ret1 ; shift msb bit into carry
125: ds %r0,%arg1,%r1 ; 1st divide step, if no carry
126: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
127: ds %r1,%arg1,%r1 ; 2nd divide step
128: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
129: ds %r1,%arg1,%r1 ; 3rd divide step
130: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
131: ds %r1,%arg1,%r1 ; 4th divide step
132: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
133: ds %r1,%arg1,%r1 ; 5th divide step
134: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
135: ds %r1,%arg1,%r1 ; 6th divide step
136: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
137: ds %r1,%arg1,%r1 ; 7th divide step
138: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
139: ds %r1,%arg1,%r1 ; 8th divide step
140: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
141: ds %r1,%arg1,%r1 ; 9th divide step
142: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
143: ds %r1,%arg1,%r1 ; 10th divide step
144: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
145: ds %r1,%arg1,%r1 ; 11th divide step
146: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
147: ds %r1,%arg1,%r1 ; 12th divide step
148: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
149: ds %r1,%arg1,%r1 ; 13th divide step
150: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
151: ds %r1,%arg1,%r1 ; 14th divide step
152: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
153: ds %r1,%arg1,%r1 ; 15th divide step
154: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
155: ds %r1,%arg1,%r1 ; 16th divide step
156: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
157: ds %r1,%arg1,%r1 ; 17th divide step
158: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
159: ds %r1,%arg1,%r1 ; 18th divide step
160: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
161: ds %r1,%arg1,%r1 ; 19th divide step
162: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
163: ds %r1,%arg1,%r1 ; 20th divide step
164: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
165: ds %r1,%arg1,%r1 ; 21st divide step
166: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
167: ds %r1,%arg1,%r1 ; 22nd divide step
168: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
169: ds %r1,%arg1,%r1 ; 23rd divide step
170: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
171: ds %r1,%arg1,%r1 ; 24th divide step
172: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
173: ds %r1,%arg1,%r1 ; 25th divide step
174: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
175: ds %r1,%arg1,%r1 ; 26th divide step
176: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
177: ds %r1,%arg1,%r1 ; 27th divide step
178: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
179: ds %r1,%arg1,%r1 ; 28th divide step
180: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
181: ds %r1,%arg1,%r1 ; 29th divide step
182: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
183: ds %r1,%arg1,%r1 ; 30th divide step
184: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
185: ds %r1,%arg1,%r1 ; 31st divide step
186: addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
187: ds %r1,%arg1,%r1 ; 32nd divide step,
188: bv 0(%rp)
189: addc %ret1,%ret1,%ret1 ; shift last %ret1 bit into %ret1
190: ;_____________________________________________________________________________
191: ; handle the cases where divisor is a small constant or has high bit on
192: special_divisor:
; NOTE(review): DEPD of %r0 into the upper word looks like a 64-bit
; (hppa64) cleanup of %arg1 before the signed-style compare -- confirm
; against the original hppa64 milli.S before touching.
193: depd %r0,31,32,%arg1
194:
195: comib,>,n 0,%arg1,big_divisor ; nullify previous instruction
196: nop
; vectored dispatch: each divisor case below is exactly two
; instructions (one BLR slot).
197: blr %arg1,%r0
198: nop
199: zero_divisor: ; this label is here to provide external visibility
200:
201: addit,= 0,%arg1,0 ; trap for zero dvr
202: nop
203: bv 0(%rp) ; divisor == 1
204: copy %arg0,%ret1
205: bv 0(%rp) ; divisor == 2
206: extru %arg0,30,31,%ret1
207: b,n $$divU_3 ; divisor == 3
208: nop
209: bv 0(%rp) ; divisor == 4
210: extru %arg0,29,30,%ret1
211: b,n $$divU_5 ; divisor == 5
212: nop
213: b,n $$divU_6 ; divisor == 6
214: nop
215: b,n $$divU_7 ; divisor == 7
216: nop
217: bv 0(%rp) ; divisor == 8
218: extru %arg0,28,29,%ret1
219: b,n $$divU_9 ; divisor == 9
220: nop
221: b,n $$divU_10 ; divisor == 10
222: nop
223: b normal ; divisor == 11
224: ds %r0,%r1,%r0 ; set V-bit to 1
225: b,n $$divU_12 ; divisor == 12
226: nop
227: b normal ; divisor == 13
228: ds %r0,%r1,%r0 ; set V-bit to 1
229: b,n $$divU_14 ; divisor == 14
230: nop
231: b,n $$divU_15 ; divisor == 15
232: nop
233: ;_____________________________________________________________________________
234: ; Handle the case where the high bit is on in the divisor.
235: ; Compute: if( dividend>=divisor) quotient=1; else quotient=0;
236: ; Note: dividend>=divisor iff dividend-divisor does not borrow
237: ; and not borrow iff carry
238: big_divisor:
239: sub %arg0,%arg1,%r0
240: bv 0(%rp)
241: addc %r0,%r0,%ret1 ; quotient = carry out of the subtract (0 or 1)
242: .procend
243: .end
244:
245: ;_____________________________________________________________________________
246:
;-----------------------------------------------------------------------
; $$divide_by_constant -- collection of divide-by-small-constant
; millicode entry points ($$divI_N = signed, $$divU_N = unsigned).
; In:   %arg0 = dividend        Out: %ret1 = quotient
; Uses: %arg1 and %r1 as scratch.
; Each routine forms a shift-add reciprocal product and then folds the
; partial sums through the shared tails ($pos/$neg, $pos_for_17/
; $neg_for_17, $pos7/$neg7_shift).  Routines fall through into those
; tails, so none of this code may be reordered.
; NOTE(review): the per-constant arithmetic is HP's original millicode
; and is taken on trust here -- verify against the PA-RISC millicode
; reference before modifying any sequence.
;-----------------------------------------------------------------------
247: $$divide_by_constant:
248: .PROC
249: .CALLINFO NO_CALLS
250: .export $$divide_by_constant,millicode
251: ; Provides a "nice" label for the code covered by the unwind descriptor
252: ; for things like gprof.
253:
; signed divide by 2: COMCLR skips the bias when %arg0 >= 0; negatives
; get +1 so the arithmetic shift (EXTRS) rounds toward zero.
254: $$divI_2:
255: .EXPORT $$divI_2,MILLICODE
256: COMCLR,>= %arg0,0,0
257: ADDI 1,%arg0,%arg0
258: bv 0(%rp)
259: EXTRS %arg0,30,31,%ret1
260:
; signed divide by 4: bias negatives by 3, arithmetic shift right 2.
261: $$divI_4:
262: .EXPORT $$divI_4,MILLICODE
263: COMCLR,>= %arg0,0,0
264: ADDI 3,%arg0,%arg0
265: bv 0(%rp)
266: EXTRS %arg0,29,30,%ret1
267:
; signed divide by 8: bias negatives by 7, arithmetic shift right 3.
268: $$divI_8:
269: .EXPORT $$divI_8,MILLICODE
270: COMCLR,>= %arg0,0,0
271: ADDI 7,%arg0,%arg0
272: bv 0(%rp)
273: EXTRS %arg0,28,29,%ret1
274:
; signed divide by 16: bias negatives by 15, arithmetic shift right 4.
275: $$divI_16:
276: .EXPORT $$divI_16,MILLICODE
277: COMCLR,>= %arg0,0,0
278: ADDI 15,%arg0,%arg0
279: bv 0(%rp)
280: EXTRS %arg0,27,28,%ret1
281:
; signed divide by 3: shift-add reciprocal, joins shared $pos/$neg tail.
282: $$divI_3:
283: .EXPORT $$divI_3,MILLICODE
284: COMB,<,N %arg0,0,$neg3
285:
286: ADDI 1,%arg0,%arg0
287: EXTRU %arg0,1,2,%ret1
288: SH2ADD %arg0,%arg0,%arg0
289: B $pos
290: ADDC %ret1,0,%ret1
291:
292: $neg3:
293: SUBI 1,%arg0,%arg0
294: EXTRU %arg0,1,2,%ret1
295: SH2ADD %arg0,%arg0,%arg0
296: B $neg
297: ADDC %ret1,0,%ret1
298:
; unsigned divide by 3.
299: $$divU_3:
300: .EXPORT $$divU_3,MILLICODE
301: ADDI 1,%arg0,%arg0
302: ADDC 0,0,%ret1
303: SHD %ret1,%arg0,30,%arg1
304: SH2ADD %arg0,%arg0,%arg0
305: B $pos
306: ADDC %ret1,%arg1,%ret1
307:
; signed divide by 5.
308: $$divI_5:
309: .EXPORT $$divI_5,MILLICODE
310: COMB,<,N %arg0,0,$neg5
311: ADDI 3,%arg0,%arg1
312: SH1ADD %arg0,%arg1,%arg0
313: B $pos
314: ADDC 0,0,%ret1
315:
316: $neg5:
317: SUB 0,%arg0,%arg0
318: ADDI 1,%arg0,%arg0
319: SHD 0,%arg0,31,%ret1
320: SH1ADD %arg0,%arg0,%arg0
321: B $neg
322: ADDC %ret1,0,%ret1
323:
; unsigned divide by 5.
324: $$divU_5:
325: .EXPORT $$divU_5,MILLICODE
326: ADDI 1,%arg0,%arg0
327: ADDC 0,0,%ret1
328: SHD %ret1,%arg0,31,%arg1
329: SH1ADD %arg0,%arg0,%arg0
330: B $pos
331: ADDC %arg1,%ret1,%ret1
332:
; signed divide by 6: halve (EXTRU), then the divide-by-3 pattern.
333: $$divI_6:
334: .EXPORT $$divI_6,MILLICODE
335: COMB,<,N %arg0,0,$neg6
336: EXTRU %arg0,30,31,%arg0
337: ADDI 5,%arg0,%arg1
338: SH2ADD %arg0,%arg1,%arg0
339: B $pos
340: ADDC 0,0,%ret1
341:
342: $neg6:
343: SUBI 2,%arg0,%arg0
344: EXTRU %arg0,30,31,%arg0
345: SHD 0,%arg0,30,%ret1
346: SH2ADD %arg0,%arg0,%arg0
347: B $neg
348: ADDC %ret1,0,%ret1
349:
; unsigned divide by 6: halve, then divide-by-3 pattern.
350: $$divU_6:
351: .EXPORT $$divU_6,MILLICODE
352: EXTRU %arg0,30,31,%arg0
353: ADDI 1,%arg0,%arg0
354: SHD 0,%arg0,30,%ret1
355: SH2ADD %arg0,%arg0,%arg0
356: B $pos
357: ADDC %ret1,0,%ret1
358:
; unsigned divide by 10: halve, then divide-by-5; falls into $pos.
359: $$divU_10:
360: .EXPORT $$divU_10,MILLICODE
361: EXTRU %arg0,30,31,%arg0
362: ADDI 3,%arg0,%arg1
363: SH1ADD %arg0,%arg1,%arg0
364: ADDC 0,0,%ret1
; shared positive tail: fold the partial product at 4-, 8-, then
; 16-bit granularity and return the quotient in %ret1.
365: $pos:
366: SHD %ret1,%arg0,28,%arg1
367: SHD %arg0,0,28,%r1
368: ADD %arg0,%r1,%arg0
369: ADDC %ret1,%arg1,%ret1
370: $pos_for_17:
371: SHD %ret1,%arg0,24,%arg1
372: SHD %arg0,0,24,%r1
373: ADD %arg0,%r1,%arg0
374: ADDC %ret1,%arg1,%ret1
375:
376: SHD %ret1,%arg0,16,%arg1
377: SHD %arg0,0,16,%r1
378: ADD %arg0,%r1,%arg0
379: bv 0(%rp)
380: ADDC %ret1,%arg1,%ret1
381:
; signed divide by 10.
382: $$divI_10:
383: .EXPORT $$divI_10,MILLICODE
384: COMB,< %arg0,0,$neg10
385: COPY 0,%ret1
386: EXTRU %arg0,30,31,%arg0
387: ADDIB,TR 1,%arg0,$pos
388: SH1ADD %arg0,%arg0,%arg0
389:
390: $neg10:
391: SUBI 2,%arg0,%arg0
392: EXTRU %arg0,30,31,%arg0
393: SH1ADD %arg0,%arg0,%arg0
; shared negative tail: same folding as $pos, quotient negated at end.
394: $neg:
395: SHD %ret1,%arg0,28,%arg1
396: SHD %arg0,0,28,%r1
397: ADD %arg0,%r1,%arg0
398: ADDC %ret1,%arg1,%ret1
399: $neg_for_17:
400: SHD %ret1,%arg0,24,%arg1
401: SHD %arg0,0,24,%r1
402: ADD %arg0,%r1,%arg0
403: ADDC %ret1,%arg1,%ret1
404:
405: SHD %ret1,%arg0,16,%arg1
406: SHD %arg0,0,16,%r1
407: ADD %arg0,%r1,%arg0
408: ADDC %ret1,%arg1,%ret1
409: bv 0(%rp)
410: SUB 0,%ret1,%ret1 ; (delay slot) negate the quotient
411:
; signed divide by 12: quarter, then divide-by-3 pattern.
412: $$divI_12:
413: .EXPORT $$divI_12,MILLICODE
414: COMB,< %arg0,0,$neg12
415: COPY 0,%ret1
416: EXTRU %arg0,29,30,%arg0
417: ADDIB,TR 1,%arg0,$pos
418: SH2ADD %arg0,%arg0,%arg0
419:
420: $neg12:
421: SUBI 4,%arg0,%arg0
422: EXTRU %arg0,29,30,%arg0
423: B $neg
424: SH2ADD %arg0,%arg0,%arg0
425:
; unsigned divide by 12.
426: $$divU_12:
427: .EXPORT $$divU_12,MILLICODE
428: EXTRU %arg0,29,30,%arg0
429: ADDI 5,%arg0,%arg1
430: SH2ADD %arg0,%arg1,%arg0
431: B $pos
432: ADDC 0,0,%ret1
433:
; signed divide by 15 (enters $pos one instruction in: $pos+4).
434: $$divI_15:
435: .EXPORT $$divI_15,MILLICODE
436: COMB,< %arg0,0,$neg15
437: COPY 0,%ret1
438: ADDIB,TR 1,%arg0,$pos+4
439: SHD %ret1,%arg0,28,%arg1
440:
441: $neg15:
442: B $neg
443: SUBI 1,%arg0,%arg0
444:
; unsigned divide by 15.
445: $$divU_15:
446: .EXPORT $$divU_15,MILLICODE
447: ADDI 1,%arg0,%arg0
448: B $pos
449: ADDC 0,0,%ret1
450:
; signed divide by 17: subtract-based folding via $pos_for_17/$neg_for_17.
451: $$divI_17:
452: .EXPORT $$divI_17,MILLICODE
453: COMB,<,N %arg0,0,$neg17
454: ADDI 1,%arg0,%arg0
455: SHD 0,%arg0,28,%arg1
456: SHD %arg0,0,28,%r1
457: SUB %r1,%arg0,%arg0
458: B $pos_for_17
459: SUBB %arg1,0,%ret1
460:
461: $neg17:
462: SUBI 1,%arg0,%arg0
463: SHD 0,%arg0,28,%arg1
464: SHD %arg0,0,28,%r1
465: SUB %r1,%arg0,%arg0
466: B $neg_for_17
467: SUBB %arg1,0,%ret1
468:
; unsigned divide by 17.
469: $$divU_17:
470: .EXPORT $$divU_17,MILLICODE
471: ADDI 1,%arg0,%arg0
472: ADDC 0,0,%ret1
473: SHD %ret1,%arg0,28,%arg1
474: $u17:
475: SHD %arg0,0,28,%r1
476: SUB %r1,%arg0,%arg0
477: B $pos_for_17
478: SUBB %arg1,%ret1,%ret1
479:
; signed divide by 7: shift-add approximation plus an iterative
; 24-bit correction loop ($1/$2); $7 is also the divide-by-14 entry.
480: $$divI_7:
481: .EXPORT $$divI_7,MILLICODE
482: COMB,<,N %arg0,0,$neg7
483: $7:
484: ADDI 1,%arg0,%arg0
485: SHD 0,%arg0,29,%ret1
486: SH3ADD %arg0,%arg0,%arg0
487: ADDC %ret1,0,%ret1
488: $pos7:
489: SHD %ret1,%arg0,26,%arg1
490: SHD %arg0,0,26,%r1
491: ADD %arg0,%r1,%arg0
492: ADDC %ret1,%arg1,%ret1
493:
494: SHD %ret1,%arg0,20,%arg1
495: SHD %arg0,0,20,%r1
496: ADD %arg0,%r1,%arg0
497: ADDC %ret1,%arg1,%arg1
498:
499: COPY 0,%ret1
500: SHD,= %arg1,%arg0,24,%arg1
501: $1:
502: ADDB,TR %arg1,%ret1,$2
503: EXTRU %arg0,31,24,%arg0
504:
505: bv,n 0(%rp)
506:
507: $2:
508: ADDB,TR %arg1,%arg0,$1
509: EXTRU,= %arg0,7,8,%arg1
510:
; negative-dividend path for divide by 7 (and 14, via $8);
; quotient is negated on return.
511: $neg7:
512: SUBI 1,%arg0,%arg0
513: $8:
514: SHD 0,%arg0,29,%ret1
515: SH3ADD %arg0,%arg0,%arg0
516: ADDC %ret1,0,%ret1
517:
518: $neg7_shift:
519: SHD %ret1,%arg0,26,%arg1
520: SHD %arg0,0,26,%r1
521: ADD %arg0,%r1,%arg0
522: ADDC %ret1,%arg1,%ret1
523:
524: SHD %ret1,%arg0,20,%arg1
525: SHD %arg0,0,20,%r1
526: ADD %arg0,%r1,%arg0
527: ADDC %ret1,%arg1,%arg1
528:
529: COPY 0,%ret1
530: SHD,= %arg1,%arg0,24,%arg1
531: $3:
532: ADDB,TR %arg1,%ret1,$4
533: EXTRU %arg0,31,24,%arg0
534:
535: bv 0(%rp)
536: SUB 0,%ret1,%ret1 ; (delay slot) negate the quotient
537:
538: $4:
539: ADDB,TR %arg1,%arg0,$3
540: EXTRU,= %arg0,7,8,%arg1
541:
; unsigned divide by 7.
542: $$divU_7:
543: .EXPORT $$divU_7,MILLICODE
544: ADDI 1,%arg0,%arg0
545: ADDC 0,0,%ret1
546: SHD %ret1,%arg0,29,%arg1
547: SH3ADD %arg0,%arg0,%arg0
548: B $pos7
549: ADDC %arg1,%ret1,%ret1
550:
; signed divide by 9: x*8-x style setup, then the divide-by-7 tail.
551: $$divI_9:
552: .EXPORT $$divI_9,MILLICODE
553: COMB,<,N %arg0,0,$neg9
554: ADDI 1,%arg0,%arg0
555: SHD 0,%arg0,29,%arg1
556: SHD %arg0,0,29,%r1
557: SUB %r1,%arg0,%arg0
558: B $pos7
559: SUBB %arg1,0,%ret1
560:
561: $neg9:
562: SUBI 1,%arg0,%arg0
563: SHD 0,%arg0,29,%arg1
564: SHD %arg0,0,29,%r1
565: SUB %r1,%arg0,%arg0
566: B $neg7_shift
567: SUBB %arg1,0,%ret1
568:
; unsigned divide by 9.
569: $$divU_9:
570: .EXPORT $$divU_9,MILLICODE
571: ADDI 1,%arg0,%arg0
572: ADDC 0,0,%ret1
573: SHD %ret1,%arg0,29,%arg1
574: SHD %arg0,0,29,%r1
575: SUB %r1,%arg0,%arg0
576: B $pos7
577: SUBB %arg1,%ret1,%ret1
578:
; divide by 14: halve, then reuse the divide-by-7 code ($7/$8).
; $$divU_14 is the fall-through entry of $$divI_14.
579: $$divI_14:
580: .EXPORT $$divI_14,MILLICODE
581: COMB,<,N %arg0,0,$neg14
582: $$divU_14:
583: .EXPORT $$divU_14,MILLICODE
584: B $7
585: EXTRU %arg0,30,31,%arg0
586:
587: $neg14:
588: SUBI 2,%arg0,%arg0
589: B $8
590: EXTRU %arg0,30,31,%arg0
591:
592: .PROCEND
593: .END
594:
;-----------------------------------------------------------------------
; $$remU -- 32-bit unsigned remainder millicode.
; In:   %arg0 = dividend, %arg1 = divisor
; Out:  %ret1 = %arg0 rem %arg1
; Mirror of $$divU's DS loop with the register roles swapped: %ret1
; carries the partial remainder, %r1 the (discarded) quotient bits.
; Divisors that are zero or have the high bit set (signed <= 0 after
; the COMIB) take special_case; zero divisor traps there.
;-----------------------------------------------------------------------
595: .export $$remU,millicode
596: $$remU:
597: .proc
598: .callinfo NO_CALLS
599: .entry
600:
601: comib,>=,n 0,%arg1,special_case
602: sub %r0,%arg1,%ret1 ; clear carry, negate the divisor
603: ds %r0,%ret1,%r0 ; set V-bit to 1
604: add %arg0,%arg0,%r1 ; shift msb bit into carry
605: ds %r0,%arg1,%ret1 ; 1st divide step, if no carry
606: addc %r1,%r1,%r1 ; shift %r1 with/into carry
607: ds %ret1,%arg1,%ret1 ; 2nd divide step
608: addc %r1,%r1,%r1 ; shift %r1 with/into carry
609: ds %ret1,%arg1,%ret1 ; 3rd divide step
610: addc %r1,%r1,%r1 ; shift %r1 with/into carry
611: ds %ret1,%arg1,%ret1 ; 4th divide step
612: addc %r1,%r1,%r1 ; shift %r1 with/into carry
613: ds %ret1,%arg1,%ret1 ; 5th divide step
614: addc %r1,%r1,%r1 ; shift %r1 with/into carry
615: ds %ret1,%arg1,%ret1 ; 6th divide step
616: addc %r1,%r1,%r1 ; shift %r1 with/into carry
617: ds %ret1,%arg1,%ret1 ; 7th divide step
618: addc %r1,%r1,%r1 ; shift %r1 with/into carry
619: ds %ret1,%arg1,%ret1 ; 8th divide step
620: addc %r1,%r1,%r1 ; shift %r1 with/into carry
621: ds %ret1,%arg1,%ret1 ; 9th divide step
622: addc %r1,%r1,%r1 ; shift %r1 with/into carry
623: ds %ret1,%arg1,%ret1 ; 10th divide step
624: addc %r1,%r1,%r1 ; shift %r1 with/into carry
625: ds %ret1,%arg1,%ret1 ; 11th divide step
626: addc %r1,%r1,%r1 ; shift %r1 with/into carry
627: ds %ret1,%arg1,%ret1 ; 12th divide step
628: addc %r1,%r1,%r1 ; shift %r1 with/into carry
629: ds %ret1,%arg1,%ret1 ; 13th divide step
630: addc %r1,%r1,%r1 ; shift %r1 with/into carry
631: ds %ret1,%arg1,%ret1 ; 14th divide step
632: addc %r1,%r1,%r1 ; shift %r1 with/into carry
633: ds %ret1,%arg1,%ret1 ; 15th divide step
634: addc %r1,%r1,%r1 ; shift %r1 with/into carry
635: ds %ret1,%arg1,%ret1 ; 16th divide step
636: addc %r1,%r1,%r1 ; shift %r1 with/into carry
637: ds %ret1,%arg1,%ret1 ; 17th divide step
638: addc %r1,%r1,%r1 ; shift %r1 with/into carry
639: ds %ret1,%arg1,%ret1 ; 18th divide step
640: addc %r1,%r1,%r1 ; shift %r1 with/into carry
641: ds %ret1,%arg1,%ret1 ; 19th divide step
642: addc %r1,%r1,%r1 ; shift %r1 with/into carry
643: ds %ret1,%arg1,%ret1 ; 20th divide step
644: addc %r1,%r1,%r1 ; shift %r1 with/into carry
645: ds %ret1,%arg1,%ret1 ; 21st divide step
646: addc %r1,%r1,%r1 ; shift %r1 with/into carry
647: ds %ret1,%arg1,%ret1 ; 22nd divide step
648: addc %r1,%r1,%r1 ; shift %r1 with/into carry
649: ds %ret1,%arg1,%ret1 ; 23rd divide step
650: addc %r1,%r1,%r1 ; shift %r1 with/into carry
651: ds %ret1,%arg1,%ret1 ; 24th divide step
652: addc %r1,%r1,%r1 ; shift %r1 with/into carry
653: ds %ret1,%arg1,%ret1 ; 25th divide step
654: addc %r1,%r1,%r1 ; shift %r1 with/into carry
655: ds %ret1,%arg1,%ret1 ; 26th divide step
656: addc %r1,%r1,%r1 ; shift %r1 with/into carry
657: ds %ret1,%arg1,%ret1 ; 27th divide step
658: addc %r1,%r1,%r1 ; shift %r1 with/into carry
659: ds %ret1,%arg1,%ret1 ; 28th divide step
660: addc %r1,%r1,%r1 ; shift %r1 with/into carry
661: ds %ret1,%arg1,%ret1 ; 29th divide step
662: addc %r1,%r1,%r1 ; shift %r1 with/into carry
663: ds %ret1,%arg1,%ret1 ; 30th divide step
664: addc %r1,%r1,%r1 ; shift %r1 with/into carry
665: ds %ret1,%arg1,%ret1 ; 31st divide step
666: addc %r1,%r1,%r1 ; shift %r1 with/into carry
667: ds %ret1,%arg1,%ret1 ; 32nd divide step,
668: comiclr,<= 0,%ret1,%r0 ; remainder negative? then it needs one divisor added back
669: add %ret1,%arg1,%ret1 ; correction
670: ; .exit
671: bv,n 0(%rp)
672: nop
673: ; Putting >= on the last DS and deleting COMICLR does not work!
674: ;_____________________________________________________________________________
675: special_case:
676: addit,= 0,%arg1,%r0 ; trap on div by zero
677: sub,>>= %arg0,%arg1,%ret1 ; dividend >= divisor: remainder = difference ...
678: copy %arg0,%ret1 ; ... otherwise remainder = dividend itself
679: .exit
680: bv,n 0(%rp)
681: nop
682: .procend
683: .end
684:
685: .align 16
686: $$mulI:
687:
688: .proc
689: .callinfo NO_CALLS
690: .export $$mulI, millicode
691: combt,<<= %arg1,%arg0,l4 ; swap args if unsigned %arg1>%arg0
692: copy 0,%ret1 ; zero out the result
693: xor %arg0,%arg1,%arg0 ; swap %arg0 & %arg1 using the
694: xor %arg0,%arg1,%arg1 ; old xor trick
695: xor %arg0,%arg1,%arg0
696: l4: combt,<= 0,%arg0,l3 ; if %arg0>=0 then proceed like unsigned
697:
698: zdep %arg1,30,8,%r1 ; %r1 = (%arg1&0xff)<<1 *********
699: sub,> 0,%arg1,%r1 ; otherwise negate both and
700: combt,<=,n %arg0,%r1,l2 ; swap back if |%arg0|<|%arg1|
701: sub 0,%arg0,%arg1
702: movb,tr,n %r1,%arg0,l2 ; 10th inst.
703:
704: l0: add %ret1,%r1,%ret1 ; add in this partial product
705:
706: l1: zdep %arg0,23,24,%arg0 ; %arg0 <<= 8 ******************
707:
708: l2: zdep %arg1,30,8,%r1 ; %r1 = (%arg1&0xff)<<1 *********
709:
710: l3: blr %r1,0 ; case on these 8 bits ******
711:
712: extru %arg1,23,24,%arg1 ; %arg1 >>= 8 ******************
713:
714: ;16 insts before this.
715: ; %arg0 <<= 8 **************************
716: x0: comb,<> %arg1,0,l2 ! zdep %arg0,23,24,%arg0 ! bv,n 0(%rp) ! nop
717:
718: x1: comb,<> %arg1,0,l1 ! add %ret1,%arg0,%ret1 ! bv,n 0(%rp) ! nop
719:
720: x2: comb,<> %arg1,0,l1 ! sh1add %arg0,%ret1,%ret1 ! bv,n 0(%rp) ! nop
721:
722: x3: comb,<> %arg1,0,l0 ! sh1add %arg0,%arg0,%r1 ! bv 0(%rp) ! add %ret1,%r1,%ret1
723:
724: x4: comb,<> %arg1,0,l1 ! sh2add %arg0,%ret1,%ret1 ! bv,n 0(%rp) ! nop
725:
726: x5: comb,<> %arg1,0,l0 ! sh2add %arg0,%arg0,%r1 ! bv 0(%rp) ! add %ret1,%r1,%ret1
727:
728: x6: sh1add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l1 ! sh1add %r1,%ret1,%ret1 ! bv,n 0(%rp)
729:
730: x7: sh1add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l0 ! sh2add %arg0,%ret1,%ret1 ! b,n ret_t0
731:
732: x8: comb,<> %arg1,0,l1 ! sh3add %arg0,%ret1,%ret1 ! bv,n 0(%rp) ! nop
733:
734: x9: comb,<> %arg1,0,l0 ! sh3add %arg0,%arg0,%r1 ! bv 0(%rp) ! add %ret1,%r1,%ret1
735:
736: x10: sh2add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l1 ! sh1add %r1,%ret1,%ret1 ! bv,n 0(%rp)
737:
738: x11: sh1add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l0 ! sh3add %arg0,%ret1,%ret1 ! b,n ret_t0
739:
740: x12: sh1add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l1 ! sh2add %r1,%ret1,%ret1 ! bv,n 0(%rp)
741:
742: x13: sh2add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l0 ! sh3add %arg0,%ret1,%ret1 ! b,n ret_t0
743:
744: x14: sh1add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
745:
746: x15: sh2add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l0 ! sh1add %r1,%r1,%r1 ! b,n ret_t0
747:
748: x16: zdep %arg0,27,28,%r1 ! comb,<> %arg1,0,l1 ! add %ret1,%r1,%ret1 ! bv,n 0(%rp)
749:
750: x17: sh3add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l0 ! sh3add %arg0,%r1,%r1 ! b,n ret_t0
751:
752: x18: sh3add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l1 ! sh1add %r1,%ret1,%ret1 ! bv,n 0(%rp)
753:
754: x19: sh3add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l0 ! sh1add %r1,%arg0,%r1 ! b,n ret_t0
755:
756: x20: sh2add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l1 ! sh2add %r1,%ret1,%ret1 ! bv,n 0(%rp)
757:
758: x21: sh2add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l0 ! sh2add %r1,%arg0,%r1 ! b,n ret_t0
759:
760: x22: sh2add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
761:
762: x23: sh2add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_t0 ! sh1add %r1,%arg0,%r1
763:
764: x24: sh1add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l1 ! sh3add %r1,%ret1,%ret1 ! bv,n 0(%rp)
765:
766: x25: sh2add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l0 ! sh2add %r1,%r1,%r1 ! b,n ret_t0
767:
768: x26: sh1add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
769:
770: x27: sh1add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l0 ! sh3add %r1,%r1,%r1 ! b,n ret_t0
771:
772: x28: sh1add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_shift ! sh2add %r1,%ret1,%ret1
773:
774: x29: sh1add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_t0 ! sh2add %r1,%arg0,%r1
775:
776: x30: sh2add %arg0,%arg0,%r1 ! sh1add %r1,%r1,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
777:
778: x31: zdep %arg0,26,27,%r1 ! comb,<> %arg1,0,l0 ! sub %r1,%arg0,%r1 ! b,n ret_t0
779:
780: x32: zdep %arg0,26,27,%r1 ! comb,<> %arg1,0,l1 ! add %ret1,%r1,%ret1 ! bv,n 0(%rp)
781:
782: x33: sh3add %arg0,0,%r1 ! comb,<> %arg1,0,l0 ! sh2add %r1,%arg0,%r1 ! b,n ret_t0
783:
784: x34: zdep %arg0,27,28,%r1 ! add %r1,%arg0,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
785:
786: x35: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sh3add %arg0,%r1,%r1
787:
788: x36: sh3add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l1 ! sh2add %r1,%ret1,%ret1 ! bv,n 0(%rp)
789:
790: x37: sh3add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l0 ! sh2add %r1,%arg0,%r1 ! b,n ret_t0
791:
792: x38: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
793:
794: x39: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_t0 ! sh1add %r1,%arg0,%r1
795:
796: x40: sh2add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l1 ! sh3add %r1,%ret1,%ret1 ! bv,n 0(%rp)
797:
798: x41: sh2add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l0 ! sh3add %r1,%arg0,%r1 ! b,n ret_t0
799:
800: x42: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
801:
802: x43: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_t0 ! sh1add %r1,%arg0,%r1
803:
804: x44: sh2add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_shift ! sh2add %r1,%ret1,%ret1
805:
806: x45: sh3add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l0 ! sh2add %r1,%r1,%r1 ! b,n ret_t0
807:
808: x46: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! add %r1,%arg0,%r1
809:
810: x47: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh1add %arg0,%r1,%r1
811:
812: x48: sh1add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l0 ! zdep %r1,27,28,%r1 ! b,n ret_t0
813:
814: x49: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh2add %arg0,%r1,%r1
815:
816: x50: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
817:
818: x51: sh3add %arg0,%arg0,%r1 ! sh3add %arg0,%r1,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
819:
820: x52: sh1add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_shift ! sh2add %r1,%ret1,%ret1
821:
822: x53: sh1add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_t0 ! sh2add %r1,%arg0,%r1
823:
824: x54: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%r1,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
825:
826: x55: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sh1add %r1,%arg0,%r1
827:
828: x56: sh1add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_shift ! sh3add %r1,%ret1,%ret1
829:
830: x57: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
831:
832: x58: sh1add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_2t0 ! sh2add %r1,%arg0,%r1
833:
834: x59: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_t02a0 ! sh1add %r1,%r1,%r1
835:
836: x60: sh2add %arg0,%arg0,%r1 ! sh1add %r1,%r1,%r1 ! b e_shift ! sh2add %r1,%ret1,%ret1
837:
838: x61: sh2add %arg0,%arg0,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sh2add %r1,%arg0,%r1
839:
840: x62: zdep %arg0,26,27,%r1 ! sub %r1,%arg0,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
841:
842: x63: zdep %arg0,25,26,%r1 ! comb,<> %arg1,0,l0 ! sub %r1,%arg0,%r1 ! b,n ret_t0
843:
844: x64: zdep %arg0,25,26,%r1 ! comb,<> %arg1,0,l1 ! add %ret1,%r1,%ret1 ! bv,n 0(%rp)
845:
846: x65: sh3add %arg0,0,%r1 ! comb,<> %arg1,0,l0 ! sh3add %r1,%arg0,%r1 ! b,n ret_t0
847:
848: x66: zdep %arg0,26,27,%r1 ! add %r1,%arg0,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
849:
850: x67: sh3add %arg0,0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_t0 ! sh1add %r1,%arg0,%r1
851:
852: x68: sh3add %arg0,0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_shift ! sh2add %r1,%ret1,%ret1
853:
854: x69: sh3add %arg0,0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_t0 ! sh2add %r1,%arg0,%r1
855:
856: x70: zdep %arg0,25,26,%r1 ! sh2add %arg0,%r1,%r1 ! b e_t0 ! sh1add %arg0,%r1,%r1
857:
858: x71: sh3add %arg0,%arg0,%r1 ! sh3add %r1,0,%r1 ! b e_t0 ! sub %r1,%arg0,%r1
859:
860: x72: sh3add %arg0,%arg0,%r1 ! comb,<> %arg1,0,l1 ! sh3add %r1,%ret1,%ret1 ! bv,n 0(%rp)
861:
862: x73: sh3add %arg0,%arg0,%r1 ! sh3add %r1,%arg0,%r1 ! b e_shift ! add %ret1,%r1,%ret1
863:
864: x74: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
865:
866: x75: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_t0 ! sh1add %r1,%arg0,%r1
867:
868: x76: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_shift ! sh2add %r1,%ret1,%ret1
869:
870: x77: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_t0 ! sh2add %r1,%arg0,%r1
871:
872: x78: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_2t0 ! sh1add %r1,%arg0,%r1
873:
874: x79: zdep %arg0,27,28,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sub %r1,%arg0,%r1
875:
876: x80: zdep %arg0,27,28,%r1 ! sh2add %r1,%r1,%r1 ! b e_shift ! add %ret1,%r1,%ret1
877:
878: x81: sh3add %arg0,%arg0,%r1 ! sh3add %r1,%r1,%r1 ! b e_shift ! add %ret1,%r1,%ret1
879:
880: x82: sh2add %arg0,%arg0,%r1 ! sh3add %r1,%arg0,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
881:
882: x83: sh2add %arg0,%arg0,%r1 ! sh3add %r1,%arg0,%r1 ! b e_t0 ! sh1add %r1,%arg0,%r1
883:
884: x84: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_shift ! sh2add %r1,%ret1,%ret1
885:
886: x85: sh3add %arg0,0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
887:
888: x86: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_2t0 ! sh1add %r1,%arg0,%r1
889:
890: x87: sh3add %arg0,%arg0,%r1 ! sh3add %r1,%r1,%r1 ! b e_t02a0 ! sh2add %arg0,%r1,%r1
891:
892: x88: sh2add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_shift ! sh3add %r1,%ret1,%ret1
893:
894: x89: sh2add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_t0 ! sh3add %r1,%arg0,%r1
895:
896: x90: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
897:
898: x91: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh1add %r1,%arg0,%r1
899:
900: x92: sh2add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_4t0 ! sh1add %r1,%arg0,%r1
901:
902: x93: zdep %arg0,26,27,%r1 ! sub %r1,%arg0,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
903:
904: x94: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_2t0 ! sh1add %arg0,%r1,%r1
905:
906: x95: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
907:
908: x96: sh3add %arg0,0,%r1 ! sh1add %r1,%r1,%r1 ! b e_shift ! sh2add %r1,%ret1,%ret1
909:
910: x97: sh3add %arg0,0,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sh2add %r1,%arg0,%r1
911:
912: x98: zdep %arg0,26,27,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sh1add %arg0,%r1,%r1
913:
914: x99: sh3add %arg0,0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
915:
916: x100: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_shift ! sh2add %r1,%ret1,%ret1
917:
918: x101: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh2add %r1,%arg0,%r1
919:
920: x102: zdep %arg0,26,27,%r1 ! sh1add %arg0,%r1,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
921:
922: x103: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_t02a0 ! sh2add %r1,%arg0,%r1
923:
924: x104: sh1add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_shift ! sh3add %r1,%ret1,%ret1
925:
926: x105: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
927:
928: x106: sh1add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_2t0 ! sh2add %r1,%arg0,%r1
929:
930: x107: sh3add %arg0,%arg0,%r1 ! sh2add %arg0,%r1,%r1 ! b e_t02a0 ! sh3add %r1,%arg0,%r1
931:
932: x108: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%r1,%r1 ! b e_shift ! sh2add %r1,%ret1,%ret1
933:
934: x109: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sh2add %r1,%arg0,%r1
935:
936: x110: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%r1,%r1 ! b e_2t0 ! sh1add %r1,%arg0,%r1
937:
938: x111: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
939:
940: x112: sh1add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_t0 ! zdep %r1,27,28,%r1
941:
942: x113: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_t02a0 ! sh1add %r1,%r1,%r1
943:
944: x114: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_2t0 ! sh1add %r1,%r1,%r1
945:
946: x115: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_2t0a0 ! sh1add %r1,%r1,%r1
947:
948: x116: sh1add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_4t0 ! sh2add %r1,%arg0,%r1
949:
950: x117: sh1add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_t0 ! sh3add %r1,%r1,%r1
951:
952: x118: sh1add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_t0a0 ! sh3add %r1,%r1,%r1
953:
954: x119: sh1add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_t02a0 ! sh3add %r1,%r1,%r1
955:
956: x120: sh2add %arg0,%arg0,%r1 ! sh1add %r1,%r1,%r1 ! b e_shift ! sh3add %r1,%ret1,%ret1
957:
958: x121: sh2add %arg0,%arg0,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sh3add %r1,%arg0,%r1
959:
960: x122: sh2add %arg0,%arg0,%r1 ! sh1add %r1,%r1,%r1 ! b e_2t0 ! sh2add %r1,%arg0,%r1
961:
962: x123: sh2add %arg0,%arg0,%r1 ! sh3add %r1,%arg0,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
963:
964: x124: zdep %arg0,26,27,%r1 ! sub %r1,%arg0,%r1 ! b e_shift ! sh2add %r1,%ret1,%ret1
965:
966: x125: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
967:
968: x126: zdep %arg0,25,26,%r1 ! sub %r1,%arg0,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
969:
970: x127: zdep %arg0,24,25,%r1 ! comb,<> %arg1,0,l0 ! sub %r1,%arg0,%r1 ! b,n ret_t0
971:
972: x128: zdep %arg0,24,25,%r1 ! comb,<> %arg1,0,l1 ! add %ret1,%r1,%ret1 ! bv,n 0(%rp)
973:
974: x129: zdep %arg0,24,25,%r1 ! comb,<> %arg1,0,l0 ! add %r1,%arg0,%r1 ! b,n ret_t0
975:
976: x130: zdep %arg0,25,26,%r1 ! add %r1,%arg0,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
977:
978: x131: sh3add %arg0,0,%r1 ! sh3add %r1,%arg0,%r1 ! b e_t0 ! sh1add %r1,%arg0,%r1
979:
980: x132: sh3add %arg0,0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_shift ! sh2add %r1,%ret1,%ret1
981:
982: x133: sh3add %arg0,0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_t0 ! sh2add %r1,%arg0,%r1
983:
984: x134: sh3add %arg0,0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_2t0 ! sh1add %r1,%arg0,%r1
985:
986: x135: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
987:
988: x136: sh3add %arg0,0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_shift ! sh3add %r1,%ret1,%ret1
989:
990: x137: sh3add %arg0,0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_t0 ! sh3add %r1,%arg0,%r1
991:
992: x138: sh3add %arg0,0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_2t0 ! sh2add %r1,%arg0,%r1
993:
994: x139: sh3add %arg0,0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_2t0a0 ! sh2add %r1,%arg0,%r1
995:
996: x140: sh1add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_4t0 ! sh2add %r1,%r1,%r1
997:
998: x141: sh3add %arg0,0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_4t0a0 ! sh1add %r1,%arg0,%r1
999:
1000: x142: sh3add %arg0,%arg0,%r1 ! sh3add %r1,0,%r1 ! b e_2t0 ! sub %r1,%arg0,%r1
1001:
1002: x143: zdep %arg0,27,28,%r1 ! sh3add %r1,%r1,%r1 ! b e_t0 ! sub %r1,%arg0,%r1
1003:
1004: x144: sh3add %arg0,%arg0,%r1 ! sh3add %r1,0,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
1005:
1006: x145: sh3add %arg0,%arg0,%r1 ! sh3add %r1,0,%r1 ! b e_t0 ! sh1add %r1,%arg0,%r1
1007:
1008: x146: sh3add %arg0,%arg0,%r1 ! sh3add %r1,%arg0,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
1009:
1010: x147: sh3add %arg0,%arg0,%r1 ! sh3add %r1,%arg0,%r1 ! b e_t0 ! sh1add %r1,%arg0,%r1
1011:
1012: x148: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_shift ! sh2add %r1,%ret1,%ret1
1013:
1014: x149: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_t0 ! sh2add %r1,%arg0,%r1
1015:
1016: x150: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_2t0 ! sh1add %r1,%arg0,%r1
1017:
1018: x151: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_2t0a0 ! sh1add %r1,%arg0,%r1
1019:
1020: x152: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_shift ! sh3add %r1,%ret1,%ret1
1021:
1022: x153: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_t0 ! sh3add %r1,%arg0,%r1
1023:
1024: x154: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_2t0 ! sh2add %r1,%arg0,%r1
1025:
1026: x155: zdep %arg0,26,27,%r1 ! sub %r1,%arg0,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
1027:
1028: x156: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_4t0 ! sh1add %r1,%arg0,%r1
1029:
1030: x157: zdep %arg0,26,27,%r1 ! sub %r1,%arg0,%r1 ! b e_t02a0 ! sh2add %r1,%r1,%r1
1031:
1032: x158: zdep %arg0,27,28,%r1 ! sh2add %r1,%r1,%r1 ! b e_2t0 ! sub %r1,%arg0,%r1
1033:
1034: x159: zdep %arg0,26,27,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sub %r1,%arg0,%r1
1035:
1036: x160: sh2add %arg0,%arg0,%r1 ! sh2add %r1,0,%r1 ! b e_shift ! sh3add %r1,%ret1,%ret1
1037:
1038: x161: sh3add %arg0,0,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh2add %r1,%arg0,%r1
1039:
1040: x162: sh3add %arg0,%arg0,%r1 ! sh3add %r1,%r1,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
1041:
1042: x163: sh3add %arg0,%arg0,%r1 ! sh3add %r1,%r1,%r1 ! b e_t0 ! sh1add %r1,%arg0,%r1
1043:
1044: x164: sh2add %arg0,%arg0,%r1 ! sh3add %r1,%arg0,%r1 ! b e_shift ! sh2add %r1,%ret1,%ret1
1045:
1046: x165: sh3add %arg0,0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
1047:
1048: x166: sh2add %arg0,%arg0,%r1 ! sh3add %r1,%arg0,%r1 ! b e_2t0 ! sh1add %r1,%arg0,%r1
1049:
1050: x167: sh2add %arg0,%arg0,%r1 ! sh3add %r1,%arg0,%r1 ! b e_2t0a0 ! sh1add %r1,%arg0,%r1
1051:
1052: x168: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_shift ! sh3add %r1,%ret1,%ret1
1053:
1054: x169: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_t0 ! sh3add %r1,%arg0,%r1
1055:
1056: x170: zdep %arg0,26,27,%r1 ! sh1add %arg0,%r1,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
1057:
1058: x171: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_t0 ! sh3add %r1,%r1,%r1
1059:
1060: x172: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_4t0 ! sh1add %r1,%arg0,%r1
1061:
1062: x173: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_t02a0 ! sh3add %r1,%r1,%r1
1063:
1064: x174: zdep %arg0,26,27,%r1 ! sh1add %arg0,%r1,%r1 ! b e_t04a0 ! sh2add %r1,%r1,%r1
1065:
1066: x175: sh3add %arg0,0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_5t0 ! sh1add %r1,%arg0,%r1
1067:
1068: x176: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_8t0 ! add %r1,%arg0,%r1
1069:
1070: x177: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_8t0a0 ! add %r1,%arg0,%r1
1071:
1072: x178: sh2add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_2t0 ! sh3add %r1,%arg0,%r1
1073:
1074: x179: sh2add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_2t0a0 ! sh3add %r1,%arg0,%r1
1075:
1076: x180: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_shift ! sh2add %r1,%ret1,%ret1
1077:
1078: x181: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh2add %r1,%arg0,%r1
1079:
1080: x182: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_2t0 ! sh1add %r1,%arg0,%r1
1081:
1082: x183: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_2t0a0 ! sh1add %r1,%arg0,%r1
1083:
1084: x184: sh2add %arg0,%arg0,%r1 ! sh3add %r1,%r1,%r1 ! b e_4t0 ! add %r1,%arg0,%r1
1085:
1086: x185: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
1087:
1088: x186: zdep %arg0,26,27,%r1 ! sub %r1,%arg0,%r1 ! b e_2t0 ! sh1add %r1,%r1,%r1
1089:
1090: x187: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_t02a0 ! sh2add %r1,%r1,%r1
1091:
1092: x188: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_4t0 ! sh1add %arg0,%r1,%r1
1093:
1094: x189: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_t0 ! sh3add %r1,%r1,%r1
1095:
1096: x190: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_2t0 ! sh2add %r1,%r1,%r1
1097:
1098: x191: zdep %arg0,25,26,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sub %r1,%arg0,%r1
1099:
1100: x192: sh3add %arg0,0,%r1 ! sh1add %r1,%r1,%r1 ! b e_shift ! sh3add %r1,%ret1,%ret1
1101:
1102: x193: sh3add %arg0,0,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sh3add %r1,%arg0,%r1
1103:
1104: x194: sh3add %arg0,0,%r1 ! sh1add %r1,%r1,%r1 ! b e_2t0 ! sh2add %r1,%arg0,%r1
1105:
1106: x195: sh3add %arg0,0,%r1 ! sh3add %r1,%arg0,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
1107:
1108: x196: sh3add %arg0,0,%r1 ! sh1add %r1,%r1,%r1 ! b e_4t0 ! sh1add %r1,%arg0,%r1
1109:
1110: x197: sh3add %arg0,0,%r1 ! sh1add %r1,%r1,%r1 ! b e_4t0a0 ! sh1add %r1,%arg0,%r1
1111:
1112: x198: zdep %arg0,25,26,%r1 ! sh1add %arg0,%r1,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
1113:
1114: x199: sh3add %arg0,0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_2t0a0 ! sh1add %r1,%r1,%r1
1115:
1116: x200: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_shift ! sh3add %r1,%ret1,%ret1
1117:
1118: x201: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh3add %r1,%arg0,%r1
1119:
1120: x202: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_2t0 ! sh2add %r1,%arg0,%r1
1121:
1122: x203: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_2t0a0 ! sh2add %r1,%arg0,%r1
1123:
1124: x204: sh3add %arg0,0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_4t0 ! sh1add %r1,%r1,%r1
1125:
1126: x205: sh2add %arg0,%arg0,%r1 ! sh3add %r1,%arg0,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
1127:
1128: x206: zdep %arg0,25,26,%r1 ! sh2add %arg0,%r1,%r1 ! b e_t02a0 ! sh1add %r1,%r1,%r1
1129:
1130: x207: sh3add %arg0,0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_3t0 ! sh2add %r1,%arg0,%r1
1131:
1132: x208: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_8t0 ! add %r1,%arg0,%r1
1133:
1134: x209: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_8t0a0 ! add %r1,%arg0,%r1
1135:
1136: x210: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_2t0 ! sh2add %r1,%r1,%r1
1137:
1138: x211: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_2t0a0 ! sh2add %r1,%r1,%r1
1139:
1140: x212: sh1add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_4t0 ! sh2add %r1,%arg0,%r1
1141:
1142: x213: sh1add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_4t0a0 ! sh2add %r1,%arg0,%r1
1143:
1144: x214: sh3add %arg0,%arg0,%r1 ! sh2add %arg0,%r1,%r1 ! b e2t04a0 ! sh3add %r1,%arg0,%r1
1145:
1146: x215: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_5t0 ! sh1add %r1,%arg0,%r1
1147:
1148: x216: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%r1,%r1 ! b e_shift ! sh3add %r1,%ret1,%ret1
1149:
1150: x217: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%r1,%r1 ! b e_t0 ! sh3add %r1,%arg0,%r1
1151:
1152: x218: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%r1,%r1 ! b e_2t0 ! sh2add %r1,%arg0,%r1
1153:
1154: x219: sh3add %arg0,%arg0,%r1 ! sh3add %r1,%arg0,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
1155:
1156: x220: sh1add %arg0,%arg0,%r1 ! sh3add %r1,%r1,%r1 ! b e_4t0 ! sh1add %r1,%arg0,%r1
1157:
1158: x221: sh1add %arg0,%arg0,%r1 ! sh3add %r1,%r1,%r1 ! b e_4t0a0 ! sh1add %r1,%arg0,%r1
1159:
1160: x222: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_2t0 ! sh1add %r1,%r1,%r1
1161:
1162: x223: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_2t0a0 ! sh1add %r1,%r1,%r1
1163:
1164: x224: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%r1,%r1 ! b e_8t0 ! add %r1,%arg0,%r1
1165:
1166: x225: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0 ! sh2add %r1,%r1,%r1
1167:
1168: x226: sh1add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_t02a0 ! zdep %r1,26,27,%r1
1169:
1170: x227: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_t02a0 ! sh2add %r1,%r1,%r1
1171:
1172: x228: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_4t0 ! sh1add %r1,%r1,%r1
1173:
1174: x229: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_4t0a0 ! sh1add %r1,%r1,%r1
1175:
1176: x230: sh3add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_5t0 ! add %r1,%arg0,%r1
1177:
1178: x231: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_3t0 ! sh2add %r1,%arg0,%r1
1179:
1180: x232: sh1add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_8t0 ! sh2add %r1,%arg0,%r1
1181:
1182: x233: sh1add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e_8t0a0 ! sh2add %r1,%arg0,%r1
1183:
1184: x234: sh1add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_2t0 ! sh3add %r1,%r1,%r1
1185:
1186: x235: sh1add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e_2t0a0 ! sh3add %r1,%r1,%r1
1187:
1188: x236: sh3add %arg0,%arg0,%r1 ! sh1add %r1,%arg0,%r1 ! b e4t08a0 ! sh1add %r1,%r1,%r1
1189:
1190: x237: zdep %arg0,27,28,%r1 ! sh2add %r1,%r1,%r1 ! b e_3t0 ! sub %r1,%arg0,%r1
1191:
1192: x238: sh1add %arg0,%arg0,%r1 ! sh2add %r1,%arg0,%r1 ! b e2t04a0 ! sh3add %r1,%r1,%r1
1193:
1194: x239: zdep %arg0,27,28,%r1 ! sh2add %r1,%r1,%r1 ! b e_t0ma0 ! sh1add %r1,%r1,%r1
1195:
1196: x240: sh3add %arg0,%arg0,%r1 ! add %r1,%arg0,%r1 ! b e_8t0 ! sh1add %r1,%r1,%r1
1197:
1198: x241: sh3add %arg0,%arg0,%r1 ! add %r1,%arg0,%r1 ! b e_8t0a0 ! sh1add %r1,%r1,%r1
1199:
1200: x242: sh2add %arg0,%arg0,%r1 ! sh1add %r1,%r1,%r1 ! b e_2t0 ! sh3add %r1,%arg0,%r1
1201:
1202: x243: sh3add %arg0,%arg0,%r1 ! sh3add %r1,%r1,%r1 ! b e_t0 ! sh1add %r1,%r1,%r1
1203:
1204: x244: sh2add %arg0,%arg0,%r1 ! sh1add %r1,%r1,%r1 ! b e_4t0 ! sh2add %r1,%arg0,%r1
1205:
1206: x245: sh3add %arg0,0,%r1 ! sh1add %r1,%r1,%r1 ! b e_5t0 ! sh1add %r1,%arg0,%r1
1207:
1208: x246: sh2add %arg0,%arg0,%r1 ! sh3add %r1,%arg0,%r1 ! b e_2t0 ! sh1add %r1,%r1,%r1
1209:
1210: x247: sh2add %arg0,%arg0,%r1 ! sh3add %r1,%arg0,%r1 ! b e_2t0a0 ! sh1add %r1,%r1,%r1
1211:
1212: x248: zdep %arg0,26,27,%r1 ! sub %r1,%arg0,%r1 ! b e_shift ! sh3add %r1,%ret1,%ret1
1213:
1214: x249: zdep %arg0,26,27,%r1 ! sub %r1,%arg0,%r1 ! b e_t0 ! sh3add %r1,%arg0,%r1
1215:
1216: x250: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_2t0 ! sh2add %r1,%r1,%r1
1217:
1218: x251: sh2add %arg0,%arg0,%r1 ! sh2add %r1,%r1,%r1 ! b e_2t0a0 ! sh2add %r1,%r1,%r1
1219:
1220: x252: zdep %arg0,25,26,%r1 ! sub %r1,%arg0,%r1 ! b e_shift ! sh2add %r1,%ret1,%ret1
1221:
1222: x253: zdep %arg0,25,26,%r1 ! sub %r1,%arg0,%r1 ! b e_t0 ! sh2add %r1,%arg0,%r1
1223:
1224: x254: zdep %arg0,24,25,%r1 ! sub %r1,%arg0,%r1 ! b e_shift ! sh1add %r1,%ret1,%ret1
1225:
1226: x255: zdep %arg0,23,24,%r1 ! comb,<> %arg1,0,l0 ! sub %r1,%arg0,%r1 ! b,n ret_t0
1227:
1228: ;1040 insts before this.
1229: ret_t0: bv 0(%rp)
1230:
1231: e_t0: add %ret1,%r1,%ret1
1232:
1233: e_shift: comb,<> %arg1,0,l2
1234:
1235: zdep %arg0,23,24,%arg0 ; %arg0 <<= 8 ***********
1236: bv,n 0(%rp)
1237: e_t0ma0: comb,<> %arg1,0,l0
1238:
1239: sub %r1,%arg0,%r1
1240: bv 0(%rp)
1241: add %ret1,%r1,%ret1
1242: e_t0a0: comb,<> %arg1,0,l0
1243:
1244: add %r1,%arg0,%r1
1245: bv 0(%rp)
1246: add %ret1,%r1,%ret1
1247: e_t02a0: comb,<> %arg1,0,l0
1248:
1249: sh1add %arg0,%r1,%r1
1250: bv 0(%rp)
1251: add %ret1,%r1,%ret1
1252: e_t04a0: comb,<> %arg1,0,l0
1253:
1254: sh2add %arg0,%r1,%r1
1255: bv 0(%rp)
1256: add %ret1,%r1,%ret1
1257: e_2t0: comb,<> %arg1,0,l1
1258:
1259: sh1add %r1,%ret1,%ret1
1260: bv,n 0(%rp)
1261: e_2t0a0: comb,<> %arg1,0,l0
1262:
1263: sh1add %r1,%arg0,%r1
1264: bv 0(%rp)
1265: add %ret1,%r1,%ret1
1266: e2t04a0: sh1add %arg0,%r1,%r1
1267:
1268: comb,<> %arg1,0,l1
1269: sh1add %r1,%ret1,%ret1
1270: bv,n 0(%rp)
1271: e_3t0: comb,<> %arg1,0,l0
1272:
1273: sh1add %r1,%r1,%r1
1274: bv 0(%rp)
1275: add %ret1,%r1,%ret1
1276: e_4t0: comb,<> %arg1,0,l1
1277:
1278: sh2add %r1,%ret1,%ret1
1279: bv,n 0(%rp)
1280: e_4t0a0: comb,<> %arg1,0,l0
1281:
1282: sh2add %r1,%arg0,%r1
1283: bv 0(%rp)
1284: add %ret1,%r1,%ret1
1285: e4t08a0: sh1add %arg0,%r1,%r1
1286:
1287: comb,<> %arg1,0,l1
1288: sh2add %r1,%ret1,%ret1
1289: bv,n 0(%rp)
1290: e_5t0: comb,<> %arg1,0,l0
1291:
1292: sh2add %r1,%r1,%r1
1293: bv 0(%rp)
1294: add %ret1,%r1,%ret1
1295: e_8t0: comb,<> %arg1,0,l1
1296:
1297: sh3add %r1,%ret1,%ret1
1298: bv,n 0(%rp)
1299: e_8t0a0: comb,<> %arg1,0,l0
1300:
1301: sh3add %r1,%arg0,%r1
1302: bv 0(%rp)
1303: add %ret1,%r1,%ret1
1304:
1305: .procend
1306: .end
1307:
; The $$divI_n routines are hand-tuned dividers for small constant
; divisors; the blr jump table in small_divisor below dispatches to them.
1308: 	.import $$divI_2,millicode
1309: 	.import $$divI_3,millicode
1310: 	.import $$divI_4,millicode
1311: 	.import $$divI_5,millicode
1312: 	.import $$divI_6,millicode
1313: 	.import $$divI_7,millicode
1314: 	.import $$divI_8,millicode
1315: 	.import $$divI_9,millicode
1316: 	.import $$divI_10,millicode
1317: 	.import $$divI_12,millicode
1318: 	.import $$divI_14,millicode
1319: 	.import $$divI_15,millicode
1320: 	.export $$divI,millicode
1321: 	.export $$divoI,millicode
; $$divoI / $$divI: signed 32-bit integer divide millicode.
;   in:   %arg0 = dividend, %arg1 = divisor
;   out:  %ret1 = quotient (truncated toward zero)
;   uses: %r1 as scratch; no stack (NO_CALLS)
; $$divoI is the overflow-checking entry: it screens divisor == -1 so
; that dividend==0x80000000 traps (see negative1) instead of wrapping.
1322: $$divoI:
1323: 	.proc
1324: 	.callinfo NO_CALLS
1325: 	comib,=,n -1,%arg1,negative1 ; when divisor == -1
; $$divI: plain entry; falls through to the unsigned-magnitude check.
1326: $$divI:
1327: 	comib,>>=,n 15,%arg1,small_divisor ; unsigned 15 >= divisor: jump table
1328: 	add,>= 0,%arg0,%ret1 ; move dividend, if %ret1 < 0,
1329: normal1:
1330: 	sub 0,%ret1,%ret1 ; make it positive
1331: 	sub 0,%arg1,%r1 ; clear carry,
1332: 	; negate the divisor
1333: 	ds 0,%r1,0 ; set V-bit to the comple-
1334: 	; ment of the divisor sign
1335: 	add %ret1,%ret1,%ret1 ; shift msb bit into carry
; 32 non-restoring divide steps: each ds conditionally adds/subtracts the
; divisor, each addc shifts the next quotient bit into %ret1.
1336: 	ds %r0,%arg1,%r1 ; 1st divide step, if no carry
1337: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1338: 	ds %r1,%arg1,%r1 ; 2nd divide step
1339: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1340: 	ds %r1,%arg1,%r1 ; 3rd divide step
1341: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1342: 	ds %r1,%arg1,%r1 ; 4th divide step
1343: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1344: 	ds %r1,%arg1,%r1 ; 5th divide step
1345: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1346: 	ds %r1,%arg1,%r1 ; 6th divide step
1347: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1348: 	ds %r1,%arg1,%r1 ; 7th divide step
1349: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1350: 	ds %r1,%arg1,%r1 ; 8th divide step
1351: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1352: 	ds %r1,%arg1,%r1 ; 9th divide step
1353: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1354: 	ds %r1,%arg1,%r1 ; 10th divide step
1355: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1356: 	ds %r1,%arg1,%r1 ; 11th divide step
1357: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1358: 	ds %r1,%arg1,%r1 ; 12th divide step
1359: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1360: 	ds %r1,%arg1,%r1 ; 13th divide step
1361: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1362: 	ds %r1,%arg1,%r1 ; 14th divide step
1363: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1364: 	ds %r1,%arg1,%r1 ; 15th divide step
1365: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1366: 	ds %r1,%arg1,%r1 ; 16th divide step
1367: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1368: 	ds %r1,%arg1,%r1 ; 17th divide step
1369: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1370: 	ds %r1,%arg1,%r1 ; 18th divide step
1371: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1372: 	ds %r1,%arg1,%r1 ; 19th divide step
1373: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1374: 	ds %r1,%arg1,%r1 ; 20th divide step
1375: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1376: 	ds %r1,%arg1,%r1 ; 21st divide step
1377: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1378: 	ds %r1,%arg1,%r1 ; 22nd divide step
1379: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1380: 	ds %r1,%arg1,%r1 ; 23rd divide step
1381: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1382: 	ds %r1,%arg1,%r1 ; 24th divide step
1383: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1384: 	ds %r1,%arg1,%r1 ; 25th divide step
1385: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1386: 	ds %r1,%arg1,%r1 ; 26th divide step
1387: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1388: 	ds %r1,%arg1,%r1 ; 27th divide step
1389: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1390: 	ds %r1,%arg1,%r1 ; 28th divide step
1391: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1392: 	ds %r1,%arg1,%r1 ; 29th divide step
1393: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1394: 	ds %r1,%arg1,%r1 ; 30th divide step
1395: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1396: 	ds %r1,%arg1,%r1 ; 31st divide step
1397: 	addc %ret1,%ret1,%ret1 ; shift %ret1 with/into carry
1398: 	ds %r1,%arg1,%r1 ; 32nd divide step,
1399: 	addc %ret1,%ret1,%ret1 ; shift last %ret1 bit into %ret1
1400: 	xor,>= %arg0,%arg1,0 ; get correct sign of quotient
1401: 	sub 0,%ret1,%ret1 ; based on operand signs
1402: 	bv,n 0(%rp) ; return; quotient in %ret1
1403: 	nop ; nullified delay slot
1404: ;______________________________________________________________________
; Divisor in 0..15: dispatch through a jump table of 2-instruction slots.
1405: small_divisor:
1406: 	depd %r0,31,32,%arg1 ; zero upper 32 bits of divisor (64-bit reg)
1407: 	blr,n %arg1,%r0 ; branch to table slot %arg1 (2 insns per slot)
1408: 	nop ; nullified delay slot
1409: ; table for divisor == 0,1, ... ,15
1410: 	addit,= 0,%arg1,%r0 ; trap if divisor == 0
1411: 	nop ; pad slot to 2 instructions
1412: 	bv %r0(%rp) ; divisor == 1
1413: 	copy %arg0,%ret1 ; delay slot: quotient = dividend
1414: 	b,n $$divI_2 ; divisor == 2
1415: 	nop
1416: 	b,n $$divI_3 ; divisor == 3
1417: 	nop
1418: 	b,n $$divI_4 ; divisor == 4
1419: 	nop
1420: 	b,n $$divI_5 ; divisor == 5
1421: 	nop
1422: 	b,n $$divI_6 ; divisor == 6
1423: 	nop
1424: 	b,n $$divI_7 ; divisor == 7
1425: 	nop
1426: 	b,n $$divI_8 ; divisor == 8
1427: 	nop
1428: 	b,n $$divI_9 ; divisor == 9
1429: 	nop
1430: 	b,n $$divI_10 ; divisor == 10
1431: 	nop
1432: 	b normal1 ; divisor == 11
1433: 	add,>= 0,%arg0,%ret1 ; delay slot: same dividend setup as $$divI
1434: 	b,n $$divI_12 ; divisor == 12
1435: 	nop
1436: 	b normal1 ; divisor == 13
1437: 	add,>= 0,%arg0,%ret1 ; delay slot: same dividend setup as $$divI
1438: 	b,n $$divI_14 ; divisor == 14
1439: 	nop
1440: 	b,n $$divI_15 ; divisor == 15
1441: 	nop
1442: ;______________________________________________________________________
; Divisor == -1 ($$divoI only): quotient is -dividend; the addo below
; traps on signed overflow for the one unrepresentable case.
1443: negative1:
1444: 	sub %r0,%arg0,%ret1 ; result is negation of dividend
1445: 	bv 0(%rp)
1446: 	addo %arg0,%arg1,%r0 ; trap iff dividend==0x80000000 && divisor==-1
1447: 	.procend
1448: 	.end
; CVSweb (end of CVS annotate listing)