3 * _udiv(ulong num, ulong den)
13 * for(i=0; den<quo; i++)
/*
 * core unsigned divide/modulo: numerator in 4(R1), denominator in 8(R1);
 * the quotient and remainder are stored back over the argument slots.
 * NOTE(review): excerpted listing -- interior lines of the loop are not
 * shown here; only comments have been changed.
 */
38 TEXT _udivmod(SB), NOPROF, $-4
41 MOVW 4(R1), R13 /* numerator */
42 MOVW 8(R1), R10 /* denominator */
45 MOVW R0, -1(R0) /* fault -- divide by zero (deliberate store to an invalid address) */
74 MOVW R12, 4(R1) /* quotient */
75 MOVW R13, 8(R1) /* remainder */
79 * save working registers
80 * and bring in num/den parameters
82 TEXT _unsarg(SB), NOPROF, $-4 /* NOTE(review): body not visible in this excerpt */
95 * save working registers
96 * and bring in absolute value
97 * of num/den parameters
99 TEXT _absarg(SB), NOPROF, $-4
110 MOVW R14, 4(R1) /* numerator (absolute value, per header above) */
117 MOVW R14, 8(R1) /* denominator (absolute value, per header above) */
121 * restore registers and
122 * return to original caller
125 TEXT _retarg(SB), NOPROF, $-4
133 JMP 8(R15) /* back to main sequence; R15 holds the call pc, +8 skips the call and its delay slot */
/* signed 32-bit quotient entry (sign handled below, cf. _absarg) */
142 TEXT _div(SB), NOPROF, $-4
143 SUB $((4+2+1)*4), R1 /* 4 reg save, 2 parameters, link */
150 MOVW 28(R1), R10 /* clean up the sign */
158 JMP 8(R15) /* not executed */
/* NOTE(review): presumably the unsigned counterpart of _div (no sign cleanup visible) -- confirm */
167 TEXT _divl(SB), NOPROF, $-4
168 SUB $((4+2+1)*4), R1 /* 4 reg save, 2 parameters, link (= 28 bytes) */
176 JMP 8(R15) /* not executed */
/* signed 32-bit remainder entry (sign handled below, cf. _absarg) */
185 TEXT _mod(SB), NOPROF, $-4
186 SUB $((4+2+1)*4), R1 /* 4 reg save, 2 parameters, link */
193 MOVW 28(R1), R10 /* clean up the sign */
200 JMP 8(R15) /* not executed */
/* NOTE(review): presumably the unsigned counterpart of _mod (no sign cleanup visible) -- confirm */
209 TEXT _modl(SB), NOPROF, $-4
210 SUB $((4+2+1)*4), R1 /* 4 reg save, 2 parameters, link */
219 JMP 8(R15) /* not executed */
222 * special calling sequence:
224 * arg2 in 4(R1), will save R9
225 * nothing in 0(R1), will save R8
/* NOTE(review): excerpted listing -- interior lines (arg1 register, branches
 * between the two MULSCC sequences, epilogue) are not visible here. */
228 TEXT _mul+0(SB), NOPROF, $-4
231 * exchange stack and registers
244 ANDNCC $0xFFF, R14, R0 /* sets cc from R14 & ~0xFFF: does the multiplier fit in 12 bits? */
/*
 * full 32x32 multiply via SPARC multiply-step: one MULSCC per multiplier
 * bit; high word accumulates in R9, low word shifts into the Y register.
 */
246 ANDCC R0, R0, R9 /* zero partial product and clear N and V cond's */
249 MULSCC R8, R9, R9 /* 0 */
250 MULSCC R8, R9, R9 /* 1 */
251 MULSCC R8, R9, R9 /* 2 */
252 MULSCC R8, R9, R9 /* 3 */
253 MULSCC R8, R9, R9 /* 4 */
254 MULSCC R8, R9, R9 /* 5 */
255 MULSCC R8, R9, R9 /* 6 */
256 MULSCC R8, R9, R9 /* 7 */
257 MULSCC R8, R9, R9 /* 8 */
258 MULSCC R8, R9, R9 /* 9 */
259 MULSCC R8, R9, R9 /* 10 */
260 MULSCC R8, R9, R9 /* 11 */
261 MULSCC R8, R9, R9 /* 12 */
262 MULSCC R8, R9, R9 /* 13 */
263 MULSCC R8, R9, R9 /* 14 */
264 MULSCC R8, R9, R9 /* 15 */
265 MULSCC R8, R9, R9 /* 16 */
266 MULSCC R8, R9, R9 /* 17 */
267 MULSCC R8, R9, R9 /* 18 */
268 MULSCC R8, R9, R9 /* 19 */
269 MULSCC R8, R9, R9 /* 20 */
270 MULSCC R8, R9, R9 /* 21 */
271 MULSCC R8, R9, R9 /* 22 */
272 MULSCC R8, R9, R9 /* 23 */
273 MULSCC R8, R9, R9 /* 24 */
274 MULSCC R8, R9, R9 /* 25 */
275 MULSCC R8, R9, R9 /* 26 */
276 MULSCC R8, R9, R9 /* 27 */
277 MULSCC R8, R9, R9 /* 28 */
278 MULSCC R8, R9, R9 /* 29 */
279 MULSCC R8, R9, R9 /* 30 */
280 MULSCC R8, R9, R9 /* 31 */
281 MULSCC R0, R9, R9 /* 32; shift only */
283 MOVW Y, R14 /* get low part */
/*
 * short path: multiplier fits in 12 bits (ANDNCC test above), so 12
 * multiply steps plus the final shift suffice.
 * NOTE(review): the selecting branch is not visible in this excerpt.
 */
287 ANDCC R0, R0, R9 /* zero partial product and clear N and V cond's */
288 MULSCC R8, R9, R9 /* 0 */
289 MULSCC R8, R9, R9 /* 1 */
290 MULSCC R8, R9, R9 /* 2 */
291 MULSCC R8, R9, R9 /* 3 */
292 MULSCC R8, R9, R9 /* 4 */
293 MULSCC R8, R9, R9 /* 5 */
294 MULSCC R8, R9, R9 /* 6 */
295 MULSCC R8, R9, R9 /* 7 */
296 MULSCC R8, R9, R9 /* 8 */
297 MULSCC R8, R9, R9 /* 9 */
298 MULSCC R8, R9, R9 /* 10 */
299 MULSCC R8, R9, R9 /* 11 */
300 MULSCC R0, R9, R9 /* 12; shift only */