1
2
3 package ssa
4
5 import "cmd/compile/internal/types"
6
7 func rewriteValueMIPS64(v *Value) bool {
8 switch v.Op {
9 case OpAbs:
10 v.Op = OpMIPS64ABSD
11 return true
12 case OpAdd16:
13 v.Op = OpMIPS64ADDV
14 return true
15 case OpAdd32:
16 v.Op = OpMIPS64ADDV
17 return true
18 case OpAdd32F:
19 v.Op = OpMIPS64ADDF
20 return true
21 case OpAdd64:
22 v.Op = OpMIPS64ADDV
23 return true
24 case OpAdd64F:
25 v.Op = OpMIPS64ADDD
26 return true
27 case OpAdd8:
28 v.Op = OpMIPS64ADDV
29 return true
30 case OpAddPtr:
31 v.Op = OpMIPS64ADDV
32 return true
33 case OpAddr:
34 return rewriteValueMIPS64_OpAddr(v)
35 case OpAnd16:
36 v.Op = OpMIPS64AND
37 return true
38 case OpAnd32:
39 v.Op = OpMIPS64AND
40 return true
41 case OpAnd64:
42 v.Op = OpMIPS64AND
43 return true
44 case OpAnd8:
45 v.Op = OpMIPS64AND
46 return true
47 case OpAndB:
48 v.Op = OpMIPS64AND
49 return true
50 case OpAtomicAdd32:
51 v.Op = OpMIPS64LoweredAtomicAdd32
52 return true
53 case OpAtomicAdd64:
54 v.Op = OpMIPS64LoweredAtomicAdd64
55 return true
56 case OpAtomicAnd32:
57 v.Op = OpMIPS64LoweredAtomicAnd32
58 return true
59 case OpAtomicAnd8:
60 return rewriteValueMIPS64_OpAtomicAnd8(v)
61 case OpAtomicCompareAndSwap32:
62 return rewriteValueMIPS64_OpAtomicCompareAndSwap32(v)
63 case OpAtomicCompareAndSwap64:
64 v.Op = OpMIPS64LoweredAtomicCas64
65 return true
66 case OpAtomicExchange32:
67 v.Op = OpMIPS64LoweredAtomicExchange32
68 return true
69 case OpAtomicExchange64:
70 v.Op = OpMIPS64LoweredAtomicExchange64
71 return true
72 case OpAtomicLoad32:
73 v.Op = OpMIPS64LoweredAtomicLoad32
74 return true
75 case OpAtomicLoad64:
76 v.Op = OpMIPS64LoweredAtomicLoad64
77 return true
78 case OpAtomicLoad8:
79 v.Op = OpMIPS64LoweredAtomicLoad8
80 return true
81 case OpAtomicLoadPtr:
82 v.Op = OpMIPS64LoweredAtomicLoad64
83 return true
84 case OpAtomicOr32:
85 v.Op = OpMIPS64LoweredAtomicOr32
86 return true
87 case OpAtomicOr8:
88 return rewriteValueMIPS64_OpAtomicOr8(v)
89 case OpAtomicStore32:
90 v.Op = OpMIPS64LoweredAtomicStore32
91 return true
92 case OpAtomicStore64:
93 v.Op = OpMIPS64LoweredAtomicStore64
94 return true
95 case OpAtomicStore8:
96 v.Op = OpMIPS64LoweredAtomicStore8
97 return true
98 case OpAtomicStorePtrNoWB:
99 v.Op = OpMIPS64LoweredAtomicStore64
100 return true
101 case OpAvg64u:
102 return rewriteValueMIPS64_OpAvg64u(v)
103 case OpClosureCall:
104 v.Op = OpMIPS64CALLclosure
105 return true
106 case OpCom16:
107 return rewriteValueMIPS64_OpCom16(v)
108 case OpCom32:
109 return rewriteValueMIPS64_OpCom32(v)
110 case OpCom64:
111 return rewriteValueMIPS64_OpCom64(v)
112 case OpCom8:
113 return rewriteValueMIPS64_OpCom8(v)
114 case OpConst16:
115 return rewriteValueMIPS64_OpConst16(v)
116 case OpConst32:
117 return rewriteValueMIPS64_OpConst32(v)
118 case OpConst32F:
119 return rewriteValueMIPS64_OpConst32F(v)
120 case OpConst64:
121 return rewriteValueMIPS64_OpConst64(v)
122 case OpConst64F:
123 return rewriteValueMIPS64_OpConst64F(v)
124 case OpConst8:
125 return rewriteValueMIPS64_OpConst8(v)
126 case OpConstBool:
127 return rewriteValueMIPS64_OpConstBool(v)
128 case OpConstNil:
129 return rewriteValueMIPS64_OpConstNil(v)
130 case OpCvt32Fto32:
131 v.Op = OpMIPS64TRUNCFW
132 return true
133 case OpCvt32Fto64:
134 v.Op = OpMIPS64TRUNCFV
135 return true
136 case OpCvt32Fto64F:
137 v.Op = OpMIPS64MOVFD
138 return true
139 case OpCvt32to32F:
140 v.Op = OpMIPS64MOVWF
141 return true
142 case OpCvt32to64F:
143 v.Op = OpMIPS64MOVWD
144 return true
145 case OpCvt64Fto32:
146 v.Op = OpMIPS64TRUNCDW
147 return true
148 case OpCvt64Fto32F:
149 v.Op = OpMIPS64MOVDF
150 return true
151 case OpCvt64Fto64:
152 v.Op = OpMIPS64TRUNCDV
153 return true
154 case OpCvt64to32F:
155 v.Op = OpMIPS64MOVVF
156 return true
157 case OpCvt64to64F:
158 v.Op = OpMIPS64MOVVD
159 return true
160 case OpCvtBoolToUint8:
161 v.Op = OpCopy
162 return true
163 case OpDiv16:
164 return rewriteValueMIPS64_OpDiv16(v)
165 case OpDiv16u:
166 return rewriteValueMIPS64_OpDiv16u(v)
167 case OpDiv32:
168 return rewriteValueMIPS64_OpDiv32(v)
169 case OpDiv32F:
170 v.Op = OpMIPS64DIVF
171 return true
172 case OpDiv32u:
173 return rewriteValueMIPS64_OpDiv32u(v)
174 case OpDiv64:
175 return rewriteValueMIPS64_OpDiv64(v)
176 case OpDiv64F:
177 v.Op = OpMIPS64DIVD
178 return true
179 case OpDiv64u:
180 return rewriteValueMIPS64_OpDiv64u(v)
181 case OpDiv8:
182 return rewriteValueMIPS64_OpDiv8(v)
183 case OpDiv8u:
184 return rewriteValueMIPS64_OpDiv8u(v)
185 case OpEq16:
186 return rewriteValueMIPS64_OpEq16(v)
187 case OpEq32:
188 return rewriteValueMIPS64_OpEq32(v)
189 case OpEq32F:
190 return rewriteValueMIPS64_OpEq32F(v)
191 case OpEq64:
192 return rewriteValueMIPS64_OpEq64(v)
193 case OpEq64F:
194 return rewriteValueMIPS64_OpEq64F(v)
195 case OpEq8:
196 return rewriteValueMIPS64_OpEq8(v)
197 case OpEqB:
198 return rewriteValueMIPS64_OpEqB(v)
199 case OpEqPtr:
200 return rewriteValueMIPS64_OpEqPtr(v)
201 case OpGetCallerPC:
202 v.Op = OpMIPS64LoweredGetCallerPC
203 return true
204 case OpGetCallerSP:
205 v.Op = OpMIPS64LoweredGetCallerSP
206 return true
207 case OpGetClosurePtr:
208 v.Op = OpMIPS64LoweredGetClosurePtr
209 return true
210 case OpHmul32:
211 return rewriteValueMIPS64_OpHmul32(v)
212 case OpHmul32u:
213 return rewriteValueMIPS64_OpHmul32u(v)
214 case OpHmul64:
215 return rewriteValueMIPS64_OpHmul64(v)
216 case OpHmul64u:
217 return rewriteValueMIPS64_OpHmul64u(v)
218 case OpInterCall:
219 v.Op = OpMIPS64CALLinter
220 return true
221 case OpIsInBounds:
222 return rewriteValueMIPS64_OpIsInBounds(v)
223 case OpIsNonNil:
224 return rewriteValueMIPS64_OpIsNonNil(v)
225 case OpIsSliceInBounds:
226 return rewriteValueMIPS64_OpIsSliceInBounds(v)
227 case OpLeq16:
228 return rewriteValueMIPS64_OpLeq16(v)
229 case OpLeq16U:
230 return rewriteValueMIPS64_OpLeq16U(v)
231 case OpLeq32:
232 return rewriteValueMIPS64_OpLeq32(v)
233 case OpLeq32F:
234 return rewriteValueMIPS64_OpLeq32F(v)
235 case OpLeq32U:
236 return rewriteValueMIPS64_OpLeq32U(v)
237 case OpLeq64:
238 return rewriteValueMIPS64_OpLeq64(v)
239 case OpLeq64F:
240 return rewriteValueMIPS64_OpLeq64F(v)
241 case OpLeq64U:
242 return rewriteValueMIPS64_OpLeq64U(v)
243 case OpLeq8:
244 return rewriteValueMIPS64_OpLeq8(v)
245 case OpLeq8U:
246 return rewriteValueMIPS64_OpLeq8U(v)
247 case OpLess16:
248 return rewriteValueMIPS64_OpLess16(v)
249 case OpLess16U:
250 return rewriteValueMIPS64_OpLess16U(v)
251 case OpLess32:
252 return rewriteValueMIPS64_OpLess32(v)
253 case OpLess32F:
254 return rewriteValueMIPS64_OpLess32F(v)
255 case OpLess32U:
256 return rewriteValueMIPS64_OpLess32U(v)
257 case OpLess64:
258 return rewriteValueMIPS64_OpLess64(v)
259 case OpLess64F:
260 return rewriteValueMIPS64_OpLess64F(v)
261 case OpLess64U:
262 return rewriteValueMIPS64_OpLess64U(v)
263 case OpLess8:
264 return rewriteValueMIPS64_OpLess8(v)
265 case OpLess8U:
266 return rewriteValueMIPS64_OpLess8U(v)
267 case OpLoad:
268 return rewriteValueMIPS64_OpLoad(v)
269 case OpLocalAddr:
270 return rewriteValueMIPS64_OpLocalAddr(v)
271 case OpLsh16x16:
272 return rewriteValueMIPS64_OpLsh16x16(v)
273 case OpLsh16x32:
274 return rewriteValueMIPS64_OpLsh16x32(v)
275 case OpLsh16x64:
276 return rewriteValueMIPS64_OpLsh16x64(v)
277 case OpLsh16x8:
278 return rewriteValueMIPS64_OpLsh16x8(v)
279 case OpLsh32x16:
280 return rewriteValueMIPS64_OpLsh32x16(v)
281 case OpLsh32x32:
282 return rewriteValueMIPS64_OpLsh32x32(v)
283 case OpLsh32x64:
284 return rewriteValueMIPS64_OpLsh32x64(v)
285 case OpLsh32x8:
286 return rewriteValueMIPS64_OpLsh32x8(v)
287 case OpLsh64x16:
288 return rewriteValueMIPS64_OpLsh64x16(v)
289 case OpLsh64x32:
290 return rewriteValueMIPS64_OpLsh64x32(v)
291 case OpLsh64x64:
292 return rewriteValueMIPS64_OpLsh64x64(v)
293 case OpLsh64x8:
294 return rewriteValueMIPS64_OpLsh64x8(v)
295 case OpLsh8x16:
296 return rewriteValueMIPS64_OpLsh8x16(v)
297 case OpLsh8x32:
298 return rewriteValueMIPS64_OpLsh8x32(v)
299 case OpLsh8x64:
300 return rewriteValueMIPS64_OpLsh8x64(v)
301 case OpLsh8x8:
302 return rewriteValueMIPS64_OpLsh8x8(v)
303 case OpMIPS64ADDV:
304 return rewriteValueMIPS64_OpMIPS64ADDV(v)
305 case OpMIPS64ADDVconst:
306 return rewriteValueMIPS64_OpMIPS64ADDVconst(v)
307 case OpMIPS64AND:
308 return rewriteValueMIPS64_OpMIPS64AND(v)
309 case OpMIPS64ANDconst:
310 return rewriteValueMIPS64_OpMIPS64ANDconst(v)
311 case OpMIPS64LoweredAtomicAdd32:
312 return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v)
313 case OpMIPS64LoweredAtomicAdd64:
314 return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v)
315 case OpMIPS64LoweredAtomicStore32:
316 return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v)
317 case OpMIPS64LoweredAtomicStore64:
318 return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v)
319 case OpMIPS64MOVBUload:
320 return rewriteValueMIPS64_OpMIPS64MOVBUload(v)
321 case OpMIPS64MOVBUreg:
322 return rewriteValueMIPS64_OpMIPS64MOVBUreg(v)
323 case OpMIPS64MOVBload:
324 return rewriteValueMIPS64_OpMIPS64MOVBload(v)
325 case OpMIPS64MOVBreg:
326 return rewriteValueMIPS64_OpMIPS64MOVBreg(v)
327 case OpMIPS64MOVBstore:
328 return rewriteValueMIPS64_OpMIPS64MOVBstore(v)
329 case OpMIPS64MOVBstorezero:
330 return rewriteValueMIPS64_OpMIPS64MOVBstorezero(v)
331 case OpMIPS64MOVDload:
332 return rewriteValueMIPS64_OpMIPS64MOVDload(v)
333 case OpMIPS64MOVDstore:
334 return rewriteValueMIPS64_OpMIPS64MOVDstore(v)
335 case OpMIPS64MOVFload:
336 return rewriteValueMIPS64_OpMIPS64MOVFload(v)
337 case OpMIPS64MOVFstore:
338 return rewriteValueMIPS64_OpMIPS64MOVFstore(v)
339 case OpMIPS64MOVHUload:
340 return rewriteValueMIPS64_OpMIPS64MOVHUload(v)
341 case OpMIPS64MOVHUreg:
342 return rewriteValueMIPS64_OpMIPS64MOVHUreg(v)
343 case OpMIPS64MOVHload:
344 return rewriteValueMIPS64_OpMIPS64MOVHload(v)
345 case OpMIPS64MOVHreg:
346 return rewriteValueMIPS64_OpMIPS64MOVHreg(v)
347 case OpMIPS64MOVHstore:
348 return rewriteValueMIPS64_OpMIPS64MOVHstore(v)
349 case OpMIPS64MOVHstorezero:
350 return rewriteValueMIPS64_OpMIPS64MOVHstorezero(v)
351 case OpMIPS64MOVVload:
352 return rewriteValueMIPS64_OpMIPS64MOVVload(v)
353 case OpMIPS64MOVVnop:
354 return rewriteValueMIPS64_OpMIPS64MOVVnop(v)
355 case OpMIPS64MOVVreg:
356 return rewriteValueMIPS64_OpMIPS64MOVVreg(v)
357 case OpMIPS64MOVVstore:
358 return rewriteValueMIPS64_OpMIPS64MOVVstore(v)
359 case OpMIPS64MOVVstorezero:
360 return rewriteValueMIPS64_OpMIPS64MOVVstorezero(v)
361 case OpMIPS64MOVWUload:
362 return rewriteValueMIPS64_OpMIPS64MOVWUload(v)
363 case OpMIPS64MOVWUreg:
364 return rewriteValueMIPS64_OpMIPS64MOVWUreg(v)
365 case OpMIPS64MOVWload:
366 return rewriteValueMIPS64_OpMIPS64MOVWload(v)
367 case OpMIPS64MOVWreg:
368 return rewriteValueMIPS64_OpMIPS64MOVWreg(v)
369 case OpMIPS64MOVWstore:
370 return rewriteValueMIPS64_OpMIPS64MOVWstore(v)
371 case OpMIPS64MOVWstorezero:
372 return rewriteValueMIPS64_OpMIPS64MOVWstorezero(v)
373 case OpMIPS64NEGV:
374 return rewriteValueMIPS64_OpMIPS64NEGV(v)
375 case OpMIPS64NOR:
376 return rewriteValueMIPS64_OpMIPS64NOR(v)
377 case OpMIPS64NORconst:
378 return rewriteValueMIPS64_OpMIPS64NORconst(v)
379 case OpMIPS64OR:
380 return rewriteValueMIPS64_OpMIPS64OR(v)
381 case OpMIPS64ORconst:
382 return rewriteValueMIPS64_OpMIPS64ORconst(v)
383 case OpMIPS64SGT:
384 return rewriteValueMIPS64_OpMIPS64SGT(v)
385 case OpMIPS64SGTU:
386 return rewriteValueMIPS64_OpMIPS64SGTU(v)
387 case OpMIPS64SGTUconst:
388 return rewriteValueMIPS64_OpMIPS64SGTUconst(v)
389 case OpMIPS64SGTconst:
390 return rewriteValueMIPS64_OpMIPS64SGTconst(v)
391 case OpMIPS64SLLV:
392 return rewriteValueMIPS64_OpMIPS64SLLV(v)
393 case OpMIPS64SLLVconst:
394 return rewriteValueMIPS64_OpMIPS64SLLVconst(v)
395 case OpMIPS64SRAV:
396 return rewriteValueMIPS64_OpMIPS64SRAV(v)
397 case OpMIPS64SRAVconst:
398 return rewriteValueMIPS64_OpMIPS64SRAVconst(v)
399 case OpMIPS64SRLV:
400 return rewriteValueMIPS64_OpMIPS64SRLV(v)
401 case OpMIPS64SRLVconst:
402 return rewriteValueMIPS64_OpMIPS64SRLVconst(v)
403 case OpMIPS64SUBV:
404 return rewriteValueMIPS64_OpMIPS64SUBV(v)
405 case OpMIPS64SUBVconst:
406 return rewriteValueMIPS64_OpMIPS64SUBVconst(v)
407 case OpMIPS64XOR:
408 return rewriteValueMIPS64_OpMIPS64XOR(v)
409 case OpMIPS64XORconst:
410 return rewriteValueMIPS64_OpMIPS64XORconst(v)
411 case OpMod16:
412 return rewriteValueMIPS64_OpMod16(v)
413 case OpMod16u:
414 return rewriteValueMIPS64_OpMod16u(v)
415 case OpMod32:
416 return rewriteValueMIPS64_OpMod32(v)
417 case OpMod32u:
418 return rewriteValueMIPS64_OpMod32u(v)
419 case OpMod64:
420 return rewriteValueMIPS64_OpMod64(v)
421 case OpMod64u:
422 return rewriteValueMIPS64_OpMod64u(v)
423 case OpMod8:
424 return rewriteValueMIPS64_OpMod8(v)
425 case OpMod8u:
426 return rewriteValueMIPS64_OpMod8u(v)
427 case OpMove:
428 return rewriteValueMIPS64_OpMove(v)
429 case OpMul16:
430 return rewriteValueMIPS64_OpMul16(v)
431 case OpMul32:
432 return rewriteValueMIPS64_OpMul32(v)
433 case OpMul32F:
434 v.Op = OpMIPS64MULF
435 return true
436 case OpMul64:
437 return rewriteValueMIPS64_OpMul64(v)
438 case OpMul64F:
439 v.Op = OpMIPS64MULD
440 return true
441 case OpMul64uhilo:
442 v.Op = OpMIPS64MULVU
443 return true
444 case OpMul8:
445 return rewriteValueMIPS64_OpMul8(v)
446 case OpNeg16:
447 v.Op = OpMIPS64NEGV
448 return true
449 case OpNeg32:
450 v.Op = OpMIPS64NEGV
451 return true
452 case OpNeg32F:
453 v.Op = OpMIPS64NEGF
454 return true
455 case OpNeg64:
456 v.Op = OpMIPS64NEGV
457 return true
458 case OpNeg64F:
459 v.Op = OpMIPS64NEGD
460 return true
461 case OpNeg8:
462 v.Op = OpMIPS64NEGV
463 return true
464 case OpNeq16:
465 return rewriteValueMIPS64_OpNeq16(v)
466 case OpNeq32:
467 return rewriteValueMIPS64_OpNeq32(v)
468 case OpNeq32F:
469 return rewriteValueMIPS64_OpNeq32F(v)
470 case OpNeq64:
471 return rewriteValueMIPS64_OpNeq64(v)
472 case OpNeq64F:
473 return rewriteValueMIPS64_OpNeq64F(v)
474 case OpNeq8:
475 return rewriteValueMIPS64_OpNeq8(v)
476 case OpNeqB:
477 v.Op = OpMIPS64XOR
478 return true
479 case OpNeqPtr:
480 return rewriteValueMIPS64_OpNeqPtr(v)
481 case OpNilCheck:
482 v.Op = OpMIPS64LoweredNilCheck
483 return true
484 case OpNot:
485 return rewriteValueMIPS64_OpNot(v)
486 case OpOffPtr:
487 return rewriteValueMIPS64_OpOffPtr(v)
488 case OpOr16:
489 v.Op = OpMIPS64OR
490 return true
491 case OpOr32:
492 v.Op = OpMIPS64OR
493 return true
494 case OpOr64:
495 v.Op = OpMIPS64OR
496 return true
497 case OpOr8:
498 v.Op = OpMIPS64OR
499 return true
500 case OpOrB:
501 v.Op = OpMIPS64OR
502 return true
503 case OpPanicBounds:
504 return rewriteValueMIPS64_OpPanicBounds(v)
505 case OpRotateLeft16:
506 return rewriteValueMIPS64_OpRotateLeft16(v)
507 case OpRotateLeft32:
508 return rewriteValueMIPS64_OpRotateLeft32(v)
509 case OpRotateLeft64:
510 return rewriteValueMIPS64_OpRotateLeft64(v)
511 case OpRotateLeft8:
512 return rewriteValueMIPS64_OpRotateLeft8(v)
513 case OpRound32F:
514 v.Op = OpCopy
515 return true
516 case OpRound64F:
517 v.Op = OpCopy
518 return true
519 case OpRsh16Ux16:
520 return rewriteValueMIPS64_OpRsh16Ux16(v)
521 case OpRsh16Ux32:
522 return rewriteValueMIPS64_OpRsh16Ux32(v)
523 case OpRsh16Ux64:
524 return rewriteValueMIPS64_OpRsh16Ux64(v)
525 case OpRsh16Ux8:
526 return rewriteValueMIPS64_OpRsh16Ux8(v)
527 case OpRsh16x16:
528 return rewriteValueMIPS64_OpRsh16x16(v)
529 case OpRsh16x32:
530 return rewriteValueMIPS64_OpRsh16x32(v)
531 case OpRsh16x64:
532 return rewriteValueMIPS64_OpRsh16x64(v)
533 case OpRsh16x8:
534 return rewriteValueMIPS64_OpRsh16x8(v)
535 case OpRsh32Ux16:
536 return rewriteValueMIPS64_OpRsh32Ux16(v)
537 case OpRsh32Ux32:
538 return rewriteValueMIPS64_OpRsh32Ux32(v)
539 case OpRsh32Ux64:
540 return rewriteValueMIPS64_OpRsh32Ux64(v)
541 case OpRsh32Ux8:
542 return rewriteValueMIPS64_OpRsh32Ux8(v)
543 case OpRsh32x16:
544 return rewriteValueMIPS64_OpRsh32x16(v)
545 case OpRsh32x32:
546 return rewriteValueMIPS64_OpRsh32x32(v)
547 case OpRsh32x64:
548 return rewriteValueMIPS64_OpRsh32x64(v)
549 case OpRsh32x8:
550 return rewriteValueMIPS64_OpRsh32x8(v)
551 case OpRsh64Ux16:
552 return rewriteValueMIPS64_OpRsh64Ux16(v)
553 case OpRsh64Ux32:
554 return rewriteValueMIPS64_OpRsh64Ux32(v)
555 case OpRsh64Ux64:
556 return rewriteValueMIPS64_OpRsh64Ux64(v)
557 case OpRsh64Ux8:
558 return rewriteValueMIPS64_OpRsh64Ux8(v)
559 case OpRsh64x16:
560 return rewriteValueMIPS64_OpRsh64x16(v)
561 case OpRsh64x32:
562 return rewriteValueMIPS64_OpRsh64x32(v)
563 case OpRsh64x64:
564 return rewriteValueMIPS64_OpRsh64x64(v)
565 case OpRsh64x8:
566 return rewriteValueMIPS64_OpRsh64x8(v)
567 case OpRsh8Ux16:
568 return rewriteValueMIPS64_OpRsh8Ux16(v)
569 case OpRsh8Ux32:
570 return rewriteValueMIPS64_OpRsh8Ux32(v)
571 case OpRsh8Ux64:
572 return rewriteValueMIPS64_OpRsh8Ux64(v)
573 case OpRsh8Ux8:
574 return rewriteValueMIPS64_OpRsh8Ux8(v)
575 case OpRsh8x16:
576 return rewriteValueMIPS64_OpRsh8x16(v)
577 case OpRsh8x32:
578 return rewriteValueMIPS64_OpRsh8x32(v)
579 case OpRsh8x64:
580 return rewriteValueMIPS64_OpRsh8x64(v)
581 case OpRsh8x8:
582 return rewriteValueMIPS64_OpRsh8x8(v)
583 case OpSelect0:
584 return rewriteValueMIPS64_OpSelect0(v)
585 case OpSelect1:
586 return rewriteValueMIPS64_OpSelect1(v)
587 case OpSignExt16to32:
588 v.Op = OpMIPS64MOVHreg
589 return true
590 case OpSignExt16to64:
591 v.Op = OpMIPS64MOVHreg
592 return true
593 case OpSignExt32to64:
594 v.Op = OpMIPS64MOVWreg
595 return true
596 case OpSignExt8to16:
597 v.Op = OpMIPS64MOVBreg
598 return true
599 case OpSignExt8to32:
600 v.Op = OpMIPS64MOVBreg
601 return true
602 case OpSignExt8to64:
603 v.Op = OpMIPS64MOVBreg
604 return true
605 case OpSlicemask:
606 return rewriteValueMIPS64_OpSlicemask(v)
607 case OpSqrt:
608 v.Op = OpMIPS64SQRTD
609 return true
610 case OpSqrt32:
611 v.Op = OpMIPS64SQRTF
612 return true
613 case OpStaticCall:
614 v.Op = OpMIPS64CALLstatic
615 return true
616 case OpStore:
617 return rewriteValueMIPS64_OpStore(v)
618 case OpSub16:
619 v.Op = OpMIPS64SUBV
620 return true
621 case OpSub32:
622 v.Op = OpMIPS64SUBV
623 return true
624 case OpSub32F:
625 v.Op = OpMIPS64SUBF
626 return true
627 case OpSub64:
628 v.Op = OpMIPS64SUBV
629 return true
630 case OpSub64F:
631 v.Op = OpMIPS64SUBD
632 return true
633 case OpSub8:
634 v.Op = OpMIPS64SUBV
635 return true
636 case OpSubPtr:
637 v.Op = OpMIPS64SUBV
638 return true
639 case OpTailCall:
640 v.Op = OpMIPS64CALLtail
641 return true
642 case OpTrunc16to8:
643 v.Op = OpCopy
644 return true
645 case OpTrunc32to16:
646 v.Op = OpCopy
647 return true
648 case OpTrunc32to8:
649 v.Op = OpCopy
650 return true
651 case OpTrunc64to16:
652 v.Op = OpCopy
653 return true
654 case OpTrunc64to32:
655 v.Op = OpCopy
656 return true
657 case OpTrunc64to8:
658 v.Op = OpCopy
659 return true
660 case OpWB:
661 v.Op = OpMIPS64LoweredWB
662 return true
663 case OpXor16:
664 v.Op = OpMIPS64XOR
665 return true
666 case OpXor32:
667 v.Op = OpMIPS64XOR
668 return true
669 case OpXor64:
670 v.Op = OpMIPS64XOR
671 return true
672 case OpXor8:
673 v.Op = OpMIPS64XOR
674 return true
675 case OpZero:
676 return rewriteValueMIPS64_OpZero(v)
677 case OpZeroExt16to32:
678 v.Op = OpMIPS64MOVHUreg
679 return true
680 case OpZeroExt16to64:
681 v.Op = OpMIPS64MOVHUreg
682 return true
683 case OpZeroExt32to64:
684 v.Op = OpMIPS64MOVWUreg
685 return true
686 case OpZeroExt8to16:
687 v.Op = OpMIPS64MOVBUreg
688 return true
689 case OpZeroExt8to32:
690 v.Op = OpMIPS64MOVBUreg
691 return true
692 case OpZeroExt8to64:
693 v.Op = OpMIPS64MOVBUreg
694 return true
695 }
696 return false
697 }
698 func rewriteValueMIPS64_OpAddr(v *Value) bool {
699 v_0 := v.Args[0]
700
701
702 for {
703 sym := auxToSym(v.Aux)
704 base := v_0
705 v.reset(OpMIPS64MOVVaddr)
706 v.Aux = symToAux(sym)
707 v.AddArg(base)
708 return true
709 }
710 }
711 func rewriteValueMIPS64_OpAtomicAnd8(v *Value) bool {
712 v_2 := v.Args[2]
713 v_1 := v.Args[1]
714 v_0 := v.Args[0]
715 b := v.Block
716 config := b.Func.Config
717 typ := &b.Func.Config.Types
718
719
720
721 for {
722 ptr := v_0
723 val := v_1
724 mem := v_2
725 if !(!config.BigEndian) {
726 break
727 }
728 v.reset(OpMIPS64LoweredAtomicAnd32)
729 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
730 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
731 v1.AuxInt = int64ToAuxInt(^3)
732 v0.AddArg2(v1, ptr)
733 v2 := b.NewValue0(v.Pos, OpMIPS64OR, typ.UInt64)
734 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
735 v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
736 v4.AddArg(val)
737 v5 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
738 v5.AuxInt = int64ToAuxInt(3)
739 v6 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
740 v6.AuxInt = int64ToAuxInt(3)
741 v6.AddArg(ptr)
742 v5.AddArg(v6)
743 v3.AddArg2(v4, v5)
744 v7 := b.NewValue0(v.Pos, OpMIPS64NORconst, typ.UInt64)
745 v7.AuxInt = int64ToAuxInt(0)
746 v8 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt64)
747 v9 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
748 v9.AuxInt = int64ToAuxInt(0xff)
749 v8.AddArg2(v9, v5)
750 v7.AddArg(v8)
751 v2.AddArg2(v3, v7)
752 v.AddArg3(v0, v2, mem)
753 return true
754 }
755
756
757
758 for {
759 ptr := v_0
760 val := v_1
761 mem := v_2
762 if !(config.BigEndian) {
763 break
764 }
765 v.reset(OpMIPS64LoweredAtomicAnd32)
766 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
767 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
768 v1.AuxInt = int64ToAuxInt(^3)
769 v0.AddArg2(v1, ptr)
770 v2 := b.NewValue0(v.Pos, OpMIPS64OR, typ.UInt64)
771 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
772 v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
773 v4.AddArg(val)
774 v5 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
775 v5.AuxInt = int64ToAuxInt(3)
776 v6 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
777 v6.AuxInt = int64ToAuxInt(3)
778 v7 := b.NewValue0(v.Pos, OpMIPS64XORconst, typ.UInt64)
779 v7.AuxInt = int64ToAuxInt(3)
780 v7.AddArg(ptr)
781 v6.AddArg(v7)
782 v5.AddArg(v6)
783 v3.AddArg2(v4, v5)
784 v8 := b.NewValue0(v.Pos, OpMIPS64NORconst, typ.UInt64)
785 v8.AuxInt = int64ToAuxInt(0)
786 v9 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt64)
787 v10 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
788 v10.AuxInt = int64ToAuxInt(0xff)
789 v9.AddArg2(v10, v5)
790 v8.AddArg(v9)
791 v2.AddArg2(v3, v8)
792 v.AddArg3(v0, v2, mem)
793 return true
794 }
795 return false
796 }
797 func rewriteValueMIPS64_OpAtomicCompareAndSwap32(v *Value) bool {
798 v_3 := v.Args[3]
799 v_2 := v.Args[2]
800 v_1 := v.Args[1]
801 v_0 := v.Args[0]
802 b := v.Block
803 typ := &b.Func.Config.Types
804
805
806 for {
807 ptr := v_0
808 old := v_1
809 new := v_2
810 mem := v_3
811 v.reset(OpMIPS64LoweredAtomicCas32)
812 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
813 v0.AddArg(old)
814 v.AddArg4(ptr, v0, new, mem)
815 return true
816 }
817 }
818 func rewriteValueMIPS64_OpAtomicOr8(v *Value) bool {
819 v_2 := v.Args[2]
820 v_1 := v.Args[1]
821 v_0 := v.Args[0]
822 b := v.Block
823 config := b.Func.Config
824 typ := &b.Func.Config.Types
825
826
827
828 for {
829 ptr := v_0
830 val := v_1
831 mem := v_2
832 if !(!config.BigEndian) {
833 break
834 }
835 v.reset(OpMIPS64LoweredAtomicOr32)
836 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
837 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
838 v1.AuxInt = int64ToAuxInt(^3)
839 v0.AddArg2(v1, ptr)
840 v2 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
841 v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
842 v3.AddArg(val)
843 v4 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
844 v4.AuxInt = int64ToAuxInt(3)
845 v5 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
846 v5.AuxInt = int64ToAuxInt(3)
847 v5.AddArg(ptr)
848 v4.AddArg(v5)
849 v2.AddArg2(v3, v4)
850 v.AddArg3(v0, v2, mem)
851 return true
852 }
853
854
855
856 for {
857 ptr := v_0
858 val := v_1
859 mem := v_2
860 if !(config.BigEndian) {
861 break
862 }
863 v.reset(OpMIPS64LoweredAtomicOr32)
864 v0 := b.NewValue0(v.Pos, OpMIPS64AND, typ.UInt32Ptr)
865 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
866 v1.AuxInt = int64ToAuxInt(^3)
867 v0.AddArg2(v1, ptr)
868 v2 := b.NewValue0(v.Pos, OpMIPS64SLLV, typ.UInt32)
869 v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
870 v3.AddArg(val)
871 v4 := b.NewValue0(v.Pos, OpMIPS64SLLVconst, typ.UInt64)
872 v4.AuxInt = int64ToAuxInt(3)
873 v5 := b.NewValue0(v.Pos, OpMIPS64ANDconst, typ.UInt64)
874 v5.AuxInt = int64ToAuxInt(3)
875 v6 := b.NewValue0(v.Pos, OpMIPS64XORconst, typ.UInt64)
876 v6.AuxInt = int64ToAuxInt(3)
877 v6.AddArg(ptr)
878 v5.AddArg(v6)
879 v4.AddArg(v5)
880 v2.AddArg2(v3, v4)
881 v.AddArg3(v0, v2, mem)
882 return true
883 }
884 return false
885 }
886 func rewriteValueMIPS64_OpAvg64u(v *Value) bool {
887 v_1 := v.Args[1]
888 v_0 := v.Args[0]
889 b := v.Block
890
891
892 for {
893 t := v.Type
894 x := v_0
895 y := v_1
896 v.reset(OpMIPS64ADDV)
897 v0 := b.NewValue0(v.Pos, OpMIPS64SRLVconst, t)
898 v0.AuxInt = int64ToAuxInt(1)
899 v1 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
900 v1.AddArg2(x, y)
901 v0.AddArg(v1)
902 v.AddArg2(v0, y)
903 return true
904 }
905 }
906 func rewriteValueMIPS64_OpCom16(v *Value) bool {
907 v_0 := v.Args[0]
908 b := v.Block
909 typ := &b.Func.Config.Types
910
911
912 for {
913 x := v_0
914 v.reset(OpMIPS64NOR)
915 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
916 v0.AuxInt = int64ToAuxInt(0)
917 v.AddArg2(v0, x)
918 return true
919 }
920 }
921 func rewriteValueMIPS64_OpCom32(v *Value) bool {
922 v_0 := v.Args[0]
923 b := v.Block
924 typ := &b.Func.Config.Types
925
926
927 for {
928 x := v_0
929 v.reset(OpMIPS64NOR)
930 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
931 v0.AuxInt = int64ToAuxInt(0)
932 v.AddArg2(v0, x)
933 return true
934 }
935 }
936 func rewriteValueMIPS64_OpCom64(v *Value) bool {
937 v_0 := v.Args[0]
938 b := v.Block
939 typ := &b.Func.Config.Types
940
941
942 for {
943 x := v_0
944 v.reset(OpMIPS64NOR)
945 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
946 v0.AuxInt = int64ToAuxInt(0)
947 v.AddArg2(v0, x)
948 return true
949 }
950 }
951 func rewriteValueMIPS64_OpCom8(v *Value) bool {
952 v_0 := v.Args[0]
953 b := v.Block
954 typ := &b.Func.Config.Types
955
956
957 for {
958 x := v_0
959 v.reset(OpMIPS64NOR)
960 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
961 v0.AuxInt = int64ToAuxInt(0)
962 v.AddArg2(v0, x)
963 return true
964 }
965 }
966 func rewriteValueMIPS64_OpConst16(v *Value) bool {
967
968
969 for {
970 val := auxIntToInt16(v.AuxInt)
971 v.reset(OpMIPS64MOVVconst)
972 v.AuxInt = int64ToAuxInt(int64(val))
973 return true
974 }
975 }
976 func rewriteValueMIPS64_OpConst32(v *Value) bool {
977
978
979 for {
980 val := auxIntToInt32(v.AuxInt)
981 v.reset(OpMIPS64MOVVconst)
982 v.AuxInt = int64ToAuxInt(int64(val))
983 return true
984 }
985 }
986 func rewriteValueMIPS64_OpConst32F(v *Value) bool {
987
988
989 for {
990 val := auxIntToFloat32(v.AuxInt)
991 v.reset(OpMIPS64MOVFconst)
992 v.AuxInt = float64ToAuxInt(float64(val))
993 return true
994 }
995 }
996 func rewriteValueMIPS64_OpConst64(v *Value) bool {
997
998
999 for {
1000 val := auxIntToInt64(v.AuxInt)
1001 v.reset(OpMIPS64MOVVconst)
1002 v.AuxInt = int64ToAuxInt(int64(val))
1003 return true
1004 }
1005 }
1006 func rewriteValueMIPS64_OpConst64F(v *Value) bool {
1007
1008
1009 for {
1010 val := auxIntToFloat64(v.AuxInt)
1011 v.reset(OpMIPS64MOVDconst)
1012 v.AuxInt = float64ToAuxInt(float64(val))
1013 return true
1014 }
1015 }
1016 func rewriteValueMIPS64_OpConst8(v *Value) bool {
1017
1018
1019 for {
1020 val := auxIntToInt8(v.AuxInt)
1021 v.reset(OpMIPS64MOVVconst)
1022 v.AuxInt = int64ToAuxInt(int64(val))
1023 return true
1024 }
1025 }
1026 func rewriteValueMIPS64_OpConstBool(v *Value) bool {
1027
1028
1029 for {
1030 t := auxIntToBool(v.AuxInt)
1031 v.reset(OpMIPS64MOVVconst)
1032 v.AuxInt = int64ToAuxInt(int64(b2i(t)))
1033 return true
1034 }
1035 }
1036 func rewriteValueMIPS64_OpConstNil(v *Value) bool {
1037
1038
1039 for {
1040 v.reset(OpMIPS64MOVVconst)
1041 v.AuxInt = int64ToAuxInt(0)
1042 return true
1043 }
1044 }
1045 func rewriteValueMIPS64_OpDiv16(v *Value) bool {
1046 v_1 := v.Args[1]
1047 v_0 := v.Args[0]
1048 b := v.Block
1049 typ := &b.Func.Config.Types
1050
1051
1052 for {
1053 x := v_0
1054 y := v_1
1055 v.reset(OpSelect1)
1056 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1057 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1058 v1.AddArg(x)
1059 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1060 v2.AddArg(y)
1061 v0.AddArg2(v1, v2)
1062 v.AddArg(v0)
1063 return true
1064 }
1065 }
1066 func rewriteValueMIPS64_OpDiv16u(v *Value) bool {
1067 v_1 := v.Args[1]
1068 v_0 := v.Args[0]
1069 b := v.Block
1070 typ := &b.Func.Config.Types
1071
1072
1073 for {
1074 x := v_0
1075 y := v_1
1076 v.reset(OpSelect1)
1077 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1078 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1079 v1.AddArg(x)
1080 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1081 v2.AddArg(y)
1082 v0.AddArg2(v1, v2)
1083 v.AddArg(v0)
1084 return true
1085 }
1086 }
1087 func rewriteValueMIPS64_OpDiv32(v *Value) bool {
1088 v_1 := v.Args[1]
1089 v_0 := v.Args[0]
1090 b := v.Block
1091 typ := &b.Func.Config.Types
1092
1093
1094 for {
1095 x := v_0
1096 y := v_1
1097 v.reset(OpSelect1)
1098 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1099 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1100 v1.AddArg(x)
1101 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1102 v2.AddArg(y)
1103 v0.AddArg2(v1, v2)
1104 v.AddArg(v0)
1105 return true
1106 }
1107 }
1108 func rewriteValueMIPS64_OpDiv32u(v *Value) bool {
1109 v_1 := v.Args[1]
1110 v_0 := v.Args[0]
1111 b := v.Block
1112 typ := &b.Func.Config.Types
1113
1114
1115 for {
1116 x := v_0
1117 y := v_1
1118 v.reset(OpSelect1)
1119 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1120 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1121 v1.AddArg(x)
1122 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1123 v2.AddArg(y)
1124 v0.AddArg2(v1, v2)
1125 v.AddArg(v0)
1126 return true
1127 }
1128 }
1129 func rewriteValueMIPS64_OpDiv64(v *Value) bool {
1130 v_1 := v.Args[1]
1131 v_0 := v.Args[0]
1132 b := v.Block
1133 typ := &b.Func.Config.Types
1134
1135
1136 for {
1137 x := v_0
1138 y := v_1
1139 v.reset(OpSelect1)
1140 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1141 v0.AddArg2(x, y)
1142 v.AddArg(v0)
1143 return true
1144 }
1145 }
1146 func rewriteValueMIPS64_OpDiv64u(v *Value) bool {
1147 v_1 := v.Args[1]
1148 v_0 := v.Args[0]
1149 b := v.Block
1150 typ := &b.Func.Config.Types
1151
1152
1153 for {
1154 x := v_0
1155 y := v_1
1156 v.reset(OpSelect1)
1157 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1158 v0.AddArg2(x, y)
1159 v.AddArg(v0)
1160 return true
1161 }
1162 }
1163 func rewriteValueMIPS64_OpDiv8(v *Value) bool {
1164 v_1 := v.Args[1]
1165 v_0 := v.Args[0]
1166 b := v.Block
1167 typ := &b.Func.Config.Types
1168
1169
1170 for {
1171 x := v_0
1172 y := v_1
1173 v.reset(OpSelect1)
1174 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
1175 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1176 v1.AddArg(x)
1177 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1178 v2.AddArg(y)
1179 v0.AddArg2(v1, v2)
1180 v.AddArg(v0)
1181 return true
1182 }
1183 }
1184 func rewriteValueMIPS64_OpDiv8u(v *Value) bool {
1185 v_1 := v.Args[1]
1186 v_0 := v.Args[0]
1187 b := v.Block
1188 typ := &b.Func.Config.Types
1189
1190
1191 for {
1192 x := v_0
1193 y := v_1
1194 v.reset(OpSelect1)
1195 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1196 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1197 v1.AddArg(x)
1198 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1199 v2.AddArg(y)
1200 v0.AddArg2(v1, v2)
1201 v.AddArg(v0)
1202 return true
1203 }
1204 }
1205 func rewriteValueMIPS64_OpEq16(v *Value) bool {
1206 v_1 := v.Args[1]
1207 v_0 := v.Args[0]
1208 b := v.Block
1209 typ := &b.Func.Config.Types
1210
1211
1212 for {
1213 x := v_0
1214 y := v_1
1215 v.reset(OpMIPS64SGTU)
1216 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1217 v0.AuxInt = int64ToAuxInt(1)
1218 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1219 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1220 v2.AddArg(x)
1221 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1222 v3.AddArg(y)
1223 v1.AddArg2(v2, v3)
1224 v.AddArg2(v0, v1)
1225 return true
1226 }
1227 }
1228 func rewriteValueMIPS64_OpEq32(v *Value) bool {
1229 v_1 := v.Args[1]
1230 v_0 := v.Args[0]
1231 b := v.Block
1232 typ := &b.Func.Config.Types
1233
1234
1235 for {
1236 x := v_0
1237 y := v_1
1238 v.reset(OpMIPS64SGTU)
1239 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1240 v0.AuxInt = int64ToAuxInt(1)
1241 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1242 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1243 v2.AddArg(x)
1244 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1245 v3.AddArg(y)
1246 v1.AddArg2(v2, v3)
1247 v.AddArg2(v0, v1)
1248 return true
1249 }
1250 }
1251 func rewriteValueMIPS64_OpEq32F(v *Value) bool {
1252 v_1 := v.Args[1]
1253 v_0 := v.Args[0]
1254 b := v.Block
1255
1256
1257 for {
1258 x := v_0
1259 y := v_1
1260 v.reset(OpMIPS64FPFlagTrue)
1261 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
1262 v0.AddArg2(x, y)
1263 v.AddArg(v0)
1264 return true
1265 }
1266 }
1267 func rewriteValueMIPS64_OpEq64(v *Value) bool {
1268 v_1 := v.Args[1]
1269 v_0 := v.Args[0]
1270 b := v.Block
1271 typ := &b.Func.Config.Types
1272
1273
1274 for {
1275 x := v_0
1276 y := v_1
1277 v.reset(OpMIPS64SGTU)
1278 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1279 v0.AuxInt = int64ToAuxInt(1)
1280 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1281 v1.AddArg2(x, y)
1282 v.AddArg2(v0, v1)
1283 return true
1284 }
1285 }
1286 func rewriteValueMIPS64_OpEq64F(v *Value) bool {
1287 v_1 := v.Args[1]
1288 v_0 := v.Args[0]
1289 b := v.Block
1290
1291
1292 for {
1293 x := v_0
1294 y := v_1
1295 v.reset(OpMIPS64FPFlagTrue)
1296 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
1297 v0.AddArg2(x, y)
1298 v.AddArg(v0)
1299 return true
1300 }
1301 }
1302 func rewriteValueMIPS64_OpEq8(v *Value) bool {
1303 v_1 := v.Args[1]
1304 v_0 := v.Args[0]
1305 b := v.Block
1306 typ := &b.Func.Config.Types
1307
1308
1309 for {
1310 x := v_0
1311 y := v_1
1312 v.reset(OpMIPS64SGTU)
1313 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1314 v0.AuxInt = int64ToAuxInt(1)
1315 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1316 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1317 v2.AddArg(x)
1318 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1319 v3.AddArg(y)
1320 v1.AddArg2(v2, v3)
1321 v.AddArg2(v0, v1)
1322 return true
1323 }
1324 }
1325 func rewriteValueMIPS64_OpEqB(v *Value) bool {
1326 v_1 := v.Args[1]
1327 v_0 := v.Args[0]
1328 b := v.Block
1329 typ := &b.Func.Config.Types
1330
1331
1332 for {
1333 x := v_0
1334 y := v_1
1335 v.reset(OpMIPS64XOR)
1336 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1337 v0.AuxInt = int64ToAuxInt(1)
1338 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.Bool)
1339 v1.AddArg2(x, y)
1340 v.AddArg2(v0, v1)
1341 return true
1342 }
1343 }
1344 func rewriteValueMIPS64_OpEqPtr(v *Value) bool {
1345 v_1 := v.Args[1]
1346 v_0 := v.Args[0]
1347 b := v.Block
1348 typ := &b.Func.Config.Types
1349
1350
1351 for {
1352 x := v_0
1353 y := v_1
1354 v.reset(OpMIPS64SGTU)
1355 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1356 v0.AuxInt = int64ToAuxInt(1)
1357 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1358 v1.AddArg2(x, y)
1359 v.AddArg2(v0, v1)
1360 return true
1361 }
1362 }
1363 func rewriteValueMIPS64_OpHmul32(v *Value) bool {
1364 v_1 := v.Args[1]
1365 v_0 := v.Args[0]
1366 b := v.Block
1367 typ := &b.Func.Config.Types
1368
1369
1370 for {
1371 x := v_0
1372 y := v_1
1373 v.reset(OpMIPS64SRAVconst)
1374 v.AuxInt = int64ToAuxInt(32)
1375 v0 := b.NewValue0(v.Pos, OpSelect1, typ.Int64)
1376 v1 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
1377 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1378 v2.AddArg(x)
1379 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1380 v3.AddArg(y)
1381 v1.AddArg2(v2, v3)
1382 v0.AddArg(v1)
1383 v.AddArg(v0)
1384 return true
1385 }
1386 }
1387 func rewriteValueMIPS64_OpHmul32u(v *Value) bool {
1388 v_1 := v.Args[1]
1389 v_0 := v.Args[0]
1390 b := v.Block
1391 typ := &b.Func.Config.Types
1392
1393
1394 for {
1395 x := v_0
1396 y := v_1
1397 v.reset(OpMIPS64SRLVconst)
1398 v.AuxInt = int64ToAuxInt(32)
1399 v0 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
1400 v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
1401 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1402 v2.AddArg(x)
1403 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1404 v3.AddArg(y)
1405 v1.AddArg2(v2, v3)
1406 v0.AddArg(v1)
1407 v.AddArg(v0)
1408 return true
1409 }
1410 }
1411 func rewriteValueMIPS64_OpHmul64(v *Value) bool {
1412 v_1 := v.Args[1]
1413 v_0 := v.Args[0]
1414 b := v.Block
1415 typ := &b.Func.Config.Types
1416
1417
1418 for {
1419 x := v_0
1420 y := v_1
1421 v.reset(OpSelect0)
1422 v0 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
1423 v0.AddArg2(x, y)
1424 v.AddArg(v0)
1425 return true
1426 }
1427 }
1428 func rewriteValueMIPS64_OpHmul64u(v *Value) bool {
1429 v_1 := v.Args[1]
1430 v_0 := v.Args[0]
1431 b := v.Block
1432 typ := &b.Func.Config.Types
1433
1434
1435 for {
1436 x := v_0
1437 y := v_1
1438 v.reset(OpSelect0)
1439 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
1440 v0.AddArg2(x, y)
1441 v.AddArg(v0)
1442 return true
1443 }
1444 }
1445 func rewriteValueMIPS64_OpIsInBounds(v *Value) bool {
1446 v_1 := v.Args[1]
1447 v_0 := v.Args[0]
1448
1449
1450 for {
1451 idx := v_0
1452 len := v_1
1453 v.reset(OpMIPS64SGTU)
1454 v.AddArg2(len, idx)
1455 return true
1456 }
1457 }
1458 func rewriteValueMIPS64_OpIsNonNil(v *Value) bool {
1459 v_0 := v.Args[0]
1460 b := v.Block
1461 typ := &b.Func.Config.Types
1462
1463
1464 for {
1465 ptr := v_0
1466 v.reset(OpMIPS64SGTU)
1467 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1468 v0.AuxInt = int64ToAuxInt(0)
1469 v.AddArg2(ptr, v0)
1470 return true
1471 }
1472 }
1473 func rewriteValueMIPS64_OpIsSliceInBounds(v *Value) bool {
1474 v_1 := v.Args[1]
1475 v_0 := v.Args[0]
1476 b := v.Block
1477 typ := &b.Func.Config.Types
1478
1479
1480 for {
1481 idx := v_0
1482 len := v_1
1483 v.reset(OpMIPS64XOR)
1484 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1485 v0.AuxInt = int64ToAuxInt(1)
1486 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1487 v1.AddArg2(idx, len)
1488 v.AddArg2(v0, v1)
1489 return true
1490 }
1491 }
1492 func rewriteValueMIPS64_OpLeq16(v *Value) bool {
1493 v_1 := v.Args[1]
1494 v_0 := v.Args[0]
1495 b := v.Block
1496 typ := &b.Func.Config.Types
1497
1498
1499 for {
1500 x := v_0
1501 y := v_1
1502 v.reset(OpMIPS64XOR)
1503 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1504 v0.AuxInt = int64ToAuxInt(1)
1505 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1506 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1507 v2.AddArg(x)
1508 v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1509 v3.AddArg(y)
1510 v1.AddArg2(v2, v3)
1511 v.AddArg2(v0, v1)
1512 return true
1513 }
1514 }
1515 func rewriteValueMIPS64_OpLeq16U(v *Value) bool {
1516 v_1 := v.Args[1]
1517 v_0 := v.Args[0]
1518 b := v.Block
1519 typ := &b.Func.Config.Types
1520
1521
1522 for {
1523 x := v_0
1524 y := v_1
1525 v.reset(OpMIPS64XOR)
1526 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1527 v0.AuxInt = int64ToAuxInt(1)
1528 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1529 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1530 v2.AddArg(x)
1531 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1532 v3.AddArg(y)
1533 v1.AddArg2(v2, v3)
1534 v.AddArg2(v0, v1)
1535 return true
1536 }
1537 }
1538 func rewriteValueMIPS64_OpLeq32(v *Value) bool {
1539 v_1 := v.Args[1]
1540 v_0 := v.Args[0]
1541 b := v.Block
1542 typ := &b.Func.Config.Types
1543
1544
1545 for {
1546 x := v_0
1547 y := v_1
1548 v.reset(OpMIPS64XOR)
1549 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1550 v0.AuxInt = int64ToAuxInt(1)
1551 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1552 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1553 v2.AddArg(x)
1554 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1555 v3.AddArg(y)
1556 v1.AddArg2(v2, v3)
1557 v.AddArg2(v0, v1)
1558 return true
1559 }
1560 }
1561 func rewriteValueMIPS64_OpLeq32F(v *Value) bool {
1562 v_1 := v.Args[1]
1563 v_0 := v.Args[0]
1564 b := v.Block
1565
1566
1567 for {
1568 x := v_0
1569 y := v_1
1570 v.reset(OpMIPS64FPFlagTrue)
1571 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags)
1572 v0.AddArg2(y, x)
1573 v.AddArg(v0)
1574 return true
1575 }
1576 }
1577 func rewriteValueMIPS64_OpLeq32U(v *Value) bool {
1578 v_1 := v.Args[1]
1579 v_0 := v.Args[0]
1580 b := v.Block
1581 typ := &b.Func.Config.Types
1582
1583
1584 for {
1585 x := v_0
1586 y := v_1
1587 v.reset(OpMIPS64XOR)
1588 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1589 v0.AuxInt = int64ToAuxInt(1)
1590 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1591 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1592 v2.AddArg(x)
1593 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1594 v3.AddArg(y)
1595 v1.AddArg2(v2, v3)
1596 v.AddArg2(v0, v1)
1597 return true
1598 }
1599 }
1600 func rewriteValueMIPS64_OpLeq64(v *Value) bool {
1601 v_1 := v.Args[1]
1602 v_0 := v.Args[0]
1603 b := v.Block
1604 typ := &b.Func.Config.Types
1605
1606
1607 for {
1608 x := v_0
1609 y := v_1
1610 v.reset(OpMIPS64XOR)
1611 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1612 v0.AuxInt = int64ToAuxInt(1)
1613 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1614 v1.AddArg2(x, y)
1615 v.AddArg2(v0, v1)
1616 return true
1617 }
1618 }
1619 func rewriteValueMIPS64_OpLeq64F(v *Value) bool {
1620 v_1 := v.Args[1]
1621 v_0 := v.Args[0]
1622 b := v.Block
1623
1624
1625 for {
1626 x := v_0
1627 y := v_1
1628 v.reset(OpMIPS64FPFlagTrue)
1629 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags)
1630 v0.AddArg2(y, x)
1631 v.AddArg(v0)
1632 return true
1633 }
1634 }
1635 func rewriteValueMIPS64_OpLeq64U(v *Value) bool {
1636 v_1 := v.Args[1]
1637 v_0 := v.Args[0]
1638 b := v.Block
1639 typ := &b.Func.Config.Types
1640
1641
1642 for {
1643 x := v_0
1644 y := v_1
1645 v.reset(OpMIPS64XOR)
1646 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1647 v0.AuxInt = int64ToAuxInt(1)
1648 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1649 v1.AddArg2(x, y)
1650 v.AddArg2(v0, v1)
1651 return true
1652 }
1653 }
1654 func rewriteValueMIPS64_OpLeq8(v *Value) bool {
1655 v_1 := v.Args[1]
1656 v_0 := v.Args[0]
1657 b := v.Block
1658 typ := &b.Func.Config.Types
1659
1660
1661 for {
1662 x := v_0
1663 y := v_1
1664 v.reset(OpMIPS64XOR)
1665 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1666 v0.AuxInt = int64ToAuxInt(1)
1667 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1668 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1669 v2.AddArg(x)
1670 v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1671 v3.AddArg(y)
1672 v1.AddArg2(v2, v3)
1673 v.AddArg2(v0, v1)
1674 return true
1675 }
1676 }
1677 func rewriteValueMIPS64_OpLeq8U(v *Value) bool {
1678 v_1 := v.Args[1]
1679 v_0 := v.Args[0]
1680 b := v.Block
1681 typ := &b.Func.Config.Types
1682
1683
1684 for {
1685 x := v_0
1686 y := v_1
1687 v.reset(OpMIPS64XOR)
1688 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1689 v0.AuxInt = int64ToAuxInt(1)
1690 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1691 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1692 v2.AddArg(x)
1693 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1694 v3.AddArg(y)
1695 v1.AddArg2(v2, v3)
1696 v.AddArg2(v0, v1)
1697 return true
1698 }
1699 }
1700 func rewriteValueMIPS64_OpLess16(v *Value) bool {
1701 v_1 := v.Args[1]
1702 v_0 := v.Args[0]
1703 b := v.Block
1704 typ := &b.Func.Config.Types
1705
1706
1707 for {
1708 x := v_0
1709 y := v_1
1710 v.reset(OpMIPS64SGT)
1711 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1712 v0.AddArg(y)
1713 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1714 v1.AddArg(x)
1715 v.AddArg2(v0, v1)
1716 return true
1717 }
1718 }
1719 func rewriteValueMIPS64_OpLess16U(v *Value) bool {
1720 v_1 := v.Args[1]
1721 v_0 := v.Args[0]
1722 b := v.Block
1723 typ := &b.Func.Config.Types
1724
1725
1726 for {
1727 x := v_0
1728 y := v_1
1729 v.reset(OpMIPS64SGTU)
1730 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1731 v0.AddArg(y)
1732 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1733 v1.AddArg(x)
1734 v.AddArg2(v0, v1)
1735 return true
1736 }
1737 }
1738 func rewriteValueMIPS64_OpLess32(v *Value) bool {
1739 v_1 := v.Args[1]
1740 v_0 := v.Args[0]
1741 b := v.Block
1742 typ := &b.Func.Config.Types
1743
1744
1745 for {
1746 x := v_0
1747 y := v_1
1748 v.reset(OpMIPS64SGT)
1749 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1750 v0.AddArg(y)
1751 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1752 v1.AddArg(x)
1753 v.AddArg2(v0, v1)
1754 return true
1755 }
1756 }
1757 func rewriteValueMIPS64_OpLess32F(v *Value) bool {
1758 v_1 := v.Args[1]
1759 v_0 := v.Args[0]
1760 b := v.Block
1761
1762
1763 for {
1764 x := v_0
1765 y := v_1
1766 v.reset(OpMIPS64FPFlagTrue)
1767 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags)
1768 v0.AddArg2(y, x)
1769 v.AddArg(v0)
1770 return true
1771 }
1772 }
1773 func rewriteValueMIPS64_OpLess32U(v *Value) bool {
1774 v_1 := v.Args[1]
1775 v_0 := v.Args[0]
1776 b := v.Block
1777 typ := &b.Func.Config.Types
1778
1779
1780 for {
1781 x := v_0
1782 y := v_1
1783 v.reset(OpMIPS64SGTU)
1784 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1785 v0.AddArg(y)
1786 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1787 v1.AddArg(x)
1788 v.AddArg2(v0, v1)
1789 return true
1790 }
1791 }
1792 func rewriteValueMIPS64_OpLess64(v *Value) bool {
1793 v_1 := v.Args[1]
1794 v_0 := v.Args[0]
1795
1796
1797 for {
1798 x := v_0
1799 y := v_1
1800 v.reset(OpMIPS64SGT)
1801 v.AddArg2(y, x)
1802 return true
1803 }
1804 }
1805 func rewriteValueMIPS64_OpLess64F(v *Value) bool {
1806 v_1 := v.Args[1]
1807 v_0 := v.Args[0]
1808 b := v.Block
1809
1810
1811 for {
1812 x := v_0
1813 y := v_1
1814 v.reset(OpMIPS64FPFlagTrue)
1815 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags)
1816 v0.AddArg2(y, x)
1817 v.AddArg(v0)
1818 return true
1819 }
1820 }
1821 func rewriteValueMIPS64_OpLess64U(v *Value) bool {
1822 v_1 := v.Args[1]
1823 v_0 := v.Args[0]
1824
1825
1826 for {
1827 x := v_0
1828 y := v_1
1829 v.reset(OpMIPS64SGTU)
1830 v.AddArg2(y, x)
1831 return true
1832 }
1833 }
1834 func rewriteValueMIPS64_OpLess8(v *Value) bool {
1835 v_1 := v.Args[1]
1836 v_0 := v.Args[0]
1837 b := v.Block
1838 typ := &b.Func.Config.Types
1839
1840
1841 for {
1842 x := v_0
1843 y := v_1
1844 v.reset(OpMIPS64SGT)
1845 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1846 v0.AddArg(y)
1847 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1848 v1.AddArg(x)
1849 v.AddArg2(v0, v1)
1850 return true
1851 }
1852 }
1853 func rewriteValueMIPS64_OpLess8U(v *Value) bool {
1854 v_1 := v.Args[1]
1855 v_0 := v.Args[0]
1856 b := v.Block
1857 typ := &b.Func.Config.Types
1858
1859
1860 for {
1861 x := v_0
1862 y := v_1
1863 v.reset(OpMIPS64SGTU)
1864 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1865 v0.AddArg(y)
1866 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1867 v1.AddArg(x)
1868 v.AddArg2(v0, v1)
1869 return true
1870 }
1871 }
1872 func rewriteValueMIPS64_OpLoad(v *Value) bool {
1873 v_1 := v.Args[1]
1874 v_0 := v.Args[0]
1875
1876
1877
1878 for {
1879 t := v.Type
1880 ptr := v_0
1881 mem := v_1
1882 if !(t.IsBoolean()) {
1883 break
1884 }
1885 v.reset(OpMIPS64MOVBUload)
1886 v.AddArg2(ptr, mem)
1887 return true
1888 }
1889
1890
1891
1892 for {
1893 t := v.Type
1894 ptr := v_0
1895 mem := v_1
1896 if !(is8BitInt(t) && t.IsSigned()) {
1897 break
1898 }
1899 v.reset(OpMIPS64MOVBload)
1900 v.AddArg2(ptr, mem)
1901 return true
1902 }
1903
1904
1905
1906 for {
1907 t := v.Type
1908 ptr := v_0
1909 mem := v_1
1910 if !(is8BitInt(t) && !t.IsSigned()) {
1911 break
1912 }
1913 v.reset(OpMIPS64MOVBUload)
1914 v.AddArg2(ptr, mem)
1915 return true
1916 }
1917
1918
1919
1920 for {
1921 t := v.Type
1922 ptr := v_0
1923 mem := v_1
1924 if !(is16BitInt(t) && t.IsSigned()) {
1925 break
1926 }
1927 v.reset(OpMIPS64MOVHload)
1928 v.AddArg2(ptr, mem)
1929 return true
1930 }
1931
1932
1933
1934 for {
1935 t := v.Type
1936 ptr := v_0
1937 mem := v_1
1938 if !(is16BitInt(t) && !t.IsSigned()) {
1939 break
1940 }
1941 v.reset(OpMIPS64MOVHUload)
1942 v.AddArg2(ptr, mem)
1943 return true
1944 }
1945
1946
1947
1948 for {
1949 t := v.Type
1950 ptr := v_0
1951 mem := v_1
1952 if !(is32BitInt(t) && t.IsSigned()) {
1953 break
1954 }
1955 v.reset(OpMIPS64MOVWload)
1956 v.AddArg2(ptr, mem)
1957 return true
1958 }
1959
1960
1961
1962 for {
1963 t := v.Type
1964 ptr := v_0
1965 mem := v_1
1966 if !(is32BitInt(t) && !t.IsSigned()) {
1967 break
1968 }
1969 v.reset(OpMIPS64MOVWUload)
1970 v.AddArg2(ptr, mem)
1971 return true
1972 }
1973
1974
1975
1976 for {
1977 t := v.Type
1978 ptr := v_0
1979 mem := v_1
1980 if !(is64BitInt(t) || isPtr(t)) {
1981 break
1982 }
1983 v.reset(OpMIPS64MOVVload)
1984 v.AddArg2(ptr, mem)
1985 return true
1986 }
1987
1988
1989
1990 for {
1991 t := v.Type
1992 ptr := v_0
1993 mem := v_1
1994 if !(is32BitFloat(t)) {
1995 break
1996 }
1997 v.reset(OpMIPS64MOVFload)
1998 v.AddArg2(ptr, mem)
1999 return true
2000 }
2001
2002
2003
2004 for {
2005 t := v.Type
2006 ptr := v_0
2007 mem := v_1
2008 if !(is64BitFloat(t)) {
2009 break
2010 }
2011 v.reset(OpMIPS64MOVDload)
2012 v.AddArg2(ptr, mem)
2013 return true
2014 }
2015 return false
2016 }
2017 func rewriteValueMIPS64_OpLocalAddr(v *Value) bool {
2018 v_1 := v.Args[1]
2019 v_0 := v.Args[0]
2020 b := v.Block
2021 typ := &b.Func.Config.Types
2022
2023
2024
2025 for {
2026 t := v.Type
2027 sym := auxToSym(v.Aux)
2028 base := v_0
2029 mem := v_1
2030 if !(t.Elem().HasPointers()) {
2031 break
2032 }
2033 v.reset(OpMIPS64MOVVaddr)
2034 v.Aux = symToAux(sym)
2035 v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
2036 v0.AddArg2(base, mem)
2037 v.AddArg(v0)
2038 return true
2039 }
2040
2041
2042
2043 for {
2044 t := v.Type
2045 sym := auxToSym(v.Aux)
2046 base := v_0
2047 if !(!t.Elem().HasPointers()) {
2048 break
2049 }
2050 v.reset(OpMIPS64MOVVaddr)
2051 v.Aux = symToAux(sym)
2052 v.AddArg(base)
2053 return true
2054 }
2055 return false
2056 }
2057 func rewriteValueMIPS64_OpLsh16x16(v *Value) bool {
2058 v_1 := v.Args[1]
2059 v_0 := v.Args[0]
2060 b := v.Block
2061 typ := &b.Func.Config.Types
2062
2063
2064 for {
2065 t := v.Type
2066 x := v_0
2067 y := v_1
2068 v.reset(OpMIPS64AND)
2069 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2070 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2071 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2072 v2.AuxInt = int64ToAuxInt(64)
2073 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2074 v3.AddArg(y)
2075 v1.AddArg2(v2, v3)
2076 v0.AddArg(v1)
2077 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2078 v4.AddArg2(x, v3)
2079 v.AddArg2(v0, v4)
2080 return true
2081 }
2082 }
2083 func rewriteValueMIPS64_OpLsh16x32(v *Value) bool {
2084 v_1 := v.Args[1]
2085 v_0 := v.Args[0]
2086 b := v.Block
2087 typ := &b.Func.Config.Types
2088
2089
2090 for {
2091 t := v.Type
2092 x := v_0
2093 y := v_1
2094 v.reset(OpMIPS64AND)
2095 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2096 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2097 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2098 v2.AuxInt = int64ToAuxInt(64)
2099 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2100 v3.AddArg(y)
2101 v1.AddArg2(v2, v3)
2102 v0.AddArg(v1)
2103 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2104 v4.AddArg2(x, v3)
2105 v.AddArg2(v0, v4)
2106 return true
2107 }
2108 }
2109 func rewriteValueMIPS64_OpLsh16x64(v *Value) bool {
2110 v_1 := v.Args[1]
2111 v_0 := v.Args[0]
2112 b := v.Block
2113 typ := &b.Func.Config.Types
2114
2115
2116 for {
2117 t := v.Type
2118 x := v_0
2119 y := v_1
2120 v.reset(OpMIPS64AND)
2121 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2122 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2123 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2124 v2.AuxInt = int64ToAuxInt(64)
2125 v1.AddArg2(v2, y)
2126 v0.AddArg(v1)
2127 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2128 v3.AddArg2(x, y)
2129 v.AddArg2(v0, v3)
2130 return true
2131 }
2132 }
2133 func rewriteValueMIPS64_OpLsh16x8(v *Value) bool {
2134 v_1 := v.Args[1]
2135 v_0 := v.Args[0]
2136 b := v.Block
2137 typ := &b.Func.Config.Types
2138
2139
2140 for {
2141 t := v.Type
2142 x := v_0
2143 y := v_1
2144 v.reset(OpMIPS64AND)
2145 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2146 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2147 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2148 v2.AuxInt = int64ToAuxInt(64)
2149 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2150 v3.AddArg(y)
2151 v1.AddArg2(v2, v3)
2152 v0.AddArg(v1)
2153 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2154 v4.AddArg2(x, v3)
2155 v.AddArg2(v0, v4)
2156 return true
2157 }
2158 }
2159 func rewriteValueMIPS64_OpLsh32x16(v *Value) bool {
2160 v_1 := v.Args[1]
2161 v_0 := v.Args[0]
2162 b := v.Block
2163 typ := &b.Func.Config.Types
2164
2165
2166 for {
2167 t := v.Type
2168 x := v_0
2169 y := v_1
2170 v.reset(OpMIPS64AND)
2171 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2172 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2173 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2174 v2.AuxInt = int64ToAuxInt(64)
2175 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2176 v3.AddArg(y)
2177 v1.AddArg2(v2, v3)
2178 v0.AddArg(v1)
2179 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2180 v4.AddArg2(x, v3)
2181 v.AddArg2(v0, v4)
2182 return true
2183 }
2184 }
2185 func rewriteValueMIPS64_OpLsh32x32(v *Value) bool {
2186 v_1 := v.Args[1]
2187 v_0 := v.Args[0]
2188 b := v.Block
2189 typ := &b.Func.Config.Types
2190
2191
2192 for {
2193 t := v.Type
2194 x := v_0
2195 y := v_1
2196 v.reset(OpMIPS64AND)
2197 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2198 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2199 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2200 v2.AuxInt = int64ToAuxInt(64)
2201 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2202 v3.AddArg(y)
2203 v1.AddArg2(v2, v3)
2204 v0.AddArg(v1)
2205 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2206 v4.AddArg2(x, v3)
2207 v.AddArg2(v0, v4)
2208 return true
2209 }
2210 }
2211 func rewriteValueMIPS64_OpLsh32x64(v *Value) bool {
2212 v_1 := v.Args[1]
2213 v_0 := v.Args[0]
2214 b := v.Block
2215 typ := &b.Func.Config.Types
2216
2217
2218 for {
2219 t := v.Type
2220 x := v_0
2221 y := v_1
2222 v.reset(OpMIPS64AND)
2223 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2224 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2225 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2226 v2.AuxInt = int64ToAuxInt(64)
2227 v1.AddArg2(v2, y)
2228 v0.AddArg(v1)
2229 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2230 v3.AddArg2(x, y)
2231 v.AddArg2(v0, v3)
2232 return true
2233 }
2234 }
2235 func rewriteValueMIPS64_OpLsh32x8(v *Value) bool {
2236 v_1 := v.Args[1]
2237 v_0 := v.Args[0]
2238 b := v.Block
2239 typ := &b.Func.Config.Types
2240
2241
2242 for {
2243 t := v.Type
2244 x := v_0
2245 y := v_1
2246 v.reset(OpMIPS64AND)
2247 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2248 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2249 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2250 v2.AuxInt = int64ToAuxInt(64)
2251 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2252 v3.AddArg(y)
2253 v1.AddArg2(v2, v3)
2254 v0.AddArg(v1)
2255 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2256 v4.AddArg2(x, v3)
2257 v.AddArg2(v0, v4)
2258 return true
2259 }
2260 }
2261 func rewriteValueMIPS64_OpLsh64x16(v *Value) bool {
2262 v_1 := v.Args[1]
2263 v_0 := v.Args[0]
2264 b := v.Block
2265 typ := &b.Func.Config.Types
2266
2267
2268 for {
2269 t := v.Type
2270 x := v_0
2271 y := v_1
2272 v.reset(OpMIPS64AND)
2273 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2274 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2275 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2276 v2.AuxInt = int64ToAuxInt(64)
2277 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2278 v3.AddArg(y)
2279 v1.AddArg2(v2, v3)
2280 v0.AddArg(v1)
2281 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2282 v4.AddArg2(x, v3)
2283 v.AddArg2(v0, v4)
2284 return true
2285 }
2286 }
2287 func rewriteValueMIPS64_OpLsh64x32(v *Value) bool {
2288 v_1 := v.Args[1]
2289 v_0 := v.Args[0]
2290 b := v.Block
2291 typ := &b.Func.Config.Types
2292
2293
2294 for {
2295 t := v.Type
2296 x := v_0
2297 y := v_1
2298 v.reset(OpMIPS64AND)
2299 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2300 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2301 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2302 v2.AuxInt = int64ToAuxInt(64)
2303 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2304 v3.AddArg(y)
2305 v1.AddArg2(v2, v3)
2306 v0.AddArg(v1)
2307 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2308 v4.AddArg2(x, v3)
2309 v.AddArg2(v0, v4)
2310 return true
2311 }
2312 }
2313 func rewriteValueMIPS64_OpLsh64x64(v *Value) bool {
2314 v_1 := v.Args[1]
2315 v_0 := v.Args[0]
2316 b := v.Block
2317 typ := &b.Func.Config.Types
2318
2319
2320 for {
2321 t := v.Type
2322 x := v_0
2323 y := v_1
2324 v.reset(OpMIPS64AND)
2325 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2326 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2327 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2328 v2.AuxInt = int64ToAuxInt(64)
2329 v1.AddArg2(v2, y)
2330 v0.AddArg(v1)
2331 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2332 v3.AddArg2(x, y)
2333 v.AddArg2(v0, v3)
2334 return true
2335 }
2336 }
2337 func rewriteValueMIPS64_OpLsh64x8(v *Value) bool {
2338 v_1 := v.Args[1]
2339 v_0 := v.Args[0]
2340 b := v.Block
2341 typ := &b.Func.Config.Types
2342
2343
2344 for {
2345 t := v.Type
2346 x := v_0
2347 y := v_1
2348 v.reset(OpMIPS64AND)
2349 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2350 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2351 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2352 v2.AuxInt = int64ToAuxInt(64)
2353 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2354 v3.AddArg(y)
2355 v1.AddArg2(v2, v3)
2356 v0.AddArg(v1)
2357 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2358 v4.AddArg2(x, v3)
2359 v.AddArg2(v0, v4)
2360 return true
2361 }
2362 }
2363 func rewriteValueMIPS64_OpLsh8x16(v *Value) bool {
2364 v_1 := v.Args[1]
2365 v_0 := v.Args[0]
2366 b := v.Block
2367 typ := &b.Func.Config.Types
2368
2369
2370 for {
2371 t := v.Type
2372 x := v_0
2373 y := v_1
2374 v.reset(OpMIPS64AND)
2375 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2376 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2377 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2378 v2.AuxInt = int64ToAuxInt(64)
2379 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2380 v3.AddArg(y)
2381 v1.AddArg2(v2, v3)
2382 v0.AddArg(v1)
2383 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2384 v4.AddArg2(x, v3)
2385 v.AddArg2(v0, v4)
2386 return true
2387 }
2388 }
2389 func rewriteValueMIPS64_OpLsh8x32(v *Value) bool {
2390 v_1 := v.Args[1]
2391 v_0 := v.Args[0]
2392 b := v.Block
2393 typ := &b.Func.Config.Types
2394
2395
2396 for {
2397 t := v.Type
2398 x := v_0
2399 y := v_1
2400 v.reset(OpMIPS64AND)
2401 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2402 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2403 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2404 v2.AuxInt = int64ToAuxInt(64)
2405 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2406 v3.AddArg(y)
2407 v1.AddArg2(v2, v3)
2408 v0.AddArg(v1)
2409 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2410 v4.AddArg2(x, v3)
2411 v.AddArg2(v0, v4)
2412 return true
2413 }
2414 }
2415 func rewriteValueMIPS64_OpLsh8x64(v *Value) bool {
2416 v_1 := v.Args[1]
2417 v_0 := v.Args[0]
2418 b := v.Block
2419 typ := &b.Func.Config.Types
2420
2421
2422 for {
2423 t := v.Type
2424 x := v_0
2425 y := v_1
2426 v.reset(OpMIPS64AND)
2427 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2428 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2429 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2430 v2.AuxInt = int64ToAuxInt(64)
2431 v1.AddArg2(v2, y)
2432 v0.AddArg(v1)
2433 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2434 v3.AddArg2(x, y)
2435 v.AddArg2(v0, v3)
2436 return true
2437 }
2438 }
2439 func rewriteValueMIPS64_OpLsh8x8(v *Value) bool {
2440 v_1 := v.Args[1]
2441 v_0 := v.Args[0]
2442 b := v.Block
2443 typ := &b.Func.Config.Types
2444
2445
2446 for {
2447 t := v.Type
2448 x := v_0
2449 y := v_1
2450 v.reset(OpMIPS64AND)
2451 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2452 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2453 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2454 v2.AuxInt = int64ToAuxInt(64)
2455 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2456 v3.AddArg(y)
2457 v1.AddArg2(v2, v3)
2458 v0.AddArg(v1)
2459 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2460 v4.AddArg2(x, v3)
2461 v.AddArg2(v0, v4)
2462 return true
2463 }
2464 }
2465 func rewriteValueMIPS64_OpMIPS64ADDV(v *Value) bool {
2466 v_1 := v.Args[1]
2467 v_0 := v.Args[0]
2468
2469
2470
2471 for {
2472 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2473 x := v_0
2474 if v_1.Op != OpMIPS64MOVVconst {
2475 continue
2476 }
2477 t := v_1.Type
2478 c := auxIntToInt64(v_1.AuxInt)
2479 if !(is32Bit(c) && !t.IsPtr()) {
2480 continue
2481 }
2482 v.reset(OpMIPS64ADDVconst)
2483 v.AuxInt = int64ToAuxInt(c)
2484 v.AddArg(x)
2485 return true
2486 }
2487 break
2488 }
2489
2490
2491 for {
2492 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2493 x := v_0
2494 if v_1.Op != OpMIPS64NEGV {
2495 continue
2496 }
2497 y := v_1.Args[0]
2498 v.reset(OpMIPS64SUBV)
2499 v.AddArg2(x, y)
2500 return true
2501 }
2502 break
2503 }
2504 return false
2505 }
2506 func rewriteValueMIPS64_OpMIPS64ADDVconst(v *Value) bool {
2507 v_0 := v.Args[0]
2508
2509
2510
2511 for {
2512 off1 := auxIntToInt64(v.AuxInt)
2513 if v_0.Op != OpMIPS64MOVVaddr {
2514 break
2515 }
2516 off2 := auxIntToInt32(v_0.AuxInt)
2517 sym := auxToSym(v_0.Aux)
2518 ptr := v_0.Args[0]
2519 if !(is32Bit(off1 + int64(off2))) {
2520 break
2521 }
2522 v.reset(OpMIPS64MOVVaddr)
2523 v.AuxInt = int32ToAuxInt(int32(off1) + int32(off2))
2524 v.Aux = symToAux(sym)
2525 v.AddArg(ptr)
2526 return true
2527 }
2528
2529
2530 for {
2531 if auxIntToInt64(v.AuxInt) != 0 {
2532 break
2533 }
2534 x := v_0
2535 v.copyOf(x)
2536 return true
2537 }
2538
2539
2540 for {
2541 c := auxIntToInt64(v.AuxInt)
2542 if v_0.Op != OpMIPS64MOVVconst {
2543 break
2544 }
2545 d := auxIntToInt64(v_0.AuxInt)
2546 v.reset(OpMIPS64MOVVconst)
2547 v.AuxInt = int64ToAuxInt(c + d)
2548 return true
2549 }
2550
2551
2552
2553 for {
2554 c := auxIntToInt64(v.AuxInt)
2555 if v_0.Op != OpMIPS64ADDVconst {
2556 break
2557 }
2558 d := auxIntToInt64(v_0.AuxInt)
2559 x := v_0.Args[0]
2560 if !(is32Bit(c + d)) {
2561 break
2562 }
2563 v.reset(OpMIPS64ADDVconst)
2564 v.AuxInt = int64ToAuxInt(c + d)
2565 v.AddArg(x)
2566 return true
2567 }
2568
2569
2570
2571 for {
2572 c := auxIntToInt64(v.AuxInt)
2573 if v_0.Op != OpMIPS64SUBVconst {
2574 break
2575 }
2576 d := auxIntToInt64(v_0.AuxInt)
2577 x := v_0.Args[0]
2578 if !(is32Bit(c - d)) {
2579 break
2580 }
2581 v.reset(OpMIPS64ADDVconst)
2582 v.AuxInt = int64ToAuxInt(c - d)
2583 v.AddArg(x)
2584 return true
2585 }
2586 return false
2587 }
2588 func rewriteValueMIPS64_OpMIPS64AND(v *Value) bool {
2589 v_1 := v.Args[1]
2590 v_0 := v.Args[0]
2591
2592
2593
2594 for {
2595 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2596 x := v_0
2597 if v_1.Op != OpMIPS64MOVVconst {
2598 continue
2599 }
2600 c := auxIntToInt64(v_1.AuxInt)
2601 if !(is32Bit(c)) {
2602 continue
2603 }
2604 v.reset(OpMIPS64ANDconst)
2605 v.AuxInt = int64ToAuxInt(c)
2606 v.AddArg(x)
2607 return true
2608 }
2609 break
2610 }
2611
2612
2613 for {
2614 x := v_0
2615 if x != v_1 {
2616 break
2617 }
2618 v.copyOf(x)
2619 return true
2620 }
2621 return false
2622 }
2623 func rewriteValueMIPS64_OpMIPS64ANDconst(v *Value) bool {
2624 v_0 := v.Args[0]
2625
2626
2627 for {
2628 if auxIntToInt64(v.AuxInt) != 0 {
2629 break
2630 }
2631 v.reset(OpMIPS64MOVVconst)
2632 v.AuxInt = int64ToAuxInt(0)
2633 return true
2634 }
2635
2636
2637 for {
2638 if auxIntToInt64(v.AuxInt) != -1 {
2639 break
2640 }
2641 x := v_0
2642 v.copyOf(x)
2643 return true
2644 }
2645
2646
2647 for {
2648 c := auxIntToInt64(v.AuxInt)
2649 if v_0.Op != OpMIPS64MOVVconst {
2650 break
2651 }
2652 d := auxIntToInt64(v_0.AuxInt)
2653 v.reset(OpMIPS64MOVVconst)
2654 v.AuxInt = int64ToAuxInt(c & d)
2655 return true
2656 }
2657
2658
2659 for {
2660 c := auxIntToInt64(v.AuxInt)
2661 if v_0.Op != OpMIPS64ANDconst {
2662 break
2663 }
2664 d := auxIntToInt64(v_0.AuxInt)
2665 x := v_0.Args[0]
2666 v.reset(OpMIPS64ANDconst)
2667 v.AuxInt = int64ToAuxInt(c & d)
2668 v.AddArg(x)
2669 return true
2670 }
2671 return false
2672 }
2673 func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v *Value) bool {
2674 v_2 := v.Args[2]
2675 v_1 := v.Args[1]
2676 v_0 := v.Args[0]
2677
2678
2679
2680 for {
2681 ptr := v_0
2682 if v_1.Op != OpMIPS64MOVVconst {
2683 break
2684 }
2685 c := auxIntToInt64(v_1.AuxInt)
2686 mem := v_2
2687 if !(is32Bit(c)) {
2688 break
2689 }
2690 v.reset(OpMIPS64LoweredAtomicAddconst32)
2691 v.AuxInt = int32ToAuxInt(int32(c))
2692 v.AddArg2(ptr, mem)
2693 return true
2694 }
2695 return false
2696 }
2697 func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v *Value) bool {
2698 v_2 := v.Args[2]
2699 v_1 := v.Args[1]
2700 v_0 := v.Args[0]
2701
2702
2703
2704 for {
2705 ptr := v_0
2706 if v_1.Op != OpMIPS64MOVVconst {
2707 break
2708 }
2709 c := auxIntToInt64(v_1.AuxInt)
2710 mem := v_2
2711 if !(is32Bit(c)) {
2712 break
2713 }
2714 v.reset(OpMIPS64LoweredAtomicAddconst64)
2715 v.AuxInt = int64ToAuxInt(c)
2716 v.AddArg2(ptr, mem)
2717 return true
2718 }
2719 return false
2720 }
2721 func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v *Value) bool {
2722 v_2 := v.Args[2]
2723 v_1 := v.Args[1]
2724 v_0 := v.Args[0]
2725
2726
2727 for {
2728 ptr := v_0
2729 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
2730 break
2731 }
2732 mem := v_2
2733 v.reset(OpMIPS64LoweredAtomicStorezero32)
2734 v.AddArg2(ptr, mem)
2735 return true
2736 }
2737 return false
2738 }
2739 func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v *Value) bool {
2740 v_2 := v.Args[2]
2741 v_1 := v.Args[1]
2742 v_0 := v.Args[0]
2743
2744
2745 for {
2746 ptr := v_0
2747 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
2748 break
2749 }
2750 mem := v_2
2751 v.reset(OpMIPS64LoweredAtomicStorezero64)
2752 v.AddArg2(ptr, mem)
2753 return true
2754 }
2755 return false
2756 }
2757 func rewriteValueMIPS64_OpMIPS64MOVBUload(v *Value) bool {
2758 v_1 := v.Args[1]
2759 v_0 := v.Args[0]
2760 b := v.Block
2761 config := b.Func.Config
2762
2763
2764
2765 for {
2766 off1 := auxIntToInt32(v.AuxInt)
2767 sym := auxToSym(v.Aux)
2768 if v_0.Op != OpMIPS64ADDVconst {
2769 break
2770 }
2771 off2 := auxIntToInt64(v_0.AuxInt)
2772 ptr := v_0.Args[0]
2773 mem := v_1
2774 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2775 break
2776 }
2777 v.reset(OpMIPS64MOVBUload)
2778 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2779 v.Aux = symToAux(sym)
2780 v.AddArg2(ptr, mem)
2781 return true
2782 }
2783
2784
2785
2786 for {
2787 off1 := auxIntToInt32(v.AuxInt)
2788 sym1 := auxToSym(v.Aux)
2789 if v_0.Op != OpMIPS64MOVVaddr {
2790 break
2791 }
2792 off2 := auxIntToInt32(v_0.AuxInt)
2793 sym2 := auxToSym(v_0.Aux)
2794 ptr := v_0.Args[0]
2795 mem := v_1
2796 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2797 break
2798 }
2799 v.reset(OpMIPS64MOVBUload)
2800 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2801 v.Aux = symToAux(mergeSym(sym1, sym2))
2802 v.AddArg2(ptr, mem)
2803 return true
2804 }
2805 return false
2806 }
2807 func rewriteValueMIPS64_OpMIPS64MOVBUreg(v *Value) bool {
2808 v_0 := v.Args[0]
2809
2810
2811 for {
2812 x := v_0
2813 if x.Op != OpMIPS64MOVBUload {
2814 break
2815 }
2816 v.reset(OpMIPS64MOVVreg)
2817 v.AddArg(x)
2818 return true
2819 }
2820
2821
2822 for {
2823 x := v_0
2824 if x.Op != OpMIPS64MOVBUreg {
2825 break
2826 }
2827 v.reset(OpMIPS64MOVVreg)
2828 v.AddArg(x)
2829 return true
2830 }
2831
2832
2833 for {
2834 if v_0.Op != OpMIPS64MOVVconst {
2835 break
2836 }
2837 c := auxIntToInt64(v_0.AuxInt)
2838 v.reset(OpMIPS64MOVVconst)
2839 v.AuxInt = int64ToAuxInt(int64(uint8(c)))
2840 return true
2841 }
2842 return false
2843 }
2844 func rewriteValueMIPS64_OpMIPS64MOVBload(v *Value) bool {
2845 v_1 := v.Args[1]
2846 v_0 := v.Args[0]
2847 b := v.Block
2848 config := b.Func.Config
2849
2850
2851
2852 for {
2853 off1 := auxIntToInt32(v.AuxInt)
2854 sym := auxToSym(v.Aux)
2855 if v_0.Op != OpMIPS64ADDVconst {
2856 break
2857 }
2858 off2 := auxIntToInt64(v_0.AuxInt)
2859 ptr := v_0.Args[0]
2860 mem := v_1
2861 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2862 break
2863 }
2864 v.reset(OpMIPS64MOVBload)
2865 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2866 v.Aux = symToAux(sym)
2867 v.AddArg2(ptr, mem)
2868 return true
2869 }
2870
2871
2872
2873 for {
2874 off1 := auxIntToInt32(v.AuxInt)
2875 sym1 := auxToSym(v.Aux)
2876 if v_0.Op != OpMIPS64MOVVaddr {
2877 break
2878 }
2879 off2 := auxIntToInt32(v_0.AuxInt)
2880 sym2 := auxToSym(v_0.Aux)
2881 ptr := v_0.Args[0]
2882 mem := v_1
2883 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2884 break
2885 }
2886 v.reset(OpMIPS64MOVBload)
2887 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2888 v.Aux = symToAux(mergeSym(sym1, sym2))
2889 v.AddArg2(ptr, mem)
2890 return true
2891 }
2892
2893
2894
2895 for {
2896 off := auxIntToInt32(v.AuxInt)
2897 sym := auxToSym(v.Aux)
2898 if v_0.Op != OpSB || !(symIsRO(sym)) {
2899 break
2900 }
2901 v.reset(OpMIPS64MOVVconst)
2902 v.AuxInt = int64ToAuxInt(int64(read8(sym, int64(off))))
2903 return true
2904 }
2905 return false
2906 }
2907 func rewriteValueMIPS64_OpMIPS64MOVBreg(v *Value) bool {
2908 v_0 := v.Args[0]
2909
2910
2911 for {
2912 x := v_0
2913 if x.Op != OpMIPS64MOVBload {
2914 break
2915 }
2916 v.reset(OpMIPS64MOVVreg)
2917 v.AddArg(x)
2918 return true
2919 }
2920
2921
2922 for {
2923 x := v_0
2924 if x.Op != OpMIPS64MOVBreg {
2925 break
2926 }
2927 v.reset(OpMIPS64MOVVreg)
2928 v.AddArg(x)
2929 return true
2930 }
2931
2932
2933 for {
2934 if v_0.Op != OpMIPS64MOVVconst {
2935 break
2936 }
2937 c := auxIntToInt64(v_0.AuxInt)
2938 v.reset(OpMIPS64MOVVconst)
2939 v.AuxInt = int64ToAuxInt(int64(int8(c)))
2940 return true
2941 }
2942 return false
2943 }
2944 func rewriteValueMIPS64_OpMIPS64MOVBstore(v *Value) bool {
2945 v_2 := v.Args[2]
2946 v_1 := v.Args[1]
2947 v_0 := v.Args[0]
2948 b := v.Block
2949 config := b.Func.Config
2950
2951
2952
2953 for {
2954 off1 := auxIntToInt32(v.AuxInt)
2955 sym := auxToSym(v.Aux)
2956 if v_0.Op != OpMIPS64ADDVconst {
2957 break
2958 }
2959 off2 := auxIntToInt64(v_0.AuxInt)
2960 ptr := v_0.Args[0]
2961 val := v_1
2962 mem := v_2
2963 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2964 break
2965 }
2966 v.reset(OpMIPS64MOVBstore)
2967 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2968 v.Aux = symToAux(sym)
2969 v.AddArg3(ptr, val, mem)
2970 return true
2971 }
2972
2973
2974
2975 for {
2976 off1 := auxIntToInt32(v.AuxInt)
2977 sym1 := auxToSym(v.Aux)
2978 if v_0.Op != OpMIPS64MOVVaddr {
2979 break
2980 }
2981 off2 := auxIntToInt32(v_0.AuxInt)
2982 sym2 := auxToSym(v_0.Aux)
2983 ptr := v_0.Args[0]
2984 val := v_1
2985 mem := v_2
2986 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
2987 break
2988 }
2989 v.reset(OpMIPS64MOVBstore)
2990 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2991 v.Aux = symToAux(mergeSym(sym1, sym2))
2992 v.AddArg3(ptr, val, mem)
2993 return true
2994 }
2995
2996
2997 for {
2998 off := auxIntToInt32(v.AuxInt)
2999 sym := auxToSym(v.Aux)
3000 ptr := v_0
3001 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
3002 break
3003 }
3004 mem := v_2
3005 v.reset(OpMIPS64MOVBstorezero)
3006 v.AuxInt = int32ToAuxInt(off)
3007 v.Aux = symToAux(sym)
3008 v.AddArg2(ptr, mem)
3009 return true
3010 }
3011
3012
3013 for {
3014 off := auxIntToInt32(v.AuxInt)
3015 sym := auxToSym(v.Aux)
3016 ptr := v_0
3017 if v_1.Op != OpMIPS64MOVBreg {
3018 break
3019 }
3020 x := v_1.Args[0]
3021 mem := v_2
3022 v.reset(OpMIPS64MOVBstore)
3023 v.AuxInt = int32ToAuxInt(off)
3024 v.Aux = symToAux(sym)
3025 v.AddArg3(ptr, x, mem)
3026 return true
3027 }
3028
3029
3030 for {
3031 off := auxIntToInt32(v.AuxInt)
3032 sym := auxToSym(v.Aux)
3033 ptr := v_0
3034 if v_1.Op != OpMIPS64MOVBUreg {
3035 break
3036 }
3037 x := v_1.Args[0]
3038 mem := v_2
3039 v.reset(OpMIPS64MOVBstore)
3040 v.AuxInt = int32ToAuxInt(off)
3041 v.Aux = symToAux(sym)
3042 v.AddArg3(ptr, x, mem)
3043 return true
3044 }
3045
3046
3047 for {
3048 off := auxIntToInt32(v.AuxInt)
3049 sym := auxToSym(v.Aux)
3050 ptr := v_0
3051 if v_1.Op != OpMIPS64MOVHreg {
3052 break
3053 }
3054 x := v_1.Args[0]
3055 mem := v_2
3056 v.reset(OpMIPS64MOVBstore)
3057 v.AuxInt = int32ToAuxInt(off)
3058 v.Aux = symToAux(sym)
3059 v.AddArg3(ptr, x, mem)
3060 return true
3061 }
3062
3063
3064 for {
3065 off := auxIntToInt32(v.AuxInt)
3066 sym := auxToSym(v.Aux)
3067 ptr := v_0
3068 if v_1.Op != OpMIPS64MOVHUreg {
3069 break
3070 }
3071 x := v_1.Args[0]
3072 mem := v_2
3073 v.reset(OpMIPS64MOVBstore)
3074 v.AuxInt = int32ToAuxInt(off)
3075 v.Aux = symToAux(sym)
3076 v.AddArg3(ptr, x, mem)
3077 return true
3078 }
3079
3080
3081 for {
3082 off := auxIntToInt32(v.AuxInt)
3083 sym := auxToSym(v.Aux)
3084 ptr := v_0
3085 if v_1.Op != OpMIPS64MOVWreg {
3086 break
3087 }
3088 x := v_1.Args[0]
3089 mem := v_2
3090 v.reset(OpMIPS64MOVBstore)
3091 v.AuxInt = int32ToAuxInt(off)
3092 v.Aux = symToAux(sym)
3093 v.AddArg3(ptr, x, mem)
3094 return true
3095 }
3096
3097
3098 for {
3099 off := auxIntToInt32(v.AuxInt)
3100 sym := auxToSym(v.Aux)
3101 ptr := v_0
3102 if v_1.Op != OpMIPS64MOVWUreg {
3103 break
3104 }
3105 x := v_1.Args[0]
3106 mem := v_2
3107 v.reset(OpMIPS64MOVBstore)
3108 v.AuxInt = int32ToAuxInt(off)
3109 v.Aux = symToAux(sym)
3110 v.AddArg3(ptr, x, mem)
3111 return true
3112 }
3113 return false
3114 }
3115 func rewriteValueMIPS64_OpMIPS64MOVBstorezero(v *Value) bool {
3116 v_1 := v.Args[1]
3117 v_0 := v.Args[0]
3118 b := v.Block
3119 config := b.Func.Config
3120
3121
3122
3123 for {
3124 off1 := auxIntToInt32(v.AuxInt)
3125 sym := auxToSym(v.Aux)
3126 if v_0.Op != OpMIPS64ADDVconst {
3127 break
3128 }
3129 off2 := auxIntToInt64(v_0.AuxInt)
3130 ptr := v_0.Args[0]
3131 mem := v_1
3132 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3133 break
3134 }
3135 v.reset(OpMIPS64MOVBstorezero)
3136 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3137 v.Aux = symToAux(sym)
3138 v.AddArg2(ptr, mem)
3139 return true
3140 }
3141
3142
3143
3144 for {
3145 off1 := auxIntToInt32(v.AuxInt)
3146 sym1 := auxToSym(v.Aux)
3147 if v_0.Op != OpMIPS64MOVVaddr {
3148 break
3149 }
3150 off2 := auxIntToInt32(v_0.AuxInt)
3151 sym2 := auxToSym(v_0.Aux)
3152 ptr := v_0.Args[0]
3153 mem := v_1
3154 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3155 break
3156 }
3157 v.reset(OpMIPS64MOVBstorezero)
3158 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3159 v.Aux = symToAux(mergeSym(sym1, sym2))
3160 v.AddArg2(ptr, mem)
3161 return true
3162 }
3163 return false
3164 }
3165 func rewriteValueMIPS64_OpMIPS64MOVDload(v *Value) bool {
3166 v_1 := v.Args[1]
3167 v_0 := v.Args[0]
3168 b := v.Block
3169 config := b.Func.Config
3170
3171
3172 for {
3173 off := auxIntToInt32(v.AuxInt)
3174 sym := auxToSym(v.Aux)
3175 ptr := v_0
3176 if v_1.Op != OpMIPS64MOVVstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
3177 break
3178 }
3179 val := v_1.Args[1]
3180 if ptr != v_1.Args[0] {
3181 break
3182 }
3183 v.reset(OpMIPS64MOVVgpfp)
3184 v.AddArg(val)
3185 return true
3186 }
3187
3188
3189
3190 for {
3191 off1 := auxIntToInt32(v.AuxInt)
3192 sym := auxToSym(v.Aux)
3193 if v_0.Op != OpMIPS64ADDVconst {
3194 break
3195 }
3196 off2 := auxIntToInt64(v_0.AuxInt)
3197 ptr := v_0.Args[0]
3198 mem := v_1
3199 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3200 break
3201 }
3202 v.reset(OpMIPS64MOVDload)
3203 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3204 v.Aux = symToAux(sym)
3205 v.AddArg2(ptr, mem)
3206 return true
3207 }
3208
3209
3210
3211 for {
3212 off1 := auxIntToInt32(v.AuxInt)
3213 sym1 := auxToSym(v.Aux)
3214 if v_0.Op != OpMIPS64MOVVaddr {
3215 break
3216 }
3217 off2 := auxIntToInt32(v_0.AuxInt)
3218 sym2 := auxToSym(v_0.Aux)
3219 ptr := v_0.Args[0]
3220 mem := v_1
3221 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3222 break
3223 }
3224 v.reset(OpMIPS64MOVDload)
3225 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3226 v.Aux = symToAux(mergeSym(sym1, sym2))
3227 v.AddArg2(ptr, mem)
3228 return true
3229 }
3230 return false
3231 }
3232 func rewriteValueMIPS64_OpMIPS64MOVDstore(v *Value) bool {
3233 v_2 := v.Args[2]
3234 v_1 := v.Args[1]
3235 v_0 := v.Args[0]
3236 b := v.Block
3237 config := b.Func.Config
3238
3239
3240 for {
3241 off := auxIntToInt32(v.AuxInt)
3242 sym := auxToSym(v.Aux)
3243 ptr := v_0
3244 if v_1.Op != OpMIPS64MOVVgpfp {
3245 break
3246 }
3247 val := v_1.Args[0]
3248 mem := v_2
3249 v.reset(OpMIPS64MOVVstore)
3250 v.AuxInt = int32ToAuxInt(off)
3251 v.Aux = symToAux(sym)
3252 v.AddArg3(ptr, val, mem)
3253 return true
3254 }
3255
3256
3257
3258 for {
3259 off1 := auxIntToInt32(v.AuxInt)
3260 sym := auxToSym(v.Aux)
3261 if v_0.Op != OpMIPS64ADDVconst {
3262 break
3263 }
3264 off2 := auxIntToInt64(v_0.AuxInt)
3265 ptr := v_0.Args[0]
3266 val := v_1
3267 mem := v_2
3268 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3269 break
3270 }
3271 v.reset(OpMIPS64MOVDstore)
3272 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3273 v.Aux = symToAux(sym)
3274 v.AddArg3(ptr, val, mem)
3275 return true
3276 }
3277
3278
3279
3280 for {
3281 off1 := auxIntToInt32(v.AuxInt)
3282 sym1 := auxToSym(v.Aux)
3283 if v_0.Op != OpMIPS64MOVVaddr {
3284 break
3285 }
3286 off2 := auxIntToInt32(v_0.AuxInt)
3287 sym2 := auxToSym(v_0.Aux)
3288 ptr := v_0.Args[0]
3289 val := v_1
3290 mem := v_2
3291 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3292 break
3293 }
3294 v.reset(OpMIPS64MOVDstore)
3295 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3296 v.Aux = symToAux(mergeSym(sym1, sym2))
3297 v.AddArg3(ptr, val, mem)
3298 return true
3299 }
3300 return false
3301 }
3302 func rewriteValueMIPS64_OpMIPS64MOVFload(v *Value) bool {
3303 v_1 := v.Args[1]
3304 v_0 := v.Args[0]
3305 b := v.Block
3306 config := b.Func.Config
3307
3308
3309 for {
3310 off := auxIntToInt32(v.AuxInt)
3311 sym := auxToSym(v.Aux)
3312 ptr := v_0
3313 if v_1.Op != OpMIPS64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
3314 break
3315 }
3316 val := v_1.Args[1]
3317 if ptr != v_1.Args[0] {
3318 break
3319 }
3320 v.reset(OpMIPS64MOVWgpfp)
3321 v.AddArg(val)
3322 return true
3323 }
3324
3325
3326
3327 for {
3328 off1 := auxIntToInt32(v.AuxInt)
3329 sym := auxToSym(v.Aux)
3330 if v_0.Op != OpMIPS64ADDVconst {
3331 break
3332 }
3333 off2 := auxIntToInt64(v_0.AuxInt)
3334 ptr := v_0.Args[0]
3335 mem := v_1
3336 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3337 break
3338 }
3339 v.reset(OpMIPS64MOVFload)
3340 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3341 v.Aux = symToAux(sym)
3342 v.AddArg2(ptr, mem)
3343 return true
3344 }
3345
3346
3347
3348 for {
3349 off1 := auxIntToInt32(v.AuxInt)
3350 sym1 := auxToSym(v.Aux)
3351 if v_0.Op != OpMIPS64MOVVaddr {
3352 break
3353 }
3354 off2 := auxIntToInt32(v_0.AuxInt)
3355 sym2 := auxToSym(v_0.Aux)
3356 ptr := v_0.Args[0]
3357 mem := v_1
3358 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3359 break
3360 }
3361 v.reset(OpMIPS64MOVFload)
3362 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3363 v.Aux = symToAux(mergeSym(sym1, sym2))
3364 v.AddArg2(ptr, mem)
3365 return true
3366 }
3367 return false
3368 }
3369 func rewriteValueMIPS64_OpMIPS64MOVFstore(v *Value) bool {
3370 v_2 := v.Args[2]
3371 v_1 := v.Args[1]
3372 v_0 := v.Args[0]
3373 b := v.Block
3374 config := b.Func.Config
3375
3376
3377 for {
3378 off := auxIntToInt32(v.AuxInt)
3379 sym := auxToSym(v.Aux)
3380 ptr := v_0
3381 if v_1.Op != OpMIPS64MOVWgpfp {
3382 break
3383 }
3384 val := v_1.Args[0]
3385 mem := v_2
3386 v.reset(OpMIPS64MOVWstore)
3387 v.AuxInt = int32ToAuxInt(off)
3388 v.Aux = symToAux(sym)
3389 v.AddArg3(ptr, val, mem)
3390 return true
3391 }
3392
3393
3394
3395 for {
3396 off1 := auxIntToInt32(v.AuxInt)
3397 sym := auxToSym(v.Aux)
3398 if v_0.Op != OpMIPS64ADDVconst {
3399 break
3400 }
3401 off2 := auxIntToInt64(v_0.AuxInt)
3402 ptr := v_0.Args[0]
3403 val := v_1
3404 mem := v_2
3405 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3406 break
3407 }
3408 v.reset(OpMIPS64MOVFstore)
3409 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3410 v.Aux = symToAux(sym)
3411 v.AddArg3(ptr, val, mem)
3412 return true
3413 }
3414
3415
3416
3417 for {
3418 off1 := auxIntToInt32(v.AuxInt)
3419 sym1 := auxToSym(v.Aux)
3420 if v_0.Op != OpMIPS64MOVVaddr {
3421 break
3422 }
3423 off2 := auxIntToInt32(v_0.AuxInt)
3424 sym2 := auxToSym(v_0.Aux)
3425 ptr := v_0.Args[0]
3426 val := v_1
3427 mem := v_2
3428 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3429 break
3430 }
3431 v.reset(OpMIPS64MOVFstore)
3432 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3433 v.Aux = symToAux(mergeSym(sym1, sym2))
3434 v.AddArg3(ptr, val, mem)
3435 return true
3436 }
3437 return false
3438 }
3439 func rewriteValueMIPS64_OpMIPS64MOVHUload(v *Value) bool {
3440 v_1 := v.Args[1]
3441 v_0 := v.Args[0]
3442 b := v.Block
3443 config := b.Func.Config
3444
3445
3446
3447 for {
3448 off1 := auxIntToInt32(v.AuxInt)
3449 sym := auxToSym(v.Aux)
3450 if v_0.Op != OpMIPS64ADDVconst {
3451 break
3452 }
3453 off2 := auxIntToInt64(v_0.AuxInt)
3454 ptr := v_0.Args[0]
3455 mem := v_1
3456 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3457 break
3458 }
3459 v.reset(OpMIPS64MOVHUload)
3460 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3461 v.Aux = symToAux(sym)
3462 v.AddArg2(ptr, mem)
3463 return true
3464 }
3465
3466
3467
3468 for {
3469 off1 := auxIntToInt32(v.AuxInt)
3470 sym1 := auxToSym(v.Aux)
3471 if v_0.Op != OpMIPS64MOVVaddr {
3472 break
3473 }
3474 off2 := auxIntToInt32(v_0.AuxInt)
3475 sym2 := auxToSym(v_0.Aux)
3476 ptr := v_0.Args[0]
3477 mem := v_1
3478 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3479 break
3480 }
3481 v.reset(OpMIPS64MOVHUload)
3482 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3483 v.Aux = symToAux(mergeSym(sym1, sym2))
3484 v.AddArg2(ptr, mem)
3485 return true
3486 }
3487 return false
3488 }
3489 func rewriteValueMIPS64_OpMIPS64MOVHUreg(v *Value) bool {
3490 v_0 := v.Args[0]
3491
3492
3493 for {
3494 x := v_0
3495 if x.Op != OpMIPS64MOVBUload {
3496 break
3497 }
3498 v.reset(OpMIPS64MOVVreg)
3499 v.AddArg(x)
3500 return true
3501 }
3502
3503
3504 for {
3505 x := v_0
3506 if x.Op != OpMIPS64MOVHUload {
3507 break
3508 }
3509 v.reset(OpMIPS64MOVVreg)
3510 v.AddArg(x)
3511 return true
3512 }
3513
3514
3515 for {
3516 x := v_0
3517 if x.Op != OpMIPS64MOVBUreg {
3518 break
3519 }
3520 v.reset(OpMIPS64MOVVreg)
3521 v.AddArg(x)
3522 return true
3523 }
3524
3525
3526 for {
3527 x := v_0
3528 if x.Op != OpMIPS64MOVHUreg {
3529 break
3530 }
3531 v.reset(OpMIPS64MOVVreg)
3532 v.AddArg(x)
3533 return true
3534 }
3535
3536
3537 for {
3538 if v_0.Op != OpMIPS64MOVVconst {
3539 break
3540 }
3541 c := auxIntToInt64(v_0.AuxInt)
3542 v.reset(OpMIPS64MOVVconst)
3543 v.AuxInt = int64ToAuxInt(int64(uint16(c)))
3544 return true
3545 }
3546 return false
3547 }
3548 func rewriteValueMIPS64_OpMIPS64MOVHload(v *Value) bool {
3549 v_1 := v.Args[1]
3550 v_0 := v.Args[0]
3551 b := v.Block
3552 config := b.Func.Config
3553
3554
3555
3556 for {
3557 off1 := auxIntToInt32(v.AuxInt)
3558 sym := auxToSym(v.Aux)
3559 if v_0.Op != OpMIPS64ADDVconst {
3560 break
3561 }
3562 off2 := auxIntToInt64(v_0.AuxInt)
3563 ptr := v_0.Args[0]
3564 mem := v_1
3565 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3566 break
3567 }
3568 v.reset(OpMIPS64MOVHload)
3569 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3570 v.Aux = symToAux(sym)
3571 v.AddArg2(ptr, mem)
3572 return true
3573 }
3574
3575
3576
3577 for {
3578 off1 := auxIntToInt32(v.AuxInt)
3579 sym1 := auxToSym(v.Aux)
3580 if v_0.Op != OpMIPS64MOVVaddr {
3581 break
3582 }
3583 off2 := auxIntToInt32(v_0.AuxInt)
3584 sym2 := auxToSym(v_0.Aux)
3585 ptr := v_0.Args[0]
3586 mem := v_1
3587 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3588 break
3589 }
3590 v.reset(OpMIPS64MOVHload)
3591 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3592 v.Aux = symToAux(mergeSym(sym1, sym2))
3593 v.AddArg2(ptr, mem)
3594 return true
3595 }
3596
3597
3598
3599 for {
3600 off := auxIntToInt32(v.AuxInt)
3601 sym := auxToSym(v.Aux)
3602 if v_0.Op != OpSB || !(symIsRO(sym)) {
3603 break
3604 }
3605 v.reset(OpMIPS64MOVVconst)
3606 v.AuxInt = int64ToAuxInt(int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))
3607 return true
3608 }
3609 return false
3610 }
3611 func rewriteValueMIPS64_OpMIPS64MOVHreg(v *Value) bool {
3612 v_0 := v.Args[0]
3613
3614
3615 for {
3616 x := v_0
3617 if x.Op != OpMIPS64MOVBload {
3618 break
3619 }
3620 v.reset(OpMIPS64MOVVreg)
3621 v.AddArg(x)
3622 return true
3623 }
3624
3625
3626 for {
3627 x := v_0
3628 if x.Op != OpMIPS64MOVBUload {
3629 break
3630 }
3631 v.reset(OpMIPS64MOVVreg)
3632 v.AddArg(x)
3633 return true
3634 }
3635
3636
3637 for {
3638 x := v_0
3639 if x.Op != OpMIPS64MOVHload {
3640 break
3641 }
3642 v.reset(OpMIPS64MOVVreg)
3643 v.AddArg(x)
3644 return true
3645 }
3646
3647
3648 for {
3649 x := v_0
3650 if x.Op != OpMIPS64MOVBreg {
3651 break
3652 }
3653 v.reset(OpMIPS64MOVVreg)
3654 v.AddArg(x)
3655 return true
3656 }
3657
3658
3659 for {
3660 x := v_0
3661 if x.Op != OpMIPS64MOVBUreg {
3662 break
3663 }
3664 v.reset(OpMIPS64MOVVreg)
3665 v.AddArg(x)
3666 return true
3667 }
3668
3669
3670 for {
3671 x := v_0
3672 if x.Op != OpMIPS64MOVHreg {
3673 break
3674 }
3675 v.reset(OpMIPS64MOVVreg)
3676 v.AddArg(x)
3677 return true
3678 }
3679
3680
3681 for {
3682 if v_0.Op != OpMIPS64MOVVconst {
3683 break
3684 }
3685 c := auxIntToInt64(v_0.AuxInt)
3686 v.reset(OpMIPS64MOVVconst)
3687 v.AuxInt = int64ToAuxInt(int64(int16(c)))
3688 return true
3689 }
3690 return false
3691 }
3692 func rewriteValueMIPS64_OpMIPS64MOVHstore(v *Value) bool {
3693 v_2 := v.Args[2]
3694 v_1 := v.Args[1]
3695 v_0 := v.Args[0]
3696 b := v.Block
3697 config := b.Func.Config
3698
3699
3700
3701 for {
3702 off1 := auxIntToInt32(v.AuxInt)
3703 sym := auxToSym(v.Aux)
3704 if v_0.Op != OpMIPS64ADDVconst {
3705 break
3706 }
3707 off2 := auxIntToInt64(v_0.AuxInt)
3708 ptr := v_0.Args[0]
3709 val := v_1
3710 mem := v_2
3711 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3712 break
3713 }
3714 v.reset(OpMIPS64MOVHstore)
3715 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3716 v.Aux = symToAux(sym)
3717 v.AddArg3(ptr, val, mem)
3718 return true
3719 }
3720
3721
3722
3723 for {
3724 off1 := auxIntToInt32(v.AuxInt)
3725 sym1 := auxToSym(v.Aux)
3726 if v_0.Op != OpMIPS64MOVVaddr {
3727 break
3728 }
3729 off2 := auxIntToInt32(v_0.AuxInt)
3730 sym2 := auxToSym(v_0.Aux)
3731 ptr := v_0.Args[0]
3732 val := v_1
3733 mem := v_2
3734 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3735 break
3736 }
3737 v.reset(OpMIPS64MOVHstore)
3738 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3739 v.Aux = symToAux(mergeSym(sym1, sym2))
3740 v.AddArg3(ptr, val, mem)
3741 return true
3742 }
3743
3744
3745 for {
3746 off := auxIntToInt32(v.AuxInt)
3747 sym := auxToSym(v.Aux)
3748 ptr := v_0
3749 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
3750 break
3751 }
3752 mem := v_2
3753 v.reset(OpMIPS64MOVHstorezero)
3754 v.AuxInt = int32ToAuxInt(off)
3755 v.Aux = symToAux(sym)
3756 v.AddArg2(ptr, mem)
3757 return true
3758 }
3759
3760
3761 for {
3762 off := auxIntToInt32(v.AuxInt)
3763 sym := auxToSym(v.Aux)
3764 ptr := v_0
3765 if v_1.Op != OpMIPS64MOVHreg {
3766 break
3767 }
3768 x := v_1.Args[0]
3769 mem := v_2
3770 v.reset(OpMIPS64MOVHstore)
3771 v.AuxInt = int32ToAuxInt(off)
3772 v.Aux = symToAux(sym)
3773 v.AddArg3(ptr, x, mem)
3774 return true
3775 }
3776
3777
3778 for {
3779 off := auxIntToInt32(v.AuxInt)
3780 sym := auxToSym(v.Aux)
3781 ptr := v_0
3782 if v_1.Op != OpMIPS64MOVHUreg {
3783 break
3784 }
3785 x := v_1.Args[0]
3786 mem := v_2
3787 v.reset(OpMIPS64MOVHstore)
3788 v.AuxInt = int32ToAuxInt(off)
3789 v.Aux = symToAux(sym)
3790 v.AddArg3(ptr, x, mem)
3791 return true
3792 }
3793
3794
3795 for {
3796 off := auxIntToInt32(v.AuxInt)
3797 sym := auxToSym(v.Aux)
3798 ptr := v_0
3799 if v_1.Op != OpMIPS64MOVWreg {
3800 break
3801 }
3802 x := v_1.Args[0]
3803 mem := v_2
3804 v.reset(OpMIPS64MOVHstore)
3805 v.AuxInt = int32ToAuxInt(off)
3806 v.Aux = symToAux(sym)
3807 v.AddArg3(ptr, x, mem)
3808 return true
3809 }
3810
3811
3812 for {
3813 off := auxIntToInt32(v.AuxInt)
3814 sym := auxToSym(v.Aux)
3815 ptr := v_0
3816 if v_1.Op != OpMIPS64MOVWUreg {
3817 break
3818 }
3819 x := v_1.Args[0]
3820 mem := v_2
3821 v.reset(OpMIPS64MOVHstore)
3822 v.AuxInt = int32ToAuxInt(off)
3823 v.Aux = symToAux(sym)
3824 v.AddArg3(ptr, x, mem)
3825 return true
3826 }
3827 return false
3828 }
3829 func rewriteValueMIPS64_OpMIPS64MOVHstorezero(v *Value) bool {
3830 v_1 := v.Args[1]
3831 v_0 := v.Args[0]
3832 b := v.Block
3833 config := b.Func.Config
3834
3835
3836
3837 for {
3838 off1 := auxIntToInt32(v.AuxInt)
3839 sym := auxToSym(v.Aux)
3840 if v_0.Op != OpMIPS64ADDVconst {
3841 break
3842 }
3843 off2 := auxIntToInt64(v_0.AuxInt)
3844 ptr := v_0.Args[0]
3845 mem := v_1
3846 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3847 break
3848 }
3849 v.reset(OpMIPS64MOVHstorezero)
3850 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3851 v.Aux = symToAux(sym)
3852 v.AddArg2(ptr, mem)
3853 return true
3854 }
3855
3856
3857
3858 for {
3859 off1 := auxIntToInt32(v.AuxInt)
3860 sym1 := auxToSym(v.Aux)
3861 if v_0.Op != OpMIPS64MOVVaddr {
3862 break
3863 }
3864 off2 := auxIntToInt32(v_0.AuxInt)
3865 sym2 := auxToSym(v_0.Aux)
3866 ptr := v_0.Args[0]
3867 mem := v_1
3868 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3869 break
3870 }
3871 v.reset(OpMIPS64MOVHstorezero)
3872 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3873 v.Aux = symToAux(mergeSym(sym1, sym2))
3874 v.AddArg2(ptr, mem)
3875 return true
3876 }
3877 return false
3878 }
3879 func rewriteValueMIPS64_OpMIPS64MOVVload(v *Value) bool {
3880 v_1 := v.Args[1]
3881 v_0 := v.Args[0]
3882 b := v.Block
3883 config := b.Func.Config
3884
3885
3886 for {
3887 off := auxIntToInt32(v.AuxInt)
3888 sym := auxToSym(v.Aux)
3889 ptr := v_0
3890 if v_1.Op != OpMIPS64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
3891 break
3892 }
3893 val := v_1.Args[1]
3894 if ptr != v_1.Args[0] {
3895 break
3896 }
3897 v.reset(OpMIPS64MOVVfpgp)
3898 v.AddArg(val)
3899 return true
3900 }
3901
3902
3903
3904 for {
3905 off1 := auxIntToInt32(v.AuxInt)
3906 sym := auxToSym(v.Aux)
3907 if v_0.Op != OpMIPS64ADDVconst {
3908 break
3909 }
3910 off2 := auxIntToInt64(v_0.AuxInt)
3911 ptr := v_0.Args[0]
3912 mem := v_1
3913 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3914 break
3915 }
3916 v.reset(OpMIPS64MOVVload)
3917 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3918 v.Aux = symToAux(sym)
3919 v.AddArg2(ptr, mem)
3920 return true
3921 }
3922
3923
3924
3925 for {
3926 off1 := auxIntToInt32(v.AuxInt)
3927 sym1 := auxToSym(v.Aux)
3928 if v_0.Op != OpMIPS64MOVVaddr {
3929 break
3930 }
3931 off2 := auxIntToInt32(v_0.AuxInt)
3932 sym2 := auxToSym(v_0.Aux)
3933 ptr := v_0.Args[0]
3934 mem := v_1
3935 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
3936 break
3937 }
3938 v.reset(OpMIPS64MOVVload)
3939 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3940 v.Aux = symToAux(mergeSym(sym1, sym2))
3941 v.AddArg2(ptr, mem)
3942 return true
3943 }
3944
3945
3946
3947 for {
3948 off := auxIntToInt32(v.AuxInt)
3949 sym := auxToSym(v.Aux)
3950 if v_0.Op != OpSB || !(symIsRO(sym)) {
3951 break
3952 }
3953 v.reset(OpMIPS64MOVVconst)
3954 v.AuxInt = int64ToAuxInt(int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder)))
3955 return true
3956 }
3957 return false
3958 }
3959 func rewriteValueMIPS64_OpMIPS64MOVVnop(v *Value) bool {
3960 v_0 := v.Args[0]
3961
3962
3963 for {
3964 if v_0.Op != OpMIPS64MOVVconst {
3965 break
3966 }
3967 c := auxIntToInt64(v_0.AuxInt)
3968 v.reset(OpMIPS64MOVVconst)
3969 v.AuxInt = int64ToAuxInt(c)
3970 return true
3971 }
3972 return false
3973 }
3974 func rewriteValueMIPS64_OpMIPS64MOVVreg(v *Value) bool {
3975 v_0 := v.Args[0]
3976
3977
3978
3979 for {
3980 x := v_0
3981 if !(x.Uses == 1) {
3982 break
3983 }
3984 v.reset(OpMIPS64MOVVnop)
3985 v.AddArg(x)
3986 return true
3987 }
3988
3989
3990 for {
3991 if v_0.Op != OpMIPS64MOVVconst {
3992 break
3993 }
3994 c := auxIntToInt64(v_0.AuxInt)
3995 v.reset(OpMIPS64MOVVconst)
3996 v.AuxInt = int64ToAuxInt(c)
3997 return true
3998 }
3999 return false
4000 }
4001 func rewriteValueMIPS64_OpMIPS64MOVVstore(v *Value) bool {
4002 v_2 := v.Args[2]
4003 v_1 := v.Args[1]
4004 v_0 := v.Args[0]
4005 b := v.Block
4006 config := b.Func.Config
4007
4008
4009 for {
4010 off := auxIntToInt32(v.AuxInt)
4011 sym := auxToSym(v.Aux)
4012 ptr := v_0
4013 if v_1.Op != OpMIPS64MOVVfpgp {
4014 break
4015 }
4016 val := v_1.Args[0]
4017 mem := v_2
4018 v.reset(OpMIPS64MOVDstore)
4019 v.AuxInt = int32ToAuxInt(off)
4020 v.Aux = symToAux(sym)
4021 v.AddArg3(ptr, val, mem)
4022 return true
4023 }
4024
4025
4026
4027 for {
4028 off1 := auxIntToInt32(v.AuxInt)
4029 sym := auxToSym(v.Aux)
4030 if v_0.Op != OpMIPS64ADDVconst {
4031 break
4032 }
4033 off2 := auxIntToInt64(v_0.AuxInt)
4034 ptr := v_0.Args[0]
4035 val := v_1
4036 mem := v_2
4037 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4038 break
4039 }
4040 v.reset(OpMIPS64MOVVstore)
4041 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4042 v.Aux = symToAux(sym)
4043 v.AddArg3(ptr, val, mem)
4044 return true
4045 }
4046
4047
4048
4049 for {
4050 off1 := auxIntToInt32(v.AuxInt)
4051 sym1 := auxToSym(v.Aux)
4052 if v_0.Op != OpMIPS64MOVVaddr {
4053 break
4054 }
4055 off2 := auxIntToInt32(v_0.AuxInt)
4056 sym2 := auxToSym(v_0.Aux)
4057 ptr := v_0.Args[0]
4058 val := v_1
4059 mem := v_2
4060 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4061 break
4062 }
4063 v.reset(OpMIPS64MOVVstore)
4064 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4065 v.Aux = symToAux(mergeSym(sym1, sym2))
4066 v.AddArg3(ptr, val, mem)
4067 return true
4068 }
4069
4070
4071 for {
4072 off := auxIntToInt32(v.AuxInt)
4073 sym := auxToSym(v.Aux)
4074 ptr := v_0
4075 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
4076 break
4077 }
4078 mem := v_2
4079 v.reset(OpMIPS64MOVVstorezero)
4080 v.AuxInt = int32ToAuxInt(off)
4081 v.Aux = symToAux(sym)
4082 v.AddArg2(ptr, mem)
4083 return true
4084 }
4085 return false
4086 }
4087 func rewriteValueMIPS64_OpMIPS64MOVVstorezero(v *Value) bool {
4088 v_1 := v.Args[1]
4089 v_0 := v.Args[0]
4090 b := v.Block
4091 config := b.Func.Config
4092
4093
4094
4095 for {
4096 off1 := auxIntToInt32(v.AuxInt)
4097 sym := auxToSym(v.Aux)
4098 if v_0.Op != OpMIPS64ADDVconst {
4099 break
4100 }
4101 off2 := auxIntToInt64(v_0.AuxInt)
4102 ptr := v_0.Args[0]
4103 mem := v_1
4104 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4105 break
4106 }
4107 v.reset(OpMIPS64MOVVstorezero)
4108 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4109 v.Aux = symToAux(sym)
4110 v.AddArg2(ptr, mem)
4111 return true
4112 }
4113
4114
4115
4116 for {
4117 off1 := auxIntToInt32(v.AuxInt)
4118 sym1 := auxToSym(v.Aux)
4119 if v_0.Op != OpMIPS64MOVVaddr {
4120 break
4121 }
4122 off2 := auxIntToInt32(v_0.AuxInt)
4123 sym2 := auxToSym(v_0.Aux)
4124 ptr := v_0.Args[0]
4125 mem := v_1
4126 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4127 break
4128 }
4129 v.reset(OpMIPS64MOVVstorezero)
4130 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4131 v.Aux = symToAux(mergeSym(sym1, sym2))
4132 v.AddArg2(ptr, mem)
4133 return true
4134 }
4135 return false
4136 }
4137 func rewriteValueMIPS64_OpMIPS64MOVWUload(v *Value) bool {
4138 v_1 := v.Args[1]
4139 v_0 := v.Args[0]
4140 b := v.Block
4141 config := b.Func.Config
4142 typ := &b.Func.Config.Types
4143
4144
4145 for {
4146 off := auxIntToInt32(v.AuxInt)
4147 sym := auxToSym(v.Aux)
4148 ptr := v_0
4149 if v_1.Op != OpMIPS64MOVFstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
4150 break
4151 }
4152 val := v_1.Args[1]
4153 if ptr != v_1.Args[0] {
4154 break
4155 }
4156 v.reset(OpZeroExt32to64)
4157 v0 := b.NewValue0(v_1.Pos, OpMIPS64MOVWfpgp, typ.Float32)
4158 v0.AddArg(val)
4159 v.AddArg(v0)
4160 return true
4161 }
4162
4163
4164
4165 for {
4166 off1 := auxIntToInt32(v.AuxInt)
4167 sym := auxToSym(v.Aux)
4168 if v_0.Op != OpMIPS64ADDVconst {
4169 break
4170 }
4171 off2 := auxIntToInt64(v_0.AuxInt)
4172 ptr := v_0.Args[0]
4173 mem := v_1
4174 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4175 break
4176 }
4177 v.reset(OpMIPS64MOVWUload)
4178 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4179 v.Aux = symToAux(sym)
4180 v.AddArg2(ptr, mem)
4181 return true
4182 }
4183
4184
4185
4186 for {
4187 off1 := auxIntToInt32(v.AuxInt)
4188 sym1 := auxToSym(v.Aux)
4189 if v_0.Op != OpMIPS64MOVVaddr {
4190 break
4191 }
4192 off2 := auxIntToInt32(v_0.AuxInt)
4193 sym2 := auxToSym(v_0.Aux)
4194 ptr := v_0.Args[0]
4195 mem := v_1
4196 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4197 break
4198 }
4199 v.reset(OpMIPS64MOVWUload)
4200 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4201 v.Aux = symToAux(mergeSym(sym1, sym2))
4202 v.AddArg2(ptr, mem)
4203 return true
4204 }
4205 return false
4206 }
4207 func rewriteValueMIPS64_OpMIPS64MOVWUreg(v *Value) bool {
4208 v_0 := v.Args[0]
4209
4210
4211 for {
4212 x := v_0
4213 if x.Op != OpMIPS64MOVBUload {
4214 break
4215 }
4216 v.reset(OpMIPS64MOVVreg)
4217 v.AddArg(x)
4218 return true
4219 }
4220
4221
4222 for {
4223 x := v_0
4224 if x.Op != OpMIPS64MOVHUload {
4225 break
4226 }
4227 v.reset(OpMIPS64MOVVreg)
4228 v.AddArg(x)
4229 return true
4230 }
4231
4232
4233 for {
4234 x := v_0
4235 if x.Op != OpMIPS64MOVWUload {
4236 break
4237 }
4238 v.reset(OpMIPS64MOVVreg)
4239 v.AddArg(x)
4240 return true
4241 }
4242
4243
4244 for {
4245 x := v_0
4246 if x.Op != OpMIPS64MOVBUreg {
4247 break
4248 }
4249 v.reset(OpMIPS64MOVVreg)
4250 v.AddArg(x)
4251 return true
4252 }
4253
4254
4255 for {
4256 x := v_0
4257 if x.Op != OpMIPS64MOVHUreg {
4258 break
4259 }
4260 v.reset(OpMIPS64MOVVreg)
4261 v.AddArg(x)
4262 return true
4263 }
4264
4265
4266 for {
4267 x := v_0
4268 if x.Op != OpMIPS64MOVWUreg {
4269 break
4270 }
4271 v.reset(OpMIPS64MOVVreg)
4272 v.AddArg(x)
4273 return true
4274 }
4275
4276
4277 for {
4278 if v_0.Op != OpMIPS64MOVVconst {
4279 break
4280 }
4281 c := auxIntToInt64(v_0.AuxInt)
4282 v.reset(OpMIPS64MOVVconst)
4283 v.AuxInt = int64ToAuxInt(int64(uint32(c)))
4284 return true
4285 }
4286 return false
4287 }
4288 func rewriteValueMIPS64_OpMIPS64MOVWload(v *Value) bool {
4289 v_1 := v.Args[1]
4290 v_0 := v.Args[0]
4291 b := v.Block
4292 config := b.Func.Config
4293
4294
4295
4296 for {
4297 off1 := auxIntToInt32(v.AuxInt)
4298 sym := auxToSym(v.Aux)
4299 if v_0.Op != OpMIPS64ADDVconst {
4300 break
4301 }
4302 off2 := auxIntToInt64(v_0.AuxInt)
4303 ptr := v_0.Args[0]
4304 mem := v_1
4305 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4306 break
4307 }
4308 v.reset(OpMIPS64MOVWload)
4309 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4310 v.Aux = symToAux(sym)
4311 v.AddArg2(ptr, mem)
4312 return true
4313 }
4314
4315
4316
4317 for {
4318 off1 := auxIntToInt32(v.AuxInt)
4319 sym1 := auxToSym(v.Aux)
4320 if v_0.Op != OpMIPS64MOVVaddr {
4321 break
4322 }
4323 off2 := auxIntToInt32(v_0.AuxInt)
4324 sym2 := auxToSym(v_0.Aux)
4325 ptr := v_0.Args[0]
4326 mem := v_1
4327 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4328 break
4329 }
4330 v.reset(OpMIPS64MOVWload)
4331 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4332 v.Aux = symToAux(mergeSym(sym1, sym2))
4333 v.AddArg2(ptr, mem)
4334 return true
4335 }
4336
4337
4338
4339 for {
4340 off := auxIntToInt32(v.AuxInt)
4341 sym := auxToSym(v.Aux)
4342 if v_0.Op != OpSB || !(symIsRO(sym)) {
4343 break
4344 }
4345 v.reset(OpMIPS64MOVVconst)
4346 v.AuxInt = int64ToAuxInt(int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))
4347 return true
4348 }
4349 return false
4350 }
4351 func rewriteValueMIPS64_OpMIPS64MOVWreg(v *Value) bool {
4352 v_0 := v.Args[0]
4353
4354
4355 for {
4356 x := v_0
4357 if x.Op != OpMIPS64MOVBload {
4358 break
4359 }
4360 v.reset(OpMIPS64MOVVreg)
4361 v.AddArg(x)
4362 return true
4363 }
4364
4365
4366 for {
4367 x := v_0
4368 if x.Op != OpMIPS64MOVBUload {
4369 break
4370 }
4371 v.reset(OpMIPS64MOVVreg)
4372 v.AddArg(x)
4373 return true
4374 }
4375
4376
4377 for {
4378 x := v_0
4379 if x.Op != OpMIPS64MOVHload {
4380 break
4381 }
4382 v.reset(OpMIPS64MOVVreg)
4383 v.AddArg(x)
4384 return true
4385 }
4386
4387
4388 for {
4389 x := v_0
4390 if x.Op != OpMIPS64MOVHUload {
4391 break
4392 }
4393 v.reset(OpMIPS64MOVVreg)
4394 v.AddArg(x)
4395 return true
4396 }
4397
4398
4399 for {
4400 x := v_0
4401 if x.Op != OpMIPS64MOVWload {
4402 break
4403 }
4404 v.reset(OpMIPS64MOVVreg)
4405 v.AddArg(x)
4406 return true
4407 }
4408
4409
4410 for {
4411 x := v_0
4412 if x.Op != OpMIPS64MOVBreg {
4413 break
4414 }
4415 v.reset(OpMIPS64MOVVreg)
4416 v.AddArg(x)
4417 return true
4418 }
4419
4420
4421 for {
4422 x := v_0
4423 if x.Op != OpMIPS64MOVBUreg {
4424 break
4425 }
4426 v.reset(OpMIPS64MOVVreg)
4427 v.AddArg(x)
4428 return true
4429 }
4430
4431
4432 for {
4433 x := v_0
4434 if x.Op != OpMIPS64MOVHreg {
4435 break
4436 }
4437 v.reset(OpMIPS64MOVVreg)
4438 v.AddArg(x)
4439 return true
4440 }
4441
4442
4443 for {
4444 x := v_0
4445 if x.Op != OpMIPS64MOVWreg {
4446 break
4447 }
4448 v.reset(OpMIPS64MOVVreg)
4449 v.AddArg(x)
4450 return true
4451 }
4452
4453
4454 for {
4455 if v_0.Op != OpMIPS64MOVVconst {
4456 break
4457 }
4458 c := auxIntToInt64(v_0.AuxInt)
4459 v.reset(OpMIPS64MOVVconst)
4460 v.AuxInt = int64ToAuxInt(int64(int32(c)))
4461 return true
4462 }
4463 return false
4464 }
4465 func rewriteValueMIPS64_OpMIPS64MOVWstore(v *Value) bool {
4466 v_2 := v.Args[2]
4467 v_1 := v.Args[1]
4468 v_0 := v.Args[0]
4469 b := v.Block
4470 config := b.Func.Config
4471
4472
4473 for {
4474 off := auxIntToInt32(v.AuxInt)
4475 sym := auxToSym(v.Aux)
4476 ptr := v_0
4477 if v_1.Op != OpMIPS64MOVWfpgp {
4478 break
4479 }
4480 val := v_1.Args[0]
4481 mem := v_2
4482 v.reset(OpMIPS64MOVFstore)
4483 v.AuxInt = int32ToAuxInt(off)
4484 v.Aux = symToAux(sym)
4485 v.AddArg3(ptr, val, mem)
4486 return true
4487 }
4488
4489
4490
4491 for {
4492 off1 := auxIntToInt32(v.AuxInt)
4493 sym := auxToSym(v.Aux)
4494 if v_0.Op != OpMIPS64ADDVconst {
4495 break
4496 }
4497 off2 := auxIntToInt64(v_0.AuxInt)
4498 ptr := v_0.Args[0]
4499 val := v_1
4500 mem := v_2
4501 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4502 break
4503 }
4504 v.reset(OpMIPS64MOVWstore)
4505 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4506 v.Aux = symToAux(sym)
4507 v.AddArg3(ptr, val, mem)
4508 return true
4509 }
4510
4511
4512
4513 for {
4514 off1 := auxIntToInt32(v.AuxInt)
4515 sym1 := auxToSym(v.Aux)
4516 if v_0.Op != OpMIPS64MOVVaddr {
4517 break
4518 }
4519 off2 := auxIntToInt32(v_0.AuxInt)
4520 sym2 := auxToSym(v_0.Aux)
4521 ptr := v_0.Args[0]
4522 val := v_1
4523 mem := v_2
4524 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4525 break
4526 }
4527 v.reset(OpMIPS64MOVWstore)
4528 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4529 v.Aux = symToAux(mergeSym(sym1, sym2))
4530 v.AddArg3(ptr, val, mem)
4531 return true
4532 }
4533
4534
4535 for {
4536 off := auxIntToInt32(v.AuxInt)
4537 sym := auxToSym(v.Aux)
4538 ptr := v_0
4539 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
4540 break
4541 }
4542 mem := v_2
4543 v.reset(OpMIPS64MOVWstorezero)
4544 v.AuxInt = int32ToAuxInt(off)
4545 v.Aux = symToAux(sym)
4546 v.AddArg2(ptr, mem)
4547 return true
4548 }
4549
4550
4551 for {
4552 off := auxIntToInt32(v.AuxInt)
4553 sym := auxToSym(v.Aux)
4554 ptr := v_0
4555 if v_1.Op != OpMIPS64MOVWreg {
4556 break
4557 }
4558 x := v_1.Args[0]
4559 mem := v_2
4560 v.reset(OpMIPS64MOVWstore)
4561 v.AuxInt = int32ToAuxInt(off)
4562 v.Aux = symToAux(sym)
4563 v.AddArg3(ptr, x, mem)
4564 return true
4565 }
4566
4567
4568 for {
4569 off := auxIntToInt32(v.AuxInt)
4570 sym := auxToSym(v.Aux)
4571 ptr := v_0
4572 if v_1.Op != OpMIPS64MOVWUreg {
4573 break
4574 }
4575 x := v_1.Args[0]
4576 mem := v_2
4577 v.reset(OpMIPS64MOVWstore)
4578 v.AuxInt = int32ToAuxInt(off)
4579 v.Aux = symToAux(sym)
4580 v.AddArg3(ptr, x, mem)
4581 return true
4582 }
4583 return false
4584 }
4585 func rewriteValueMIPS64_OpMIPS64MOVWstorezero(v *Value) bool {
4586 v_1 := v.Args[1]
4587 v_0 := v.Args[0]
4588 b := v.Block
4589 config := b.Func.Config
4590
4591
4592
4593 for {
4594 off1 := auxIntToInt32(v.AuxInt)
4595 sym := auxToSym(v.Aux)
4596 if v_0.Op != OpMIPS64ADDVconst {
4597 break
4598 }
4599 off2 := auxIntToInt64(v_0.AuxInt)
4600 ptr := v_0.Args[0]
4601 mem := v_1
4602 if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4603 break
4604 }
4605 v.reset(OpMIPS64MOVWstorezero)
4606 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4607 v.Aux = symToAux(sym)
4608 v.AddArg2(ptr, mem)
4609 return true
4610 }
4611
4612
4613
4614 for {
4615 off1 := auxIntToInt32(v.AuxInt)
4616 sym1 := auxToSym(v.Aux)
4617 if v_0.Op != OpMIPS64MOVVaddr {
4618 break
4619 }
4620 off2 := auxIntToInt32(v_0.AuxInt)
4621 sym2 := auxToSym(v_0.Aux)
4622 ptr := v_0.Args[0]
4623 mem := v_1
4624 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
4625 break
4626 }
4627 v.reset(OpMIPS64MOVWstorezero)
4628 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4629 v.Aux = symToAux(mergeSym(sym1, sym2))
4630 v.AddArg2(ptr, mem)
4631 return true
4632 }
4633 return false
4634 }
4635 func rewriteValueMIPS64_OpMIPS64NEGV(v *Value) bool {
4636 v_0 := v.Args[0]
4637
4638
4639 for {
4640 if v_0.Op != OpMIPS64MOVVconst {
4641 break
4642 }
4643 c := auxIntToInt64(v_0.AuxInt)
4644 v.reset(OpMIPS64MOVVconst)
4645 v.AuxInt = int64ToAuxInt(-c)
4646 return true
4647 }
4648 return false
4649 }
4650 func rewriteValueMIPS64_OpMIPS64NOR(v *Value) bool {
4651 v_1 := v.Args[1]
4652 v_0 := v.Args[0]
4653
4654
4655
4656 for {
4657 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4658 x := v_0
4659 if v_1.Op != OpMIPS64MOVVconst {
4660 continue
4661 }
4662 c := auxIntToInt64(v_1.AuxInt)
4663 if !(is32Bit(c)) {
4664 continue
4665 }
4666 v.reset(OpMIPS64NORconst)
4667 v.AuxInt = int64ToAuxInt(c)
4668 v.AddArg(x)
4669 return true
4670 }
4671 break
4672 }
4673 return false
4674 }
4675 func rewriteValueMIPS64_OpMIPS64NORconst(v *Value) bool {
4676 v_0 := v.Args[0]
4677
4678
4679 for {
4680 c := auxIntToInt64(v.AuxInt)
4681 if v_0.Op != OpMIPS64MOVVconst {
4682 break
4683 }
4684 d := auxIntToInt64(v_0.AuxInt)
4685 v.reset(OpMIPS64MOVVconst)
4686 v.AuxInt = int64ToAuxInt(^(c | d))
4687 return true
4688 }
4689 return false
4690 }
4691 func rewriteValueMIPS64_OpMIPS64OR(v *Value) bool {
4692 v_1 := v.Args[1]
4693 v_0 := v.Args[0]
4694
4695
4696
4697 for {
4698 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4699 x := v_0
4700 if v_1.Op != OpMIPS64MOVVconst {
4701 continue
4702 }
4703 c := auxIntToInt64(v_1.AuxInt)
4704 if !(is32Bit(c)) {
4705 continue
4706 }
4707 v.reset(OpMIPS64ORconst)
4708 v.AuxInt = int64ToAuxInt(c)
4709 v.AddArg(x)
4710 return true
4711 }
4712 break
4713 }
4714
4715
4716 for {
4717 x := v_0
4718 if x != v_1 {
4719 break
4720 }
4721 v.copyOf(x)
4722 return true
4723 }
4724 return false
4725 }
4726 func rewriteValueMIPS64_OpMIPS64ORconst(v *Value) bool {
4727 v_0 := v.Args[0]
4728
4729
4730 for {
4731 if auxIntToInt64(v.AuxInt) != 0 {
4732 break
4733 }
4734 x := v_0
4735 v.copyOf(x)
4736 return true
4737 }
4738
4739
4740 for {
4741 if auxIntToInt64(v.AuxInt) != -1 {
4742 break
4743 }
4744 v.reset(OpMIPS64MOVVconst)
4745 v.AuxInt = int64ToAuxInt(-1)
4746 return true
4747 }
4748
4749
4750 for {
4751 c := auxIntToInt64(v.AuxInt)
4752 if v_0.Op != OpMIPS64MOVVconst {
4753 break
4754 }
4755 d := auxIntToInt64(v_0.AuxInt)
4756 v.reset(OpMIPS64MOVVconst)
4757 v.AuxInt = int64ToAuxInt(c | d)
4758 return true
4759 }
4760
4761
4762
4763 for {
4764 c := auxIntToInt64(v.AuxInt)
4765 if v_0.Op != OpMIPS64ORconst {
4766 break
4767 }
4768 d := auxIntToInt64(v_0.AuxInt)
4769 x := v_0.Args[0]
4770 if !(is32Bit(c | d)) {
4771 break
4772 }
4773 v.reset(OpMIPS64ORconst)
4774 v.AuxInt = int64ToAuxInt(c | d)
4775 v.AddArg(x)
4776 return true
4777 }
4778 return false
4779 }
4780 func rewriteValueMIPS64_OpMIPS64SGT(v *Value) bool {
4781 v_1 := v.Args[1]
4782 v_0 := v.Args[0]
4783
4784
4785
4786 for {
4787 if v_0.Op != OpMIPS64MOVVconst {
4788 break
4789 }
4790 c := auxIntToInt64(v_0.AuxInt)
4791 x := v_1
4792 if !(is32Bit(c)) {
4793 break
4794 }
4795 v.reset(OpMIPS64SGTconst)
4796 v.AuxInt = int64ToAuxInt(c)
4797 v.AddArg(x)
4798 return true
4799 }
4800
4801
4802 for {
4803 x := v_0
4804 if x != v_1 {
4805 break
4806 }
4807 v.reset(OpMIPS64MOVVconst)
4808 v.AuxInt = int64ToAuxInt(0)
4809 return true
4810 }
4811 return false
4812 }
4813 func rewriteValueMIPS64_OpMIPS64SGTU(v *Value) bool {
4814 v_1 := v.Args[1]
4815 v_0 := v.Args[0]
4816
4817
4818
4819 for {
4820 if v_0.Op != OpMIPS64MOVVconst {
4821 break
4822 }
4823 c := auxIntToInt64(v_0.AuxInt)
4824 x := v_1
4825 if !(is32Bit(c)) {
4826 break
4827 }
4828 v.reset(OpMIPS64SGTUconst)
4829 v.AuxInt = int64ToAuxInt(c)
4830 v.AddArg(x)
4831 return true
4832 }
4833
4834
4835 for {
4836 x := v_0
4837 if x != v_1 {
4838 break
4839 }
4840 v.reset(OpMIPS64MOVVconst)
4841 v.AuxInt = int64ToAuxInt(0)
4842 return true
4843 }
4844 return false
4845 }
4846 func rewriteValueMIPS64_OpMIPS64SGTUconst(v *Value) bool {
4847 v_0 := v.Args[0]
4848
4849
4850
4851 for {
4852 c := auxIntToInt64(v.AuxInt)
4853 if v_0.Op != OpMIPS64MOVVconst {
4854 break
4855 }
4856 d := auxIntToInt64(v_0.AuxInt)
4857 if !(uint64(c) > uint64(d)) {
4858 break
4859 }
4860 v.reset(OpMIPS64MOVVconst)
4861 v.AuxInt = int64ToAuxInt(1)
4862 return true
4863 }
4864
4865
4866
4867 for {
4868 c := auxIntToInt64(v.AuxInt)
4869 if v_0.Op != OpMIPS64MOVVconst {
4870 break
4871 }
4872 d := auxIntToInt64(v_0.AuxInt)
4873 if !(uint64(c) <= uint64(d)) {
4874 break
4875 }
4876 v.reset(OpMIPS64MOVVconst)
4877 v.AuxInt = int64ToAuxInt(0)
4878 return true
4879 }
4880
4881
4882
4883 for {
4884 c := auxIntToInt64(v.AuxInt)
4885 if v_0.Op != OpMIPS64MOVBUreg || !(0xff < uint64(c)) {
4886 break
4887 }
4888 v.reset(OpMIPS64MOVVconst)
4889 v.AuxInt = int64ToAuxInt(1)
4890 return true
4891 }
4892
4893
4894
4895 for {
4896 c := auxIntToInt64(v.AuxInt)
4897 if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < uint64(c)) {
4898 break
4899 }
4900 v.reset(OpMIPS64MOVVconst)
4901 v.AuxInt = int64ToAuxInt(1)
4902 return true
4903 }
4904
4905
4906
4907 for {
4908 c := auxIntToInt64(v.AuxInt)
4909 if v_0.Op != OpMIPS64ANDconst {
4910 break
4911 }
4912 m := auxIntToInt64(v_0.AuxInt)
4913 if !(uint64(m) < uint64(c)) {
4914 break
4915 }
4916 v.reset(OpMIPS64MOVVconst)
4917 v.AuxInt = int64ToAuxInt(1)
4918 return true
4919 }
4920
4921
4922
4923 for {
4924 c := auxIntToInt64(v.AuxInt)
4925 if v_0.Op != OpMIPS64SRLVconst {
4926 break
4927 }
4928 d := auxIntToInt64(v_0.AuxInt)
4929 if !(0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
4930 break
4931 }
4932 v.reset(OpMIPS64MOVVconst)
4933 v.AuxInt = int64ToAuxInt(1)
4934 return true
4935 }
4936 return false
4937 }
4938 func rewriteValueMIPS64_OpMIPS64SGTconst(v *Value) bool {
4939 v_0 := v.Args[0]
4940
4941
4942
4943 for {
4944 c := auxIntToInt64(v.AuxInt)
4945 if v_0.Op != OpMIPS64MOVVconst {
4946 break
4947 }
4948 d := auxIntToInt64(v_0.AuxInt)
4949 if !(c > d) {
4950 break
4951 }
4952 v.reset(OpMIPS64MOVVconst)
4953 v.AuxInt = int64ToAuxInt(1)
4954 return true
4955 }
4956
4957
4958
4959 for {
4960 c := auxIntToInt64(v.AuxInt)
4961 if v_0.Op != OpMIPS64MOVVconst {
4962 break
4963 }
4964 d := auxIntToInt64(v_0.AuxInt)
4965 if !(c <= d) {
4966 break
4967 }
4968 v.reset(OpMIPS64MOVVconst)
4969 v.AuxInt = int64ToAuxInt(0)
4970 return true
4971 }
4972
4973
4974
4975 for {
4976 c := auxIntToInt64(v.AuxInt)
4977 if v_0.Op != OpMIPS64MOVBreg || !(0x7f < c) {
4978 break
4979 }
4980 v.reset(OpMIPS64MOVVconst)
4981 v.AuxInt = int64ToAuxInt(1)
4982 return true
4983 }
4984
4985
4986
4987 for {
4988 c := auxIntToInt64(v.AuxInt)
4989 if v_0.Op != OpMIPS64MOVBreg || !(c <= -0x80) {
4990 break
4991 }
4992 v.reset(OpMIPS64MOVVconst)
4993 v.AuxInt = int64ToAuxInt(0)
4994 return true
4995 }
4996
4997
4998
4999 for {
5000 c := auxIntToInt64(v.AuxInt)
5001 if v_0.Op != OpMIPS64MOVBUreg || !(0xff < c) {
5002 break
5003 }
5004 v.reset(OpMIPS64MOVVconst)
5005 v.AuxInt = int64ToAuxInt(1)
5006 return true
5007 }
5008
5009
5010
5011 for {
5012 c := auxIntToInt64(v.AuxInt)
5013 if v_0.Op != OpMIPS64MOVBUreg || !(c < 0) {
5014 break
5015 }
5016 v.reset(OpMIPS64MOVVconst)
5017 v.AuxInt = int64ToAuxInt(0)
5018 return true
5019 }
5020
5021
5022
5023 for {
5024 c := auxIntToInt64(v.AuxInt)
5025 if v_0.Op != OpMIPS64MOVHreg || !(0x7fff < c) {
5026 break
5027 }
5028 v.reset(OpMIPS64MOVVconst)
5029 v.AuxInt = int64ToAuxInt(1)
5030 return true
5031 }
5032
5033
5034
5035 for {
5036 c := auxIntToInt64(v.AuxInt)
5037 if v_0.Op != OpMIPS64MOVHreg || !(c <= -0x8000) {
5038 break
5039 }
5040 v.reset(OpMIPS64MOVVconst)
5041 v.AuxInt = int64ToAuxInt(0)
5042 return true
5043 }
5044
5045
5046
5047 for {
5048 c := auxIntToInt64(v.AuxInt)
5049 if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < c) {
5050 break
5051 }
5052 v.reset(OpMIPS64MOVVconst)
5053 v.AuxInt = int64ToAuxInt(1)
5054 return true
5055 }
5056
5057
5058
5059 for {
5060 c := auxIntToInt64(v.AuxInt)
5061 if v_0.Op != OpMIPS64MOVHUreg || !(c < 0) {
5062 break
5063 }
5064 v.reset(OpMIPS64MOVVconst)
5065 v.AuxInt = int64ToAuxInt(0)
5066 return true
5067 }
5068
5069
5070
5071 for {
5072 c := auxIntToInt64(v.AuxInt)
5073 if v_0.Op != OpMIPS64MOVWUreg || !(c < 0) {
5074 break
5075 }
5076 v.reset(OpMIPS64MOVVconst)
5077 v.AuxInt = int64ToAuxInt(0)
5078 return true
5079 }
5080
5081
5082
5083 for {
5084 c := auxIntToInt64(v.AuxInt)
5085 if v_0.Op != OpMIPS64ANDconst {
5086 break
5087 }
5088 m := auxIntToInt64(v_0.AuxInt)
5089 if !(0 <= m && m < c) {
5090 break
5091 }
5092 v.reset(OpMIPS64MOVVconst)
5093 v.AuxInt = int64ToAuxInt(1)
5094 return true
5095 }
5096
5097
5098
5099 for {
5100 c := auxIntToInt64(v.AuxInt)
5101 if v_0.Op != OpMIPS64SRLVconst {
5102 break
5103 }
5104 d := auxIntToInt64(v_0.AuxInt)
5105 if !(0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
5106 break
5107 }
5108 v.reset(OpMIPS64MOVVconst)
5109 v.AuxInt = int64ToAuxInt(1)
5110 return true
5111 }
5112 return false
5113 }
5114 func rewriteValueMIPS64_OpMIPS64SLLV(v *Value) bool {
5115 v_1 := v.Args[1]
5116 v_0 := v.Args[0]
5117
5118
5119
5120 for {
5121 if v_1.Op != OpMIPS64MOVVconst {
5122 break
5123 }
5124 c := auxIntToInt64(v_1.AuxInt)
5125 if !(uint64(c) >= 64) {
5126 break
5127 }
5128 v.reset(OpMIPS64MOVVconst)
5129 v.AuxInt = int64ToAuxInt(0)
5130 return true
5131 }
5132
5133
5134 for {
5135 x := v_0
5136 if v_1.Op != OpMIPS64MOVVconst {
5137 break
5138 }
5139 c := auxIntToInt64(v_1.AuxInt)
5140 v.reset(OpMIPS64SLLVconst)
5141 v.AuxInt = int64ToAuxInt(c)
5142 v.AddArg(x)
5143 return true
5144 }
5145 return false
5146 }
5147 func rewriteValueMIPS64_OpMIPS64SLLVconst(v *Value) bool {
5148 v_0 := v.Args[0]
5149
5150
5151 for {
5152 c := auxIntToInt64(v.AuxInt)
5153 if v_0.Op != OpMIPS64MOVVconst {
5154 break
5155 }
5156 d := auxIntToInt64(v_0.AuxInt)
5157 v.reset(OpMIPS64MOVVconst)
5158 v.AuxInt = int64ToAuxInt(d << uint64(c))
5159 return true
5160 }
5161 return false
5162 }
5163 func rewriteValueMIPS64_OpMIPS64SRAV(v *Value) bool {
5164 v_1 := v.Args[1]
5165 v_0 := v.Args[0]
5166
5167
5168
5169 for {
5170 x := v_0
5171 if v_1.Op != OpMIPS64MOVVconst {
5172 break
5173 }
5174 c := auxIntToInt64(v_1.AuxInt)
5175 if !(uint64(c) >= 64) {
5176 break
5177 }
5178 v.reset(OpMIPS64SRAVconst)
5179 v.AuxInt = int64ToAuxInt(63)
5180 v.AddArg(x)
5181 return true
5182 }
5183
5184
5185 for {
5186 x := v_0
5187 if v_1.Op != OpMIPS64MOVVconst {
5188 break
5189 }
5190 c := auxIntToInt64(v_1.AuxInt)
5191 v.reset(OpMIPS64SRAVconst)
5192 v.AuxInt = int64ToAuxInt(c)
5193 v.AddArg(x)
5194 return true
5195 }
5196 return false
5197 }
5198 func rewriteValueMIPS64_OpMIPS64SRAVconst(v *Value) bool {
5199 v_0 := v.Args[0]
5200
5201
5202 for {
5203 c := auxIntToInt64(v.AuxInt)
5204 if v_0.Op != OpMIPS64MOVVconst {
5205 break
5206 }
5207 d := auxIntToInt64(v_0.AuxInt)
5208 v.reset(OpMIPS64MOVVconst)
5209 v.AuxInt = int64ToAuxInt(d >> uint64(c))
5210 return true
5211 }
5212 return false
5213 }
5214 func rewriteValueMIPS64_OpMIPS64SRLV(v *Value) bool {
5215 v_1 := v.Args[1]
5216 v_0 := v.Args[0]
5217
5218
5219
5220 for {
5221 if v_1.Op != OpMIPS64MOVVconst {
5222 break
5223 }
5224 c := auxIntToInt64(v_1.AuxInt)
5225 if !(uint64(c) >= 64) {
5226 break
5227 }
5228 v.reset(OpMIPS64MOVVconst)
5229 v.AuxInt = int64ToAuxInt(0)
5230 return true
5231 }
5232
5233
5234 for {
5235 x := v_0
5236 if v_1.Op != OpMIPS64MOVVconst {
5237 break
5238 }
5239 c := auxIntToInt64(v_1.AuxInt)
5240 v.reset(OpMIPS64SRLVconst)
5241 v.AuxInt = int64ToAuxInt(c)
5242 v.AddArg(x)
5243 return true
5244 }
5245 return false
5246 }
5247 func rewriteValueMIPS64_OpMIPS64SRLVconst(v *Value) bool {
5248 v_0 := v.Args[0]
5249
5250
5251 for {
5252 c := auxIntToInt64(v.AuxInt)
5253 if v_0.Op != OpMIPS64MOVVconst {
5254 break
5255 }
5256 d := auxIntToInt64(v_0.AuxInt)
5257 v.reset(OpMIPS64MOVVconst)
5258 v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c)))
5259 return true
5260 }
5261 return false
5262 }
5263 func rewriteValueMIPS64_OpMIPS64SUBV(v *Value) bool {
5264 v_1 := v.Args[1]
5265 v_0 := v.Args[0]
5266
5267
5268
5269 for {
5270 x := v_0
5271 if v_1.Op != OpMIPS64MOVVconst {
5272 break
5273 }
5274 c := auxIntToInt64(v_1.AuxInt)
5275 if !(is32Bit(c)) {
5276 break
5277 }
5278 v.reset(OpMIPS64SUBVconst)
5279 v.AuxInt = int64ToAuxInt(c)
5280 v.AddArg(x)
5281 return true
5282 }
5283
5284
5285 for {
5286 x := v_0
5287 if x != v_1 {
5288 break
5289 }
5290 v.reset(OpMIPS64MOVVconst)
5291 v.AuxInt = int64ToAuxInt(0)
5292 return true
5293 }
5294
5295
5296 for {
5297 if v_0.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
5298 break
5299 }
5300 x := v_1
5301 v.reset(OpMIPS64NEGV)
5302 v.AddArg(x)
5303 return true
5304 }
5305 return false
5306 }
5307 func rewriteValueMIPS64_OpMIPS64SUBVconst(v *Value) bool {
5308 v_0 := v.Args[0]
5309
5310
5311 for {
5312 if auxIntToInt64(v.AuxInt) != 0 {
5313 break
5314 }
5315 x := v_0
5316 v.copyOf(x)
5317 return true
5318 }
5319
5320
5321 for {
5322 c := auxIntToInt64(v.AuxInt)
5323 if v_0.Op != OpMIPS64MOVVconst {
5324 break
5325 }
5326 d := auxIntToInt64(v_0.AuxInt)
5327 v.reset(OpMIPS64MOVVconst)
5328 v.AuxInt = int64ToAuxInt(d - c)
5329 return true
5330 }
5331
5332
5333
5334 for {
5335 c := auxIntToInt64(v.AuxInt)
5336 if v_0.Op != OpMIPS64SUBVconst {
5337 break
5338 }
5339 d := auxIntToInt64(v_0.AuxInt)
5340 x := v_0.Args[0]
5341 if !(is32Bit(-c - d)) {
5342 break
5343 }
5344 v.reset(OpMIPS64ADDVconst)
5345 v.AuxInt = int64ToAuxInt(-c - d)
5346 v.AddArg(x)
5347 return true
5348 }
5349
5350
5351
5352 for {
5353 c := auxIntToInt64(v.AuxInt)
5354 if v_0.Op != OpMIPS64ADDVconst {
5355 break
5356 }
5357 d := auxIntToInt64(v_0.AuxInt)
5358 x := v_0.Args[0]
5359 if !(is32Bit(-c + d)) {
5360 break
5361 }
5362 v.reset(OpMIPS64ADDVconst)
5363 v.AuxInt = int64ToAuxInt(-c + d)
5364 v.AddArg(x)
5365 return true
5366 }
5367 return false
5368 }
5369 func rewriteValueMIPS64_OpMIPS64XOR(v *Value) bool {
5370 v_1 := v.Args[1]
5371 v_0 := v.Args[0]
5372
5373
5374
5375 for {
5376 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
5377 x := v_0
5378 if v_1.Op != OpMIPS64MOVVconst {
5379 continue
5380 }
5381 c := auxIntToInt64(v_1.AuxInt)
5382 if !(is32Bit(c)) {
5383 continue
5384 }
5385 v.reset(OpMIPS64XORconst)
5386 v.AuxInt = int64ToAuxInt(c)
5387 v.AddArg(x)
5388 return true
5389 }
5390 break
5391 }
5392
5393
5394 for {
5395 x := v_0
5396 if x != v_1 {
5397 break
5398 }
5399 v.reset(OpMIPS64MOVVconst)
5400 v.AuxInt = int64ToAuxInt(0)
5401 return true
5402 }
5403 return false
5404 }
5405 func rewriteValueMIPS64_OpMIPS64XORconst(v *Value) bool {
5406 v_0 := v.Args[0]
5407
5408
5409 for {
5410 if auxIntToInt64(v.AuxInt) != 0 {
5411 break
5412 }
5413 x := v_0
5414 v.copyOf(x)
5415 return true
5416 }
5417
5418
5419 for {
5420 if auxIntToInt64(v.AuxInt) != -1 {
5421 break
5422 }
5423 x := v_0
5424 v.reset(OpMIPS64NORconst)
5425 v.AuxInt = int64ToAuxInt(0)
5426 v.AddArg(x)
5427 return true
5428 }
5429
5430
5431 for {
5432 c := auxIntToInt64(v.AuxInt)
5433 if v_0.Op != OpMIPS64MOVVconst {
5434 break
5435 }
5436 d := auxIntToInt64(v_0.AuxInt)
5437 v.reset(OpMIPS64MOVVconst)
5438 v.AuxInt = int64ToAuxInt(c ^ d)
5439 return true
5440 }
5441
5442
5443
5444 for {
5445 c := auxIntToInt64(v.AuxInt)
5446 if v_0.Op != OpMIPS64XORconst {
5447 break
5448 }
5449 d := auxIntToInt64(v_0.AuxInt)
5450 x := v_0.Args[0]
5451 if !(is32Bit(c ^ d)) {
5452 break
5453 }
5454 v.reset(OpMIPS64XORconst)
5455 v.AuxInt = int64ToAuxInt(c ^ d)
5456 v.AddArg(x)
5457 return true
5458 }
5459 return false
5460 }
5461 func rewriteValueMIPS64_OpMod16(v *Value) bool {
5462 v_1 := v.Args[1]
5463 v_0 := v.Args[0]
5464 b := v.Block
5465 typ := &b.Func.Config.Types
5466
5467
5468 for {
5469 x := v_0
5470 y := v_1
5471 v.reset(OpSelect0)
5472 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5473 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
5474 v1.AddArg(x)
5475 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
5476 v2.AddArg(y)
5477 v0.AddArg2(v1, v2)
5478 v.AddArg(v0)
5479 return true
5480 }
5481 }
5482 func rewriteValueMIPS64_OpMod16u(v *Value) bool {
5483 v_1 := v.Args[1]
5484 v_0 := v.Args[0]
5485 b := v.Block
5486 typ := &b.Func.Config.Types
5487
5488
5489 for {
5490 x := v_0
5491 y := v_1
5492 v.reset(OpSelect0)
5493 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5494 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
5495 v1.AddArg(x)
5496 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
5497 v2.AddArg(y)
5498 v0.AddArg2(v1, v2)
5499 v.AddArg(v0)
5500 return true
5501 }
5502 }
5503 func rewriteValueMIPS64_OpMod32(v *Value) bool {
5504 v_1 := v.Args[1]
5505 v_0 := v.Args[0]
5506 b := v.Block
5507 typ := &b.Func.Config.Types
5508
5509
5510 for {
5511 x := v_0
5512 y := v_1
5513 v.reset(OpSelect0)
5514 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5515 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
5516 v1.AddArg(x)
5517 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
5518 v2.AddArg(y)
5519 v0.AddArg2(v1, v2)
5520 v.AddArg(v0)
5521 return true
5522 }
5523 }
5524 func rewriteValueMIPS64_OpMod32u(v *Value) bool {
5525 v_1 := v.Args[1]
5526 v_0 := v.Args[0]
5527 b := v.Block
5528 typ := &b.Func.Config.Types
5529
5530
5531 for {
5532 x := v_0
5533 y := v_1
5534 v.reset(OpSelect0)
5535 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5536 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
5537 v1.AddArg(x)
5538 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
5539 v2.AddArg(y)
5540 v0.AddArg2(v1, v2)
5541 v.AddArg(v0)
5542 return true
5543 }
5544 }
5545 func rewriteValueMIPS64_OpMod64(v *Value) bool {
5546 v_1 := v.Args[1]
5547 v_0 := v.Args[0]
5548 b := v.Block
5549 typ := &b.Func.Config.Types
5550
5551
5552 for {
5553 x := v_0
5554 y := v_1
5555 v.reset(OpSelect0)
5556 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5557 v0.AddArg2(x, y)
5558 v.AddArg(v0)
5559 return true
5560 }
5561 }
5562 func rewriteValueMIPS64_OpMod64u(v *Value) bool {
5563 v_1 := v.Args[1]
5564 v_0 := v.Args[0]
5565 b := v.Block
5566 typ := &b.Func.Config.Types
5567
5568
5569 for {
5570 x := v_0
5571 y := v_1
5572 v.reset(OpSelect0)
5573 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5574 v0.AddArg2(x, y)
5575 v.AddArg(v0)
5576 return true
5577 }
5578 }
5579 func rewriteValueMIPS64_OpMod8(v *Value) bool {
5580 v_1 := v.Args[1]
5581 v_0 := v.Args[0]
5582 b := v.Block
5583 typ := &b.Func.Config.Types
5584
5585
5586 for {
5587 x := v_0
5588 y := v_1
5589 v.reset(OpSelect0)
5590 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5591 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
5592 v1.AddArg(x)
5593 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
5594 v2.AddArg(y)
5595 v0.AddArg2(v1, v2)
5596 v.AddArg(v0)
5597 return true
5598 }
5599 }
5600 func rewriteValueMIPS64_OpMod8u(v *Value) bool {
5601 v_1 := v.Args[1]
5602 v_0 := v.Args[0]
5603 b := v.Block
5604 typ := &b.Func.Config.Types
5605
5606
5607 for {
5608 x := v_0
5609 y := v_1
5610 v.reset(OpSelect0)
5611 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5612 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
5613 v1.AddArg(x)
5614 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
5615 v2.AddArg(y)
5616 v0.AddArg2(v1, v2)
5617 v.AddArg(v0)
5618 return true
5619 }
5620 }
5621 func rewriteValueMIPS64_OpMove(v *Value) bool {
5622 v_2 := v.Args[2]
5623 v_1 := v.Args[1]
5624 v_0 := v.Args[0]
5625 b := v.Block
5626 config := b.Func.Config
5627 typ := &b.Func.Config.Types
5628
5629
5630 for {
5631 if auxIntToInt64(v.AuxInt) != 0 {
5632 break
5633 }
5634 mem := v_2
5635 v.copyOf(mem)
5636 return true
5637 }
5638
5639
5640 for {
5641 if auxIntToInt64(v.AuxInt) != 1 {
5642 break
5643 }
5644 dst := v_0
5645 src := v_1
5646 mem := v_2
5647 v.reset(OpMIPS64MOVBstore)
5648 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5649 v0.AddArg2(src, mem)
5650 v.AddArg3(dst, v0, mem)
5651 return true
5652 }
5653
5654
5655
5656 for {
5657 if auxIntToInt64(v.AuxInt) != 2 {
5658 break
5659 }
5660 t := auxToType(v.Aux)
5661 dst := v_0
5662 src := v_1
5663 mem := v_2
5664 if !(t.Alignment()%2 == 0) {
5665 break
5666 }
5667 v.reset(OpMIPS64MOVHstore)
5668 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5669 v0.AddArg2(src, mem)
5670 v.AddArg3(dst, v0, mem)
5671 return true
5672 }
5673
5674
5675 for {
5676 if auxIntToInt64(v.AuxInt) != 2 {
5677 break
5678 }
5679 dst := v_0
5680 src := v_1
5681 mem := v_2
5682 v.reset(OpMIPS64MOVBstore)
5683 v.AuxInt = int32ToAuxInt(1)
5684 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5685 v0.AuxInt = int32ToAuxInt(1)
5686 v0.AddArg2(src, mem)
5687 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5688 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5689 v2.AddArg2(src, mem)
5690 v1.AddArg3(dst, v2, mem)
5691 v.AddArg3(dst, v0, v1)
5692 return true
5693 }
5694
5695
5696
5697 for {
5698 if auxIntToInt64(v.AuxInt) != 4 {
5699 break
5700 }
5701 t := auxToType(v.Aux)
5702 dst := v_0
5703 src := v_1
5704 mem := v_2
5705 if !(t.Alignment()%4 == 0) {
5706 break
5707 }
5708 v.reset(OpMIPS64MOVWstore)
5709 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5710 v0.AddArg2(src, mem)
5711 v.AddArg3(dst, v0, mem)
5712 return true
5713 }
5714
5715
5716
5717 for {
5718 if auxIntToInt64(v.AuxInt) != 4 {
5719 break
5720 }
5721 t := auxToType(v.Aux)
5722 dst := v_0
5723 src := v_1
5724 mem := v_2
5725 if !(t.Alignment()%2 == 0) {
5726 break
5727 }
5728 v.reset(OpMIPS64MOVHstore)
5729 v.AuxInt = int32ToAuxInt(2)
5730 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5731 v0.AuxInt = int32ToAuxInt(2)
5732 v0.AddArg2(src, mem)
5733 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5734 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5735 v2.AddArg2(src, mem)
5736 v1.AddArg3(dst, v2, mem)
5737 v.AddArg3(dst, v0, v1)
5738 return true
5739 }
5740
5741
5742 for {
5743 if auxIntToInt64(v.AuxInt) != 4 {
5744 break
5745 }
5746 dst := v_0
5747 src := v_1
5748 mem := v_2
5749 v.reset(OpMIPS64MOVBstore)
5750 v.AuxInt = int32ToAuxInt(3)
5751 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5752 v0.AuxInt = int32ToAuxInt(3)
5753 v0.AddArg2(src, mem)
5754 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5755 v1.AuxInt = int32ToAuxInt(2)
5756 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5757 v2.AuxInt = int32ToAuxInt(2)
5758 v2.AddArg2(src, mem)
5759 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5760 v3.AuxInt = int32ToAuxInt(1)
5761 v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5762 v4.AuxInt = int32ToAuxInt(1)
5763 v4.AddArg2(src, mem)
5764 v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5765 v6 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5766 v6.AddArg2(src, mem)
5767 v5.AddArg3(dst, v6, mem)
5768 v3.AddArg3(dst, v4, v5)
5769 v1.AddArg3(dst, v2, v3)
5770 v.AddArg3(dst, v0, v1)
5771 return true
5772 }
5773
5774
5775
5776 for {
5777 if auxIntToInt64(v.AuxInt) != 8 {
5778 break
5779 }
5780 t := auxToType(v.Aux)
5781 dst := v_0
5782 src := v_1
5783 mem := v_2
5784 if !(t.Alignment()%8 == 0) {
5785 break
5786 }
5787 v.reset(OpMIPS64MOVVstore)
5788 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5789 v0.AddArg2(src, mem)
5790 v.AddArg3(dst, v0, mem)
5791 return true
5792 }
5793
5794
5795
5796 for {
5797 if auxIntToInt64(v.AuxInt) != 8 {
5798 break
5799 }
5800 t := auxToType(v.Aux)
5801 dst := v_0
5802 src := v_1
5803 mem := v_2
5804 if !(t.Alignment()%4 == 0) {
5805 break
5806 }
5807 v.reset(OpMIPS64MOVWstore)
5808 v.AuxInt = int32ToAuxInt(4)
5809 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5810 v0.AuxInt = int32ToAuxInt(4)
5811 v0.AddArg2(src, mem)
5812 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
5813 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5814 v2.AddArg2(src, mem)
5815 v1.AddArg3(dst, v2, mem)
5816 v.AddArg3(dst, v0, v1)
5817 return true
5818 }
5819
5820
5821
5822 for {
5823 if auxIntToInt64(v.AuxInt) != 8 {
5824 break
5825 }
5826 t := auxToType(v.Aux)
5827 dst := v_0
5828 src := v_1
5829 mem := v_2
5830 if !(t.Alignment()%2 == 0) {
5831 break
5832 }
5833 v.reset(OpMIPS64MOVHstore)
5834 v.AuxInt = int32ToAuxInt(6)
5835 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5836 v0.AuxInt = int32ToAuxInt(6)
5837 v0.AddArg2(src, mem)
5838 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5839 v1.AuxInt = int32ToAuxInt(4)
5840 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5841 v2.AuxInt = int32ToAuxInt(4)
5842 v2.AddArg2(src, mem)
5843 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5844 v3.AuxInt = int32ToAuxInt(2)
5845 v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5846 v4.AuxInt = int32ToAuxInt(2)
5847 v4.AddArg2(src, mem)
5848 v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5849 v6 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5850 v6.AddArg2(src, mem)
5851 v5.AddArg3(dst, v6, mem)
5852 v3.AddArg3(dst, v4, v5)
5853 v1.AddArg3(dst, v2, v3)
5854 v.AddArg3(dst, v0, v1)
5855 return true
5856 }
5857
5858
5859 for {
5860 if auxIntToInt64(v.AuxInt) != 3 {
5861 break
5862 }
5863 dst := v_0
5864 src := v_1
5865 mem := v_2
5866 v.reset(OpMIPS64MOVBstore)
5867 v.AuxInt = int32ToAuxInt(2)
5868 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5869 v0.AuxInt = int32ToAuxInt(2)
5870 v0.AddArg2(src, mem)
5871 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5872 v1.AuxInt = int32ToAuxInt(1)
5873 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5874 v2.AuxInt = int32ToAuxInt(1)
5875 v2.AddArg2(src, mem)
5876 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5877 v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5878 v4.AddArg2(src, mem)
5879 v3.AddArg3(dst, v4, mem)
5880 v1.AddArg3(dst, v2, v3)
5881 v.AddArg3(dst, v0, v1)
5882 return true
5883 }
5884
5885
5886
5887 for {
5888 if auxIntToInt64(v.AuxInt) != 6 {
5889 break
5890 }
5891 t := auxToType(v.Aux)
5892 dst := v_0
5893 src := v_1
5894 mem := v_2
5895 if !(t.Alignment()%2 == 0) {
5896 break
5897 }
5898 v.reset(OpMIPS64MOVHstore)
5899 v.AuxInt = int32ToAuxInt(4)
5900 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5901 v0.AuxInt = int32ToAuxInt(4)
5902 v0.AddArg2(src, mem)
5903 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5904 v1.AuxInt = int32ToAuxInt(2)
5905 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5906 v2.AuxInt = int32ToAuxInt(2)
5907 v2.AddArg2(src, mem)
5908 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5909 v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5910 v4.AddArg2(src, mem)
5911 v3.AddArg3(dst, v4, mem)
5912 v1.AddArg3(dst, v2, v3)
5913 v.AddArg3(dst, v0, v1)
5914 return true
5915 }
5916
5917
5918
5919 for {
5920 if auxIntToInt64(v.AuxInt) != 12 {
5921 break
5922 }
5923 t := auxToType(v.Aux)
5924 dst := v_0
5925 src := v_1
5926 mem := v_2
5927 if !(t.Alignment()%4 == 0) {
5928 break
5929 }
5930 v.reset(OpMIPS64MOVWstore)
5931 v.AuxInt = int32ToAuxInt(8)
5932 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5933 v0.AuxInt = int32ToAuxInt(8)
5934 v0.AddArg2(src, mem)
5935 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
5936 v1.AuxInt = int32ToAuxInt(4)
5937 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5938 v2.AuxInt = int32ToAuxInt(4)
5939 v2.AddArg2(src, mem)
5940 v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
5941 v4 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5942 v4.AddArg2(src, mem)
5943 v3.AddArg3(dst, v4, mem)
5944 v1.AddArg3(dst, v2, v3)
5945 v.AddArg3(dst, v0, v1)
5946 return true
5947 }
5948
5949
5950
5951 for {
5952 if auxIntToInt64(v.AuxInt) != 16 {
5953 break
5954 }
5955 t := auxToType(v.Aux)
5956 dst := v_0
5957 src := v_1
5958 mem := v_2
5959 if !(t.Alignment()%8 == 0) {
5960 break
5961 }
5962 v.reset(OpMIPS64MOVVstore)
5963 v.AuxInt = int32ToAuxInt(8)
5964 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5965 v0.AuxInt = int32ToAuxInt(8)
5966 v0.AddArg2(src, mem)
5967 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
5968 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5969 v2.AddArg2(src, mem)
5970 v1.AddArg3(dst, v2, mem)
5971 v.AddArg3(dst, v0, v1)
5972 return true
5973 }
5974
5975
5976
5977 for {
5978 if auxIntToInt64(v.AuxInt) != 24 {
5979 break
5980 }
5981 t := auxToType(v.Aux)
5982 dst := v_0
5983 src := v_1
5984 mem := v_2
5985 if !(t.Alignment()%8 == 0) {
5986 break
5987 }
5988 v.reset(OpMIPS64MOVVstore)
5989 v.AuxInt = int32ToAuxInt(16)
5990 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5991 v0.AuxInt = int32ToAuxInt(16)
5992 v0.AddArg2(src, mem)
5993 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
5994 v1.AuxInt = int32ToAuxInt(8)
5995 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5996 v2.AuxInt = int32ToAuxInt(8)
5997 v2.AddArg2(src, mem)
5998 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
5999 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
6000 v4.AddArg2(src, mem)
6001 v3.AddArg3(dst, v4, mem)
6002 v1.AddArg3(dst, v2, v3)
6003 v.AddArg3(dst, v0, v1)
6004 return true
6005 }
6006
6007
6008
6009 for {
6010 s := auxIntToInt64(v.AuxInt)
6011 t := auxToType(v.Aux)
6012 dst := v_0
6013 src := v_1
6014 mem := v_2
6015 if !(s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)) {
6016 break
6017 }
6018 v.reset(OpMIPS64DUFFCOPY)
6019 v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
6020 v.AddArg3(dst, src, mem)
6021 return true
6022 }
6023
6024
6025
6026 for {
6027 s := auxIntToInt64(v.AuxInt)
6028 t := auxToType(v.Aux)
6029 dst := v_0
6030 src := v_1
6031 mem := v_2
6032 if !(s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0) {
6033 break
6034 }
6035 v.reset(OpMIPS64LoweredMove)
6036 v.AuxInt = int64ToAuxInt(t.Alignment())
6037 v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type)
6038 v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
6039 v0.AddArg(src)
6040 v.AddArg4(dst, src, v0, mem)
6041 return true
6042 }
6043 return false
6044 }
6045 func rewriteValueMIPS64_OpMul16(v *Value) bool {
6046 v_1 := v.Args[1]
6047 v_0 := v.Args[0]
6048 b := v.Block
6049 typ := &b.Func.Config.Types
6050
6051
6052 for {
6053 x := v_0
6054 y := v_1
6055 v.reset(OpSelect1)
6056 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
6057 v0.AddArg2(x, y)
6058 v.AddArg(v0)
6059 return true
6060 }
6061 }
6062 func rewriteValueMIPS64_OpMul32(v *Value) bool {
6063 v_1 := v.Args[1]
6064 v_0 := v.Args[0]
6065 b := v.Block
6066 typ := &b.Func.Config.Types
6067
6068
6069 for {
6070 x := v_0
6071 y := v_1
6072 v.reset(OpSelect1)
6073 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
6074 v0.AddArg2(x, y)
6075 v.AddArg(v0)
6076 return true
6077 }
6078 }
6079 func rewriteValueMIPS64_OpMul64(v *Value) bool {
6080 v_1 := v.Args[1]
6081 v_0 := v.Args[0]
6082 b := v.Block
6083 typ := &b.Func.Config.Types
6084
6085
6086 for {
6087 x := v_0
6088 y := v_1
6089 v.reset(OpSelect1)
6090 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
6091 v0.AddArg2(x, y)
6092 v.AddArg(v0)
6093 return true
6094 }
6095 }
6096 func rewriteValueMIPS64_OpMul8(v *Value) bool {
6097 v_1 := v.Args[1]
6098 v_0 := v.Args[0]
6099 b := v.Block
6100 typ := &b.Func.Config.Types
6101
6102
6103 for {
6104 x := v_0
6105 y := v_1
6106 v.reset(OpSelect1)
6107 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
6108 v0.AddArg2(x, y)
6109 v.AddArg(v0)
6110 return true
6111 }
6112 }
6113 func rewriteValueMIPS64_OpNeq16(v *Value) bool {
6114 v_1 := v.Args[1]
6115 v_0 := v.Args[0]
6116 b := v.Block
6117 typ := &b.Func.Config.Types
6118
6119
6120 for {
6121 x := v_0
6122 y := v_1
6123 v.reset(OpMIPS64SGTU)
6124 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6125 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
6126 v1.AddArg(x)
6127 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6128 v2.AddArg(y)
6129 v0.AddArg2(v1, v2)
6130 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6131 v3.AuxInt = int64ToAuxInt(0)
6132 v.AddArg2(v0, v3)
6133 return true
6134 }
6135 }
6136 func rewriteValueMIPS64_OpNeq32(v *Value) bool {
6137 v_1 := v.Args[1]
6138 v_0 := v.Args[0]
6139 b := v.Block
6140 typ := &b.Func.Config.Types
6141
6142
6143 for {
6144 x := v_0
6145 y := v_1
6146 v.reset(OpMIPS64SGTU)
6147 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6148 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6149 v1.AddArg(x)
6150 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6151 v2.AddArg(y)
6152 v0.AddArg2(v1, v2)
6153 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6154 v3.AuxInt = int64ToAuxInt(0)
6155 v.AddArg2(v0, v3)
6156 return true
6157 }
6158 }
6159 func rewriteValueMIPS64_OpNeq32F(v *Value) bool {
6160 v_1 := v.Args[1]
6161 v_0 := v.Args[0]
6162 b := v.Block
6163
6164
6165 for {
6166 x := v_0
6167 y := v_1
6168 v.reset(OpMIPS64FPFlagFalse)
6169 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
6170 v0.AddArg2(x, y)
6171 v.AddArg(v0)
6172 return true
6173 }
6174 }
6175 func rewriteValueMIPS64_OpNeq64(v *Value) bool {
6176 v_1 := v.Args[1]
6177 v_0 := v.Args[0]
6178 b := v.Block
6179 typ := &b.Func.Config.Types
6180
6181
6182 for {
6183 x := v_0
6184 y := v_1
6185 v.reset(OpMIPS64SGTU)
6186 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6187 v0.AddArg2(x, y)
6188 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6189 v1.AuxInt = int64ToAuxInt(0)
6190 v.AddArg2(v0, v1)
6191 return true
6192 }
6193 }
6194 func rewriteValueMIPS64_OpNeq64F(v *Value) bool {
6195 v_1 := v.Args[1]
6196 v_0 := v.Args[0]
6197 b := v.Block
6198
6199
6200 for {
6201 x := v_0
6202 y := v_1
6203 v.reset(OpMIPS64FPFlagFalse)
6204 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
6205 v0.AddArg2(x, y)
6206 v.AddArg(v0)
6207 return true
6208 }
6209 }
6210 func rewriteValueMIPS64_OpNeq8(v *Value) bool {
6211 v_1 := v.Args[1]
6212 v_0 := v.Args[0]
6213 b := v.Block
6214 typ := &b.Func.Config.Types
6215
6216
6217 for {
6218 x := v_0
6219 y := v_1
6220 v.reset(OpMIPS64SGTU)
6221 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6222 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6223 v1.AddArg(x)
6224 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6225 v2.AddArg(y)
6226 v0.AddArg2(v1, v2)
6227 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6228 v3.AuxInt = int64ToAuxInt(0)
6229 v.AddArg2(v0, v3)
6230 return true
6231 }
6232 }
6233 func rewriteValueMIPS64_OpNeqPtr(v *Value) bool {
6234 v_1 := v.Args[1]
6235 v_0 := v.Args[0]
6236 b := v.Block
6237 typ := &b.Func.Config.Types
6238
6239
6240 for {
6241 x := v_0
6242 y := v_1
6243 v.reset(OpMIPS64SGTU)
6244 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
6245 v0.AddArg2(x, y)
6246 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6247 v1.AuxInt = int64ToAuxInt(0)
6248 v.AddArg2(v0, v1)
6249 return true
6250 }
6251 }
6252 func rewriteValueMIPS64_OpNot(v *Value) bool {
6253 v_0 := v.Args[0]
6254
6255
6256 for {
6257 x := v_0
6258 v.reset(OpMIPS64XORconst)
6259 v.AuxInt = int64ToAuxInt(1)
6260 v.AddArg(x)
6261 return true
6262 }
6263 }
6264 func rewriteValueMIPS64_OpOffPtr(v *Value) bool {
6265 v_0 := v.Args[0]
6266
6267
6268
6269 for {
6270 off := auxIntToInt64(v.AuxInt)
6271 ptr := v_0
6272 if ptr.Op != OpSP || !(is32Bit(off)) {
6273 break
6274 }
6275 v.reset(OpMIPS64MOVVaddr)
6276 v.AuxInt = int32ToAuxInt(int32(off))
6277 v.AddArg(ptr)
6278 return true
6279 }
6280
6281
6282 for {
6283 off := auxIntToInt64(v.AuxInt)
6284 ptr := v_0
6285 v.reset(OpMIPS64ADDVconst)
6286 v.AuxInt = int64ToAuxInt(off)
6287 v.AddArg(ptr)
6288 return true
6289 }
6290 }
6291 func rewriteValueMIPS64_OpPanicBounds(v *Value) bool {
6292 v_2 := v.Args[2]
6293 v_1 := v.Args[1]
6294 v_0 := v.Args[0]
6295
6296
6297
6298 for {
6299 kind := auxIntToInt64(v.AuxInt)
6300 x := v_0
6301 y := v_1
6302 mem := v_2
6303 if !(boundsABI(kind) == 0) {
6304 break
6305 }
6306 v.reset(OpMIPS64LoweredPanicBoundsA)
6307 v.AuxInt = int64ToAuxInt(kind)
6308 v.AddArg3(x, y, mem)
6309 return true
6310 }
6311
6312
6313
6314 for {
6315 kind := auxIntToInt64(v.AuxInt)
6316 x := v_0
6317 y := v_1
6318 mem := v_2
6319 if !(boundsABI(kind) == 1) {
6320 break
6321 }
6322 v.reset(OpMIPS64LoweredPanicBoundsB)
6323 v.AuxInt = int64ToAuxInt(kind)
6324 v.AddArg3(x, y, mem)
6325 return true
6326 }
6327
6328
6329
6330 for {
6331 kind := auxIntToInt64(v.AuxInt)
6332 x := v_0
6333 y := v_1
6334 mem := v_2
6335 if !(boundsABI(kind) == 2) {
6336 break
6337 }
6338 v.reset(OpMIPS64LoweredPanicBoundsC)
6339 v.AuxInt = int64ToAuxInt(kind)
6340 v.AddArg3(x, y, mem)
6341 return true
6342 }
6343 return false
6344 }
6345 func rewriteValueMIPS64_OpRotateLeft16(v *Value) bool {
6346 v_1 := v.Args[1]
6347 v_0 := v.Args[0]
6348 b := v.Block
6349 typ := &b.Func.Config.Types
6350
6351
6352 for {
6353 t := v.Type
6354 x := v_0
6355 if v_1.Op != OpMIPS64MOVVconst {
6356 break
6357 }
6358 c := auxIntToInt64(v_1.AuxInt)
6359 v.reset(OpOr16)
6360 v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
6361 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6362 v1.AuxInt = int64ToAuxInt(c & 15)
6363 v0.AddArg2(x, v1)
6364 v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
6365 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6366 v3.AuxInt = int64ToAuxInt(-c & 15)
6367 v2.AddArg2(x, v3)
6368 v.AddArg2(v0, v2)
6369 return true
6370 }
6371 return false
6372 }
6373 func rewriteValueMIPS64_OpRotateLeft32(v *Value) bool {
6374 v_1 := v.Args[1]
6375 v_0 := v.Args[0]
6376 b := v.Block
6377 typ := &b.Func.Config.Types
6378
6379
6380 for {
6381 t := v.Type
6382 x := v_0
6383 if v_1.Op != OpMIPS64MOVVconst {
6384 break
6385 }
6386 c := auxIntToInt64(v_1.AuxInt)
6387 v.reset(OpOr32)
6388 v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
6389 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6390 v1.AuxInt = int64ToAuxInt(c & 31)
6391 v0.AddArg2(x, v1)
6392 v2 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
6393 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6394 v3.AuxInt = int64ToAuxInt(-c & 31)
6395 v2.AddArg2(x, v3)
6396 v.AddArg2(v0, v2)
6397 return true
6398 }
6399 return false
6400 }
6401 func rewriteValueMIPS64_OpRotateLeft64(v *Value) bool {
6402 v_1 := v.Args[1]
6403 v_0 := v.Args[0]
6404 b := v.Block
6405 typ := &b.Func.Config.Types
6406
6407
6408 for {
6409 t := v.Type
6410 x := v_0
6411 if v_1.Op != OpMIPS64MOVVconst {
6412 break
6413 }
6414 c := auxIntToInt64(v_1.AuxInt)
6415 v.reset(OpOr64)
6416 v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
6417 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6418 v1.AuxInt = int64ToAuxInt(c & 63)
6419 v0.AddArg2(x, v1)
6420 v2 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
6421 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6422 v3.AuxInt = int64ToAuxInt(-c & 63)
6423 v2.AddArg2(x, v3)
6424 v.AddArg2(v0, v2)
6425 return true
6426 }
6427 return false
6428 }
6429 func rewriteValueMIPS64_OpRotateLeft8(v *Value) bool {
6430 v_1 := v.Args[1]
6431 v_0 := v.Args[0]
6432 b := v.Block
6433 typ := &b.Func.Config.Types
6434
6435
6436 for {
6437 t := v.Type
6438 x := v_0
6439 if v_1.Op != OpMIPS64MOVVconst {
6440 break
6441 }
6442 c := auxIntToInt64(v_1.AuxInt)
6443 v.reset(OpOr8)
6444 v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
6445 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6446 v1.AuxInt = int64ToAuxInt(c & 7)
6447 v0.AddArg2(x, v1)
6448 v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
6449 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6450 v3.AuxInt = int64ToAuxInt(-c & 7)
6451 v2.AddArg2(x, v3)
6452 v.AddArg2(v0, v2)
6453 return true
6454 }
6455 return false
6456 }
6457 func rewriteValueMIPS64_OpRsh16Ux16(v *Value) bool {
6458 v_1 := v.Args[1]
6459 v_0 := v.Args[0]
6460 b := v.Block
6461 typ := &b.Func.Config.Types
6462
6463
6464 for {
6465 t := v.Type
6466 x := v_0
6467 y := v_1
6468 v.reset(OpMIPS64AND)
6469 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6470 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6471 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6472 v2.AuxInt = int64ToAuxInt(64)
6473 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6474 v3.AddArg(y)
6475 v1.AddArg2(v2, v3)
6476 v0.AddArg(v1)
6477 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6478 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6479 v5.AddArg(x)
6480 v4.AddArg2(v5, v3)
6481 v.AddArg2(v0, v4)
6482 return true
6483 }
6484 }
6485 func rewriteValueMIPS64_OpRsh16Ux32(v *Value) bool {
6486 v_1 := v.Args[1]
6487 v_0 := v.Args[0]
6488 b := v.Block
6489 typ := &b.Func.Config.Types
6490
6491
6492 for {
6493 t := v.Type
6494 x := v_0
6495 y := v_1
6496 v.reset(OpMIPS64AND)
6497 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6498 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6499 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6500 v2.AuxInt = int64ToAuxInt(64)
6501 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6502 v3.AddArg(y)
6503 v1.AddArg2(v2, v3)
6504 v0.AddArg(v1)
6505 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6506 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6507 v5.AddArg(x)
6508 v4.AddArg2(v5, v3)
6509 v.AddArg2(v0, v4)
6510 return true
6511 }
6512 }
6513 func rewriteValueMIPS64_OpRsh16Ux64(v *Value) bool {
6514 v_1 := v.Args[1]
6515 v_0 := v.Args[0]
6516 b := v.Block
6517 typ := &b.Func.Config.Types
6518
6519
6520 for {
6521 t := v.Type
6522 x := v_0
6523 y := v_1
6524 v.reset(OpMIPS64AND)
6525 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6526 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6527 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6528 v2.AuxInt = int64ToAuxInt(64)
6529 v1.AddArg2(v2, y)
6530 v0.AddArg(v1)
6531 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6532 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6533 v4.AddArg(x)
6534 v3.AddArg2(v4, y)
6535 v.AddArg2(v0, v3)
6536 return true
6537 }
6538 }
6539 func rewriteValueMIPS64_OpRsh16Ux8(v *Value) bool {
6540 v_1 := v.Args[1]
6541 v_0 := v.Args[0]
6542 b := v.Block
6543 typ := &b.Func.Config.Types
6544
6545
6546 for {
6547 t := v.Type
6548 x := v_0
6549 y := v_1
6550 v.reset(OpMIPS64AND)
6551 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6552 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6553 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6554 v2.AuxInt = int64ToAuxInt(64)
6555 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6556 v3.AddArg(y)
6557 v1.AddArg2(v2, v3)
6558 v0.AddArg(v1)
6559 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6560 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6561 v5.AddArg(x)
6562 v4.AddArg2(v5, v3)
6563 v.AddArg2(v0, v4)
6564 return true
6565 }
6566 }
6567 func rewriteValueMIPS64_OpRsh16x16(v *Value) bool {
6568 v_1 := v.Args[1]
6569 v_0 := v.Args[0]
6570 b := v.Block
6571 typ := &b.Func.Config.Types
6572
6573
6574 for {
6575 t := v.Type
6576 x := v_0
6577 y := v_1
6578 v.reset(OpMIPS64SRAV)
6579 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6580 v0.AddArg(x)
6581 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6582 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6583 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6584 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6585 v4.AddArg(y)
6586 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6587 v5.AuxInt = int64ToAuxInt(63)
6588 v3.AddArg2(v4, v5)
6589 v2.AddArg(v3)
6590 v1.AddArg2(v2, v4)
6591 v.AddArg2(v0, v1)
6592 return true
6593 }
6594 }
6595 func rewriteValueMIPS64_OpRsh16x32(v *Value) bool {
6596 v_1 := v.Args[1]
6597 v_0 := v.Args[0]
6598 b := v.Block
6599 typ := &b.Func.Config.Types
6600
6601
6602 for {
6603 t := v.Type
6604 x := v_0
6605 y := v_1
6606 v.reset(OpMIPS64SRAV)
6607 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6608 v0.AddArg(x)
6609 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6610 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6611 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6612 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6613 v4.AddArg(y)
6614 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6615 v5.AuxInt = int64ToAuxInt(63)
6616 v3.AddArg2(v4, v5)
6617 v2.AddArg(v3)
6618 v1.AddArg2(v2, v4)
6619 v.AddArg2(v0, v1)
6620 return true
6621 }
6622 }
6623 func rewriteValueMIPS64_OpRsh16x64(v *Value) bool {
6624 v_1 := v.Args[1]
6625 v_0 := v.Args[0]
6626 b := v.Block
6627 typ := &b.Func.Config.Types
6628
6629
6630 for {
6631 t := v.Type
6632 x := v_0
6633 y := v_1
6634 v.reset(OpMIPS64SRAV)
6635 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6636 v0.AddArg(x)
6637 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6638 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6639 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6640 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6641 v4.AuxInt = int64ToAuxInt(63)
6642 v3.AddArg2(y, v4)
6643 v2.AddArg(v3)
6644 v1.AddArg2(v2, y)
6645 v.AddArg2(v0, v1)
6646 return true
6647 }
6648 }
6649 func rewriteValueMIPS64_OpRsh16x8(v *Value) bool {
6650 v_1 := v.Args[1]
6651 v_0 := v.Args[0]
6652 b := v.Block
6653 typ := &b.Func.Config.Types
6654
6655
6656 for {
6657 t := v.Type
6658 x := v_0
6659 y := v_1
6660 v.reset(OpMIPS64SRAV)
6661 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6662 v0.AddArg(x)
6663 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6664 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6665 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6666 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6667 v4.AddArg(y)
6668 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6669 v5.AuxInt = int64ToAuxInt(63)
6670 v3.AddArg2(v4, v5)
6671 v2.AddArg(v3)
6672 v1.AddArg2(v2, v4)
6673 v.AddArg2(v0, v1)
6674 return true
6675 }
6676 }
6677 func rewriteValueMIPS64_OpRsh32Ux16(v *Value) bool {
6678 v_1 := v.Args[1]
6679 v_0 := v.Args[0]
6680 b := v.Block
6681 typ := &b.Func.Config.Types
6682
6683
6684 for {
6685 t := v.Type
6686 x := v_0
6687 y := v_1
6688 v.reset(OpMIPS64AND)
6689 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6690 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6691 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6692 v2.AuxInt = int64ToAuxInt(64)
6693 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6694 v3.AddArg(y)
6695 v1.AddArg2(v2, v3)
6696 v0.AddArg(v1)
6697 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6698 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6699 v5.AddArg(x)
6700 v4.AddArg2(v5, v3)
6701 v.AddArg2(v0, v4)
6702 return true
6703 }
6704 }
6705 func rewriteValueMIPS64_OpRsh32Ux32(v *Value) bool {
6706 v_1 := v.Args[1]
6707 v_0 := v.Args[0]
6708 b := v.Block
6709 typ := &b.Func.Config.Types
6710
6711
6712 for {
6713 t := v.Type
6714 x := v_0
6715 y := v_1
6716 v.reset(OpMIPS64AND)
6717 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6718 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6719 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6720 v2.AuxInt = int64ToAuxInt(64)
6721 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6722 v3.AddArg(y)
6723 v1.AddArg2(v2, v3)
6724 v0.AddArg(v1)
6725 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6726 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6727 v5.AddArg(x)
6728 v4.AddArg2(v5, v3)
6729 v.AddArg2(v0, v4)
6730 return true
6731 }
6732 }
6733 func rewriteValueMIPS64_OpRsh32Ux64(v *Value) bool {
6734 v_1 := v.Args[1]
6735 v_0 := v.Args[0]
6736 b := v.Block
6737 typ := &b.Func.Config.Types
6738
6739
6740 for {
6741 t := v.Type
6742 x := v_0
6743 y := v_1
6744 v.reset(OpMIPS64AND)
6745 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6746 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6747 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6748 v2.AuxInt = int64ToAuxInt(64)
6749 v1.AddArg2(v2, y)
6750 v0.AddArg(v1)
6751 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6752 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6753 v4.AddArg(x)
6754 v3.AddArg2(v4, y)
6755 v.AddArg2(v0, v3)
6756 return true
6757 }
6758 }
6759 func rewriteValueMIPS64_OpRsh32Ux8(v *Value) bool {
6760 v_1 := v.Args[1]
6761 v_0 := v.Args[0]
6762 b := v.Block
6763 typ := &b.Func.Config.Types
6764
6765
6766 for {
6767 t := v.Type
6768 x := v_0
6769 y := v_1
6770 v.reset(OpMIPS64AND)
6771 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6772 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6773 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6774 v2.AuxInt = int64ToAuxInt(64)
6775 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6776 v3.AddArg(y)
6777 v1.AddArg2(v2, v3)
6778 v0.AddArg(v1)
6779 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6780 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6781 v5.AddArg(x)
6782 v4.AddArg2(v5, v3)
6783 v.AddArg2(v0, v4)
6784 return true
6785 }
6786 }
6787 func rewriteValueMIPS64_OpRsh32x16(v *Value) bool {
6788 v_1 := v.Args[1]
6789 v_0 := v.Args[0]
6790 b := v.Block
6791 typ := &b.Func.Config.Types
6792
6793
6794 for {
6795 t := v.Type
6796 x := v_0
6797 y := v_1
6798 v.reset(OpMIPS64SRAV)
6799 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6800 v0.AddArg(x)
6801 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6802 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6803 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6804 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6805 v4.AddArg(y)
6806 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6807 v5.AuxInt = int64ToAuxInt(63)
6808 v3.AddArg2(v4, v5)
6809 v2.AddArg(v3)
6810 v1.AddArg2(v2, v4)
6811 v.AddArg2(v0, v1)
6812 return true
6813 }
6814 }
6815 func rewriteValueMIPS64_OpRsh32x32(v *Value) bool {
6816 v_1 := v.Args[1]
6817 v_0 := v.Args[0]
6818 b := v.Block
6819 typ := &b.Func.Config.Types
6820
6821
6822 for {
6823 t := v.Type
6824 x := v_0
6825 y := v_1
6826 v.reset(OpMIPS64SRAV)
6827 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6828 v0.AddArg(x)
6829 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6830 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6831 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6832 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6833 v4.AddArg(y)
6834 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6835 v5.AuxInt = int64ToAuxInt(63)
6836 v3.AddArg2(v4, v5)
6837 v2.AddArg(v3)
6838 v1.AddArg2(v2, v4)
6839 v.AddArg2(v0, v1)
6840 return true
6841 }
6842 }
6843 func rewriteValueMIPS64_OpRsh32x64(v *Value) bool {
6844 v_1 := v.Args[1]
6845 v_0 := v.Args[0]
6846 b := v.Block
6847 typ := &b.Func.Config.Types
6848
6849
6850 for {
6851 t := v.Type
6852 x := v_0
6853 y := v_1
6854 v.reset(OpMIPS64SRAV)
6855 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6856 v0.AddArg(x)
6857 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6858 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6859 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6860 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6861 v4.AuxInt = int64ToAuxInt(63)
6862 v3.AddArg2(y, v4)
6863 v2.AddArg(v3)
6864 v1.AddArg2(v2, y)
6865 v.AddArg2(v0, v1)
6866 return true
6867 }
6868 }
6869 func rewriteValueMIPS64_OpRsh32x8(v *Value) bool {
6870 v_1 := v.Args[1]
6871 v_0 := v.Args[0]
6872 b := v.Block
6873 typ := &b.Func.Config.Types
6874
6875
6876 for {
6877 t := v.Type
6878 x := v_0
6879 y := v_1
6880 v.reset(OpMIPS64SRAV)
6881 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6882 v0.AddArg(x)
6883 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6884 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6885 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6886 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6887 v4.AddArg(y)
6888 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6889 v5.AuxInt = int64ToAuxInt(63)
6890 v3.AddArg2(v4, v5)
6891 v2.AddArg(v3)
6892 v1.AddArg2(v2, v4)
6893 v.AddArg2(v0, v1)
6894 return true
6895 }
6896 }
6897 func rewriteValueMIPS64_OpRsh64Ux16(v *Value) bool {
6898 v_1 := v.Args[1]
6899 v_0 := v.Args[0]
6900 b := v.Block
6901 typ := &b.Func.Config.Types
6902
6903
6904 for {
6905 t := v.Type
6906 x := v_0
6907 y := v_1
6908 v.reset(OpMIPS64AND)
6909 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6910 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6911 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6912 v2.AuxInt = int64ToAuxInt(64)
6913 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6914 v3.AddArg(y)
6915 v1.AddArg2(v2, v3)
6916 v0.AddArg(v1)
6917 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6918 v4.AddArg2(x, v3)
6919 v.AddArg2(v0, v4)
6920 return true
6921 }
6922 }
6923 func rewriteValueMIPS64_OpRsh64Ux32(v *Value) bool {
6924 v_1 := v.Args[1]
6925 v_0 := v.Args[0]
6926 b := v.Block
6927 typ := &b.Func.Config.Types
6928
6929
6930 for {
6931 t := v.Type
6932 x := v_0
6933 y := v_1
6934 v.reset(OpMIPS64AND)
6935 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6936 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6937 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6938 v2.AuxInt = int64ToAuxInt(64)
6939 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6940 v3.AddArg(y)
6941 v1.AddArg2(v2, v3)
6942 v0.AddArg(v1)
6943 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6944 v4.AddArg2(x, v3)
6945 v.AddArg2(v0, v4)
6946 return true
6947 }
6948 }
6949 func rewriteValueMIPS64_OpRsh64Ux64(v *Value) bool {
6950 v_1 := v.Args[1]
6951 v_0 := v.Args[0]
6952 b := v.Block
6953 typ := &b.Func.Config.Types
6954
6955
6956 for {
6957 t := v.Type
6958 x := v_0
6959 y := v_1
6960 v.reset(OpMIPS64AND)
6961 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6962 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6963 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6964 v2.AuxInt = int64ToAuxInt(64)
6965 v1.AddArg2(v2, y)
6966 v0.AddArg(v1)
6967 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6968 v3.AddArg2(x, y)
6969 v.AddArg2(v0, v3)
6970 return true
6971 }
6972 }
6973 func rewriteValueMIPS64_OpRsh64Ux8(v *Value) bool {
6974 v_1 := v.Args[1]
6975 v_0 := v.Args[0]
6976 b := v.Block
6977 typ := &b.Func.Config.Types
6978
6979
6980 for {
6981 t := v.Type
6982 x := v_0
6983 y := v_1
6984 v.reset(OpMIPS64AND)
6985 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6986 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6987 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6988 v2.AuxInt = int64ToAuxInt(64)
6989 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6990 v3.AddArg(y)
6991 v1.AddArg2(v2, v3)
6992 v0.AddArg(v1)
6993 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6994 v4.AddArg2(x, v3)
6995 v.AddArg2(v0, v4)
6996 return true
6997 }
6998 }
6999 func rewriteValueMIPS64_OpRsh64x16(v *Value) bool {
7000 v_1 := v.Args[1]
7001 v_0 := v.Args[0]
7002 b := v.Block
7003 typ := &b.Func.Config.Types
7004
7005
7006 for {
7007 t := v.Type
7008 x := v_0
7009 y := v_1
7010 v.reset(OpMIPS64SRAV)
7011 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7012 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7013 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7014 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7015 v3.AddArg(y)
7016 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7017 v4.AuxInt = int64ToAuxInt(63)
7018 v2.AddArg2(v3, v4)
7019 v1.AddArg(v2)
7020 v0.AddArg2(v1, v3)
7021 v.AddArg2(x, v0)
7022 return true
7023 }
7024 }
7025 func rewriteValueMIPS64_OpRsh64x32(v *Value) bool {
7026 v_1 := v.Args[1]
7027 v_0 := v.Args[0]
7028 b := v.Block
7029 typ := &b.Func.Config.Types
7030
7031
7032 for {
7033 t := v.Type
7034 x := v_0
7035 y := v_1
7036 v.reset(OpMIPS64SRAV)
7037 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7038 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7039 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7040 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7041 v3.AddArg(y)
7042 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7043 v4.AuxInt = int64ToAuxInt(63)
7044 v2.AddArg2(v3, v4)
7045 v1.AddArg(v2)
7046 v0.AddArg2(v1, v3)
7047 v.AddArg2(x, v0)
7048 return true
7049 }
7050 }
7051 func rewriteValueMIPS64_OpRsh64x64(v *Value) bool {
7052 v_1 := v.Args[1]
7053 v_0 := v.Args[0]
7054 b := v.Block
7055 typ := &b.Func.Config.Types
7056
7057
7058 for {
7059 t := v.Type
7060 x := v_0
7061 y := v_1
7062 v.reset(OpMIPS64SRAV)
7063 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7064 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7065 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7066 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7067 v3.AuxInt = int64ToAuxInt(63)
7068 v2.AddArg2(y, v3)
7069 v1.AddArg(v2)
7070 v0.AddArg2(v1, y)
7071 v.AddArg2(x, v0)
7072 return true
7073 }
7074 }
7075 func rewriteValueMIPS64_OpRsh64x8(v *Value) bool {
7076 v_1 := v.Args[1]
7077 v_0 := v.Args[0]
7078 b := v.Block
7079 typ := &b.Func.Config.Types
7080
7081
7082 for {
7083 t := v.Type
7084 x := v_0
7085 y := v_1
7086 v.reset(OpMIPS64SRAV)
7087 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7088 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7089 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7090 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7091 v3.AddArg(y)
7092 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7093 v4.AuxInt = int64ToAuxInt(63)
7094 v2.AddArg2(v3, v4)
7095 v1.AddArg(v2)
7096 v0.AddArg2(v1, v3)
7097 v.AddArg2(x, v0)
7098 return true
7099 }
7100 }
7101 func rewriteValueMIPS64_OpRsh8Ux16(v *Value) bool {
7102 v_1 := v.Args[1]
7103 v_0 := v.Args[0]
7104 b := v.Block
7105 typ := &b.Func.Config.Types
7106
7107
7108 for {
7109 t := v.Type
7110 x := v_0
7111 y := v_1
7112 v.reset(OpMIPS64AND)
7113 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7114 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7115 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7116 v2.AuxInt = int64ToAuxInt(64)
7117 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7118 v3.AddArg(y)
7119 v1.AddArg2(v2, v3)
7120 v0.AddArg(v1)
7121 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7122 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7123 v5.AddArg(x)
7124 v4.AddArg2(v5, v3)
7125 v.AddArg2(v0, v4)
7126 return true
7127 }
7128 }
7129 func rewriteValueMIPS64_OpRsh8Ux32(v *Value) bool {
7130 v_1 := v.Args[1]
7131 v_0 := v.Args[0]
7132 b := v.Block
7133 typ := &b.Func.Config.Types
7134
7135
7136 for {
7137 t := v.Type
7138 x := v_0
7139 y := v_1
7140 v.reset(OpMIPS64AND)
7141 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7142 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7143 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7144 v2.AuxInt = int64ToAuxInt(64)
7145 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7146 v3.AddArg(y)
7147 v1.AddArg2(v2, v3)
7148 v0.AddArg(v1)
7149 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7150 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7151 v5.AddArg(x)
7152 v4.AddArg2(v5, v3)
7153 v.AddArg2(v0, v4)
7154 return true
7155 }
7156 }
7157 func rewriteValueMIPS64_OpRsh8Ux64(v *Value) bool {
7158 v_1 := v.Args[1]
7159 v_0 := v.Args[0]
7160 b := v.Block
7161 typ := &b.Func.Config.Types
7162
7163
7164 for {
7165 t := v.Type
7166 x := v_0
7167 y := v_1
7168 v.reset(OpMIPS64AND)
7169 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7170 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7171 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7172 v2.AuxInt = int64ToAuxInt(64)
7173 v1.AddArg2(v2, y)
7174 v0.AddArg(v1)
7175 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7176 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7177 v4.AddArg(x)
7178 v3.AddArg2(v4, y)
7179 v.AddArg2(v0, v3)
7180 return true
7181 }
7182 }
7183 func rewriteValueMIPS64_OpRsh8Ux8(v *Value) bool {
7184 v_1 := v.Args[1]
7185 v_0 := v.Args[0]
7186 b := v.Block
7187 typ := &b.Func.Config.Types
7188
7189
7190 for {
7191 t := v.Type
7192 x := v_0
7193 y := v_1
7194 v.reset(OpMIPS64AND)
7195 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7196 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7197 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7198 v2.AuxInt = int64ToAuxInt(64)
7199 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7200 v3.AddArg(y)
7201 v1.AddArg2(v2, v3)
7202 v0.AddArg(v1)
7203 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
7204 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7205 v5.AddArg(x)
7206 v4.AddArg2(v5, v3)
7207 v.AddArg2(v0, v4)
7208 return true
7209 }
7210 }
7211 func rewriteValueMIPS64_OpRsh8x16(v *Value) bool {
7212 v_1 := v.Args[1]
7213 v_0 := v.Args[0]
7214 b := v.Block
7215 typ := &b.Func.Config.Types
7216
7217
7218 for {
7219 t := v.Type
7220 x := v_0
7221 y := v_1
7222 v.reset(OpMIPS64SRAV)
7223 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7224 v0.AddArg(x)
7225 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7226 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7227 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7228 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
7229 v4.AddArg(y)
7230 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7231 v5.AuxInt = int64ToAuxInt(63)
7232 v3.AddArg2(v4, v5)
7233 v2.AddArg(v3)
7234 v1.AddArg2(v2, v4)
7235 v.AddArg2(v0, v1)
7236 return true
7237 }
7238 }
7239 func rewriteValueMIPS64_OpRsh8x32(v *Value) bool {
7240 v_1 := v.Args[1]
7241 v_0 := v.Args[0]
7242 b := v.Block
7243 typ := &b.Func.Config.Types
7244
7245
7246 for {
7247 t := v.Type
7248 x := v_0
7249 y := v_1
7250 v.reset(OpMIPS64SRAV)
7251 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7252 v0.AddArg(x)
7253 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7254 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7255 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7256 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
7257 v4.AddArg(y)
7258 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7259 v5.AuxInt = int64ToAuxInt(63)
7260 v3.AddArg2(v4, v5)
7261 v2.AddArg(v3)
7262 v1.AddArg2(v2, v4)
7263 v.AddArg2(v0, v1)
7264 return true
7265 }
7266 }
7267 func rewriteValueMIPS64_OpRsh8x64(v *Value) bool {
7268 v_1 := v.Args[1]
7269 v_0 := v.Args[0]
7270 b := v.Block
7271 typ := &b.Func.Config.Types
7272
7273
7274 for {
7275 t := v.Type
7276 x := v_0
7277 y := v_1
7278 v.reset(OpMIPS64SRAV)
7279 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7280 v0.AddArg(x)
7281 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7282 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7283 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7284 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7285 v4.AuxInt = int64ToAuxInt(63)
7286 v3.AddArg2(y, v4)
7287 v2.AddArg(v3)
7288 v1.AddArg2(v2, y)
7289 v.AddArg2(v0, v1)
7290 return true
7291 }
7292 }
7293 func rewriteValueMIPS64_OpRsh8x8(v *Value) bool {
7294 v_1 := v.Args[1]
7295 v_0 := v.Args[0]
7296 b := v.Block
7297 typ := &b.Func.Config.Types
7298
7299
7300 for {
7301 t := v.Type
7302 x := v_0
7303 y := v_1
7304 v.reset(OpMIPS64SRAV)
7305 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
7306 v0.AddArg(x)
7307 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
7308 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7309 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
7310 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
7311 v4.AddArg(y)
7312 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7313 v5.AuxInt = int64ToAuxInt(63)
7314 v3.AddArg2(v4, v5)
7315 v2.AddArg(v3)
7316 v1.AddArg2(v2, v4)
7317 v.AddArg2(v0, v1)
7318 return true
7319 }
7320 }
7321 func rewriteValueMIPS64_OpSelect0(v *Value) bool {
7322 v_0 := v.Args[0]
7323 b := v.Block
7324 typ := &b.Func.Config.Types
7325
7326
7327 for {
7328 if v_0.Op != OpMul64uover {
7329 break
7330 }
7331 y := v_0.Args[1]
7332 x := v_0.Args[0]
7333 v.reset(OpSelect1)
7334 v.Type = typ.UInt64
7335 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
7336 v0.AddArg2(x, y)
7337 v.AddArg(v0)
7338 return true
7339 }
7340
7341
7342 for {
7343 t := v.Type
7344 if v_0.Op != OpAdd64carry {
7345 break
7346 }
7347 c := v_0.Args[2]
7348 x := v_0.Args[0]
7349 y := v_0.Args[1]
7350 v.reset(OpMIPS64ADDV)
7351 v0 := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
7352 v0.AddArg2(x, y)
7353 v.AddArg2(v0, c)
7354 return true
7355 }
7356
7357
7358 for {
7359 t := v.Type
7360 if v_0.Op != OpSub64borrow {
7361 break
7362 }
7363 c := v_0.Args[2]
7364 x := v_0.Args[0]
7365 y := v_0.Args[1]
7366 v.reset(OpMIPS64SUBV)
7367 v0 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
7368 v0.AddArg2(x, y)
7369 v.AddArg2(v0, c)
7370 return true
7371 }
7372
7373
7374 for {
7375 if v_0.Op != OpMIPS64DIVVU {
7376 break
7377 }
7378 _ = v_0.Args[1]
7379 v_0_1 := v_0.Args[1]
7380 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
7381 break
7382 }
7383 v.reset(OpMIPS64MOVVconst)
7384 v.AuxInt = int64ToAuxInt(0)
7385 return true
7386 }
7387
7388
7389
7390 for {
7391 if v_0.Op != OpMIPS64DIVVU {
7392 break
7393 }
7394 _ = v_0.Args[1]
7395 x := v_0.Args[0]
7396 v_0_1 := v_0.Args[1]
7397 if v_0_1.Op != OpMIPS64MOVVconst {
7398 break
7399 }
7400 c := auxIntToInt64(v_0_1.AuxInt)
7401 if !(isPowerOfTwo64(c)) {
7402 break
7403 }
7404 v.reset(OpMIPS64ANDconst)
7405 v.AuxInt = int64ToAuxInt(c - 1)
7406 v.AddArg(x)
7407 return true
7408 }
7409
7410
7411
7412 for {
7413 if v_0.Op != OpMIPS64DIVV {
7414 break
7415 }
7416 _ = v_0.Args[1]
7417 v_0_0 := v_0.Args[0]
7418 if v_0_0.Op != OpMIPS64MOVVconst {
7419 break
7420 }
7421 c := auxIntToInt64(v_0_0.AuxInt)
7422 v_0_1 := v_0.Args[1]
7423 if v_0_1.Op != OpMIPS64MOVVconst {
7424 break
7425 }
7426 d := auxIntToInt64(v_0_1.AuxInt)
7427 if !(d != 0) {
7428 break
7429 }
7430 v.reset(OpMIPS64MOVVconst)
7431 v.AuxInt = int64ToAuxInt(c % d)
7432 return true
7433 }
7434
7435
7436
7437 for {
7438 if v_0.Op != OpMIPS64DIVVU {
7439 break
7440 }
7441 _ = v_0.Args[1]
7442 v_0_0 := v_0.Args[0]
7443 if v_0_0.Op != OpMIPS64MOVVconst {
7444 break
7445 }
7446 c := auxIntToInt64(v_0_0.AuxInt)
7447 v_0_1 := v_0.Args[1]
7448 if v_0_1.Op != OpMIPS64MOVVconst {
7449 break
7450 }
7451 d := auxIntToInt64(v_0_1.AuxInt)
7452 if !(d != 0) {
7453 break
7454 }
7455 v.reset(OpMIPS64MOVVconst)
7456 v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
7457 return true
7458 }
7459 return false
7460 }
7461 func rewriteValueMIPS64_OpSelect1(v *Value) bool {
7462 v_0 := v.Args[0]
7463 b := v.Block
7464 typ := &b.Func.Config.Types
7465
7466
7467 for {
7468 if v_0.Op != OpMul64uover {
7469 break
7470 }
7471 y := v_0.Args[1]
7472 x := v_0.Args[0]
7473 v.reset(OpMIPS64SGTU)
7474 v.Type = typ.Bool
7475 v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
7476 v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
7477 v1.AddArg2(x, y)
7478 v0.AddArg(v1)
7479 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7480 v2.AuxInt = int64ToAuxInt(0)
7481 v.AddArg2(v0, v2)
7482 return true
7483 }
7484
7485
7486 for {
7487 t := v.Type
7488 if v_0.Op != OpAdd64carry {
7489 break
7490 }
7491 c := v_0.Args[2]
7492 x := v_0.Args[0]
7493 y := v_0.Args[1]
7494 v.reset(OpMIPS64OR)
7495 v0 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7496 s := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
7497 s.AddArg2(x, y)
7498 v0.AddArg2(x, s)
7499 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7500 v3 := b.NewValue0(v.Pos, OpMIPS64ADDV, t)
7501 v3.AddArg2(s, c)
7502 v2.AddArg2(s, v3)
7503 v.AddArg2(v0, v2)
7504 return true
7505 }
7506
7507
7508 for {
7509 t := v.Type
7510 if v_0.Op != OpSub64borrow {
7511 break
7512 }
7513 c := v_0.Args[2]
7514 x := v_0.Args[0]
7515 y := v_0.Args[1]
7516 v.reset(OpMIPS64OR)
7517 v0 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7518 s := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
7519 s.AddArg2(x, y)
7520 v0.AddArg2(s, x)
7521 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, t)
7522 v3 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
7523 v3.AddArg2(s, c)
7524 v2.AddArg2(v3, s)
7525 v.AddArg2(v0, v2)
7526 return true
7527 }
7528
7529
7530 for {
7531 if v_0.Op != OpMIPS64MULVU {
7532 break
7533 }
7534 _ = v_0.Args[1]
7535 v_0_0 := v_0.Args[0]
7536 v_0_1 := v_0.Args[1]
7537 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7538 x := v_0_0
7539 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != -1 {
7540 continue
7541 }
7542 v.reset(OpMIPS64NEGV)
7543 v.AddArg(x)
7544 return true
7545 }
7546 break
7547 }
7548
7549
7550 for {
7551 if v_0.Op != OpMIPS64MULVU {
7552 break
7553 }
7554 _ = v_0.Args[1]
7555 v_0_0 := v_0.Args[0]
7556 v_0_1 := v_0.Args[1]
7557 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7558 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
7559 continue
7560 }
7561 v.reset(OpMIPS64MOVVconst)
7562 v.AuxInt = int64ToAuxInt(0)
7563 return true
7564 }
7565 break
7566 }
7567
7568
7569 for {
7570 if v_0.Op != OpMIPS64MULVU {
7571 break
7572 }
7573 _ = v_0.Args[1]
7574 v_0_0 := v_0.Args[0]
7575 v_0_1 := v_0.Args[1]
7576 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7577 x := v_0_0
7578 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
7579 continue
7580 }
7581 v.copyOf(x)
7582 return true
7583 }
7584 break
7585 }
7586
7587
7588
7589 for {
7590 if v_0.Op != OpMIPS64MULVU {
7591 break
7592 }
7593 _ = v_0.Args[1]
7594 v_0_0 := v_0.Args[0]
7595 v_0_1 := v_0.Args[1]
7596 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7597 x := v_0_0
7598 if v_0_1.Op != OpMIPS64MOVVconst {
7599 continue
7600 }
7601 c := auxIntToInt64(v_0_1.AuxInt)
7602 if !(isPowerOfTwo64(c)) {
7603 continue
7604 }
7605 v.reset(OpMIPS64SLLVconst)
7606 v.AuxInt = int64ToAuxInt(log64(c))
7607 v.AddArg(x)
7608 return true
7609 }
7610 break
7611 }
7612
7613
7614 for {
7615 if v_0.Op != OpMIPS64DIVVU {
7616 break
7617 }
7618 _ = v_0.Args[1]
7619 x := v_0.Args[0]
7620 v_0_1 := v_0.Args[1]
7621 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
7622 break
7623 }
7624 v.copyOf(x)
7625 return true
7626 }
7627
7628
7629
7630 for {
7631 if v_0.Op != OpMIPS64DIVVU {
7632 break
7633 }
7634 _ = v_0.Args[1]
7635 x := v_0.Args[0]
7636 v_0_1 := v_0.Args[1]
7637 if v_0_1.Op != OpMIPS64MOVVconst {
7638 break
7639 }
7640 c := auxIntToInt64(v_0_1.AuxInt)
7641 if !(isPowerOfTwo64(c)) {
7642 break
7643 }
7644 v.reset(OpMIPS64SRLVconst)
7645 v.AuxInt = int64ToAuxInt(log64(c))
7646 v.AddArg(x)
7647 return true
7648 }
7649
7650
7651 for {
7652 if v_0.Op != OpMIPS64MULVU {
7653 break
7654 }
7655 _ = v_0.Args[1]
7656 v_0_0 := v_0.Args[0]
7657 v_0_1 := v_0.Args[1]
7658 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7659 if v_0_0.Op != OpMIPS64MOVVconst {
7660 continue
7661 }
7662 c := auxIntToInt64(v_0_0.AuxInt)
7663 if v_0_1.Op != OpMIPS64MOVVconst {
7664 continue
7665 }
7666 d := auxIntToInt64(v_0_1.AuxInt)
7667 v.reset(OpMIPS64MOVVconst)
7668 v.AuxInt = int64ToAuxInt(c * d)
7669 return true
7670 }
7671 break
7672 }
7673
7674
7675
7676 for {
7677 if v_0.Op != OpMIPS64DIVV {
7678 break
7679 }
7680 _ = v_0.Args[1]
7681 v_0_0 := v_0.Args[0]
7682 if v_0_0.Op != OpMIPS64MOVVconst {
7683 break
7684 }
7685 c := auxIntToInt64(v_0_0.AuxInt)
7686 v_0_1 := v_0.Args[1]
7687 if v_0_1.Op != OpMIPS64MOVVconst {
7688 break
7689 }
7690 d := auxIntToInt64(v_0_1.AuxInt)
7691 if !(d != 0) {
7692 break
7693 }
7694 v.reset(OpMIPS64MOVVconst)
7695 v.AuxInt = int64ToAuxInt(c / d)
7696 return true
7697 }
7698
7699
7700
7701 for {
7702 if v_0.Op != OpMIPS64DIVVU {
7703 break
7704 }
7705 _ = v_0.Args[1]
7706 v_0_0 := v_0.Args[0]
7707 if v_0_0.Op != OpMIPS64MOVVconst {
7708 break
7709 }
7710 c := auxIntToInt64(v_0_0.AuxInt)
7711 v_0_1 := v_0.Args[1]
7712 if v_0_1.Op != OpMIPS64MOVVconst {
7713 break
7714 }
7715 d := auxIntToInt64(v_0_1.AuxInt)
7716 if !(d != 0) {
7717 break
7718 }
7719 v.reset(OpMIPS64MOVVconst)
7720 v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
7721 return true
7722 }
7723 return false
7724 }
7725 func rewriteValueMIPS64_OpSlicemask(v *Value) bool {
7726 v_0 := v.Args[0]
7727 b := v.Block
7728
7729
7730 for {
7731 t := v.Type
7732 x := v_0
7733 v.reset(OpMIPS64SRAVconst)
7734 v.AuxInt = int64ToAuxInt(63)
7735 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7736 v0.AddArg(x)
7737 v.AddArg(v0)
7738 return true
7739 }
7740 }
7741 func rewriteValueMIPS64_OpStore(v *Value) bool {
7742 v_2 := v.Args[2]
7743 v_1 := v.Args[1]
7744 v_0 := v.Args[0]
7745
7746
7747
7748 for {
7749 t := auxToType(v.Aux)
7750 ptr := v_0
7751 val := v_1
7752 mem := v_2
7753 if !(t.Size() == 1) {
7754 break
7755 }
7756 v.reset(OpMIPS64MOVBstore)
7757 v.AddArg3(ptr, val, mem)
7758 return true
7759 }
7760
7761
7762
7763 for {
7764 t := auxToType(v.Aux)
7765 ptr := v_0
7766 val := v_1
7767 mem := v_2
7768 if !(t.Size() == 2) {
7769 break
7770 }
7771 v.reset(OpMIPS64MOVHstore)
7772 v.AddArg3(ptr, val, mem)
7773 return true
7774 }
7775
7776
7777
7778 for {
7779 t := auxToType(v.Aux)
7780 ptr := v_0
7781 val := v_1
7782 mem := v_2
7783 if !(t.Size() == 4 && !t.IsFloat()) {
7784 break
7785 }
7786 v.reset(OpMIPS64MOVWstore)
7787 v.AddArg3(ptr, val, mem)
7788 return true
7789 }
7790
7791
7792
7793 for {
7794 t := auxToType(v.Aux)
7795 ptr := v_0
7796 val := v_1
7797 mem := v_2
7798 if !(t.Size() == 8 && !t.IsFloat()) {
7799 break
7800 }
7801 v.reset(OpMIPS64MOVVstore)
7802 v.AddArg3(ptr, val, mem)
7803 return true
7804 }
7805
7806
7807
7808 for {
7809 t := auxToType(v.Aux)
7810 ptr := v_0
7811 val := v_1
7812 mem := v_2
7813 if !(t.Size() == 4 && t.IsFloat()) {
7814 break
7815 }
7816 v.reset(OpMIPS64MOVFstore)
7817 v.AddArg3(ptr, val, mem)
7818 return true
7819 }
7820
7821
7822
7823 for {
7824 t := auxToType(v.Aux)
7825 ptr := v_0
7826 val := v_1
7827 mem := v_2
7828 if !(t.Size() == 8 && t.IsFloat()) {
7829 break
7830 }
7831 v.reset(OpMIPS64MOVDstore)
7832 v.AddArg3(ptr, val, mem)
7833 return true
7834 }
7835 return false
7836 }
7837 func rewriteValueMIPS64_OpZero(v *Value) bool {
7838 v_1 := v.Args[1]
7839 v_0 := v.Args[0]
7840 b := v.Block
7841 config := b.Func.Config
7842 typ := &b.Func.Config.Types
7843
7844
7845 for {
7846 if auxIntToInt64(v.AuxInt) != 0 {
7847 break
7848 }
7849 mem := v_1
7850 v.copyOf(mem)
7851 return true
7852 }
7853
7854
7855 for {
7856 if auxIntToInt64(v.AuxInt) != 1 {
7857 break
7858 }
7859 ptr := v_0
7860 mem := v_1
7861 v.reset(OpMIPS64MOVBstore)
7862 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7863 v0.AuxInt = int64ToAuxInt(0)
7864 v.AddArg3(ptr, v0, mem)
7865 return true
7866 }
7867
7868
7869
7870 for {
7871 if auxIntToInt64(v.AuxInt) != 2 {
7872 break
7873 }
7874 t := auxToType(v.Aux)
7875 ptr := v_0
7876 mem := v_1
7877 if !(t.Alignment()%2 == 0) {
7878 break
7879 }
7880 v.reset(OpMIPS64MOVHstore)
7881 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7882 v0.AuxInt = int64ToAuxInt(0)
7883 v.AddArg3(ptr, v0, mem)
7884 return true
7885 }
7886
7887
7888 for {
7889 if auxIntToInt64(v.AuxInt) != 2 {
7890 break
7891 }
7892 ptr := v_0
7893 mem := v_1
7894 v.reset(OpMIPS64MOVBstore)
7895 v.AuxInt = int32ToAuxInt(1)
7896 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7897 v0.AuxInt = int64ToAuxInt(0)
7898 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
7899 v1.AuxInt = int32ToAuxInt(0)
7900 v1.AddArg3(ptr, v0, mem)
7901 v.AddArg3(ptr, v0, v1)
7902 return true
7903 }
7904
7905
7906
7907 for {
7908 if auxIntToInt64(v.AuxInt) != 4 {
7909 break
7910 }
7911 t := auxToType(v.Aux)
7912 ptr := v_0
7913 mem := v_1
7914 if !(t.Alignment()%4 == 0) {
7915 break
7916 }
7917 v.reset(OpMIPS64MOVWstore)
7918 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7919 v0.AuxInt = int64ToAuxInt(0)
7920 v.AddArg3(ptr, v0, mem)
7921 return true
7922 }
7923
7924
7925
7926 for {
7927 if auxIntToInt64(v.AuxInt) != 4 {
7928 break
7929 }
7930 t := auxToType(v.Aux)
7931 ptr := v_0
7932 mem := v_1
7933 if !(t.Alignment()%2 == 0) {
7934 break
7935 }
7936 v.reset(OpMIPS64MOVHstore)
7937 v.AuxInt = int32ToAuxInt(2)
7938 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7939 v0.AuxInt = int64ToAuxInt(0)
7940 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
7941 v1.AuxInt = int32ToAuxInt(0)
7942 v1.AddArg3(ptr, v0, mem)
7943 v.AddArg3(ptr, v0, v1)
7944 return true
7945 }
7946
7947
7948 for {
7949 if auxIntToInt64(v.AuxInt) != 4 {
7950 break
7951 }
7952 ptr := v_0
7953 mem := v_1
7954 v.reset(OpMIPS64MOVBstore)
7955 v.AuxInt = int32ToAuxInt(3)
7956 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7957 v0.AuxInt = int64ToAuxInt(0)
7958 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
7959 v1.AuxInt = int32ToAuxInt(2)
7960 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
7961 v2.AuxInt = int32ToAuxInt(1)
7962 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
7963 v3.AuxInt = int32ToAuxInt(0)
7964 v3.AddArg3(ptr, v0, mem)
7965 v2.AddArg3(ptr, v0, v3)
7966 v1.AddArg3(ptr, v0, v2)
7967 v.AddArg3(ptr, v0, v1)
7968 return true
7969 }
7970
7971
7972
7973 for {
7974 if auxIntToInt64(v.AuxInt) != 8 {
7975 break
7976 }
7977 t := auxToType(v.Aux)
7978 ptr := v_0
7979 mem := v_1
7980 if !(t.Alignment()%8 == 0) {
7981 break
7982 }
7983 v.reset(OpMIPS64MOVVstore)
7984 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7985 v0.AuxInt = int64ToAuxInt(0)
7986 v.AddArg3(ptr, v0, mem)
7987 return true
7988 }
7989
7990
7991
7992 for {
7993 if auxIntToInt64(v.AuxInt) != 8 {
7994 break
7995 }
7996 t := auxToType(v.Aux)
7997 ptr := v_0
7998 mem := v_1
7999 if !(t.Alignment()%4 == 0) {
8000 break
8001 }
8002 v.reset(OpMIPS64MOVWstore)
8003 v.AuxInt = int32ToAuxInt(4)
8004 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8005 v0.AuxInt = int64ToAuxInt(0)
8006 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
8007 v1.AuxInt = int32ToAuxInt(0)
8008 v1.AddArg3(ptr, v0, mem)
8009 v.AddArg3(ptr, v0, v1)
8010 return true
8011 }
8012
8013
8014
8015 for {
8016 if auxIntToInt64(v.AuxInt) != 8 {
8017 break
8018 }
8019 t := auxToType(v.Aux)
8020 ptr := v_0
8021 mem := v_1
8022 if !(t.Alignment()%2 == 0) {
8023 break
8024 }
8025 v.reset(OpMIPS64MOVHstore)
8026 v.AuxInt = int32ToAuxInt(6)
8027 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8028 v0.AuxInt = int64ToAuxInt(0)
8029 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8030 v1.AuxInt = int32ToAuxInt(4)
8031 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8032 v2.AuxInt = int32ToAuxInt(2)
8033 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8034 v3.AuxInt = int32ToAuxInt(0)
8035 v3.AddArg3(ptr, v0, mem)
8036 v2.AddArg3(ptr, v0, v3)
8037 v1.AddArg3(ptr, v0, v2)
8038 v.AddArg3(ptr, v0, v1)
8039 return true
8040 }
8041
8042
8043 for {
8044 if auxIntToInt64(v.AuxInt) != 3 {
8045 break
8046 }
8047 ptr := v_0
8048 mem := v_1
8049 v.reset(OpMIPS64MOVBstore)
8050 v.AuxInt = int32ToAuxInt(2)
8051 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8052 v0.AuxInt = int64ToAuxInt(0)
8053 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
8054 v1.AuxInt = int32ToAuxInt(1)
8055 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
8056 v2.AuxInt = int32ToAuxInt(0)
8057 v2.AddArg3(ptr, v0, mem)
8058 v1.AddArg3(ptr, v0, v2)
8059 v.AddArg3(ptr, v0, v1)
8060 return true
8061 }
8062
8063
8064
8065 for {
8066 if auxIntToInt64(v.AuxInt) != 6 {
8067 break
8068 }
8069 t := auxToType(v.Aux)
8070 ptr := v_0
8071 mem := v_1
8072 if !(t.Alignment()%2 == 0) {
8073 break
8074 }
8075 v.reset(OpMIPS64MOVHstore)
8076 v.AuxInt = int32ToAuxInt(4)
8077 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8078 v0.AuxInt = int64ToAuxInt(0)
8079 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8080 v1.AuxInt = int32ToAuxInt(2)
8081 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
8082 v2.AuxInt = int32ToAuxInt(0)
8083 v2.AddArg3(ptr, v0, mem)
8084 v1.AddArg3(ptr, v0, v2)
8085 v.AddArg3(ptr, v0, v1)
8086 return true
8087 }
8088
8089
8090
8091 for {
8092 if auxIntToInt64(v.AuxInt) != 12 {
8093 break
8094 }
8095 t := auxToType(v.Aux)
8096 ptr := v_0
8097 mem := v_1
8098 if !(t.Alignment()%4 == 0) {
8099 break
8100 }
8101 v.reset(OpMIPS64MOVWstore)
8102 v.AuxInt = int32ToAuxInt(8)
8103 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8104 v0.AuxInt = int64ToAuxInt(0)
8105 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
8106 v1.AuxInt = int32ToAuxInt(4)
8107 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
8108 v2.AuxInt = int32ToAuxInt(0)
8109 v2.AddArg3(ptr, v0, mem)
8110 v1.AddArg3(ptr, v0, v2)
8111 v.AddArg3(ptr, v0, v1)
8112 return true
8113 }
8114
8115
8116
8117 for {
8118 if auxIntToInt64(v.AuxInt) != 16 {
8119 break
8120 }
8121 t := auxToType(v.Aux)
8122 ptr := v_0
8123 mem := v_1
8124 if !(t.Alignment()%8 == 0) {
8125 break
8126 }
8127 v.reset(OpMIPS64MOVVstore)
8128 v.AuxInt = int32ToAuxInt(8)
8129 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8130 v0.AuxInt = int64ToAuxInt(0)
8131 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
8132 v1.AuxInt = int32ToAuxInt(0)
8133 v1.AddArg3(ptr, v0, mem)
8134 v.AddArg3(ptr, v0, v1)
8135 return true
8136 }
8137
8138
8139
8140 for {
8141 if auxIntToInt64(v.AuxInt) != 24 {
8142 break
8143 }
8144 t := auxToType(v.Aux)
8145 ptr := v_0
8146 mem := v_1
8147 if !(t.Alignment()%8 == 0) {
8148 break
8149 }
8150 v.reset(OpMIPS64MOVVstore)
8151 v.AuxInt = int32ToAuxInt(16)
8152 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
8153 v0.AuxInt = int64ToAuxInt(0)
8154 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
8155 v1.AuxInt = int32ToAuxInt(8)
8156 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
8157 v2.AuxInt = int32ToAuxInt(0)
8158 v2.AddArg3(ptr, v0, mem)
8159 v1.AddArg3(ptr, v0, v2)
8160 v.AddArg3(ptr, v0, v1)
8161 return true
8162 }
8163
8164
8165
8166 for {
8167 s := auxIntToInt64(v.AuxInt)
8168 t := auxToType(v.Aux)
8169 ptr := v_0
8170 mem := v_1
8171 if !(s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice) {
8172 break
8173 }
8174 v.reset(OpMIPS64DUFFZERO)
8175 v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
8176 v.AddArg2(ptr, mem)
8177 return true
8178 }
8179
8180
8181
8182 for {
8183 s := auxIntToInt64(v.AuxInt)
8184 t := auxToType(v.Aux)
8185 ptr := v_0
8186 mem := v_1
8187 if !((s > 8*128 || config.noDuffDevice) || t.Alignment()%8 != 0) {
8188 break
8189 }
8190 v.reset(OpMIPS64LoweredZero)
8191 v.AuxInt = int64ToAuxInt(t.Alignment())
8192 v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type)
8193 v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
8194 v0.AddArg(ptr)
8195 v.AddArg3(ptr, v0, mem)
8196 return true
8197 }
8198 return false
8199 }
8200 func rewriteBlockMIPS64(b *Block) bool {
8201 switch b.Kind {
8202 case BlockMIPS64EQ:
8203
8204
8205 for b.Controls[0].Op == OpMIPS64FPFlagTrue {
8206 v_0 := b.Controls[0]
8207 cmp := v_0.Args[0]
8208 b.resetWithControl(BlockMIPS64FPF, cmp)
8209 return true
8210 }
8211
8212
8213 for b.Controls[0].Op == OpMIPS64FPFlagFalse {
8214 v_0 := b.Controls[0]
8215 cmp := v_0.Args[0]
8216 b.resetWithControl(BlockMIPS64FPT, cmp)
8217 return true
8218 }
8219
8220
8221 for b.Controls[0].Op == OpMIPS64XORconst {
8222 v_0 := b.Controls[0]
8223 if auxIntToInt64(v_0.AuxInt) != 1 {
8224 break
8225 }
8226 cmp := v_0.Args[0]
8227 if cmp.Op != OpMIPS64SGT {
8228 break
8229 }
8230 b.resetWithControl(BlockMIPS64NE, cmp)
8231 return true
8232 }
8233
8234
8235 for b.Controls[0].Op == OpMIPS64XORconst {
8236 v_0 := b.Controls[0]
8237 if auxIntToInt64(v_0.AuxInt) != 1 {
8238 break
8239 }
8240 cmp := v_0.Args[0]
8241 if cmp.Op != OpMIPS64SGTU {
8242 break
8243 }
8244 b.resetWithControl(BlockMIPS64NE, cmp)
8245 return true
8246 }
8247
8248
8249 for b.Controls[0].Op == OpMIPS64XORconst {
8250 v_0 := b.Controls[0]
8251 if auxIntToInt64(v_0.AuxInt) != 1 {
8252 break
8253 }
8254 cmp := v_0.Args[0]
8255 if cmp.Op != OpMIPS64SGTconst {
8256 break
8257 }
8258 b.resetWithControl(BlockMIPS64NE, cmp)
8259 return true
8260 }
8261
8262
8263 for b.Controls[0].Op == OpMIPS64XORconst {
8264 v_0 := b.Controls[0]
8265 if auxIntToInt64(v_0.AuxInt) != 1 {
8266 break
8267 }
8268 cmp := v_0.Args[0]
8269 if cmp.Op != OpMIPS64SGTUconst {
8270 break
8271 }
8272 b.resetWithControl(BlockMIPS64NE, cmp)
8273 return true
8274 }
8275
8276
8277 for b.Controls[0].Op == OpMIPS64SGTUconst {
8278 v_0 := b.Controls[0]
8279 if auxIntToInt64(v_0.AuxInt) != 1 {
8280 break
8281 }
8282 x := v_0.Args[0]
8283 b.resetWithControl(BlockMIPS64NE, x)
8284 return true
8285 }
8286
8287
8288 for b.Controls[0].Op == OpMIPS64SGTU {
8289 v_0 := b.Controls[0]
8290 _ = v_0.Args[1]
8291 x := v_0.Args[0]
8292 v_0_1 := v_0.Args[1]
8293 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8294 break
8295 }
8296 b.resetWithControl(BlockMIPS64EQ, x)
8297 return true
8298 }
8299
8300
8301 for b.Controls[0].Op == OpMIPS64SGTconst {
8302 v_0 := b.Controls[0]
8303 if auxIntToInt64(v_0.AuxInt) != 0 {
8304 break
8305 }
8306 x := v_0.Args[0]
8307 b.resetWithControl(BlockMIPS64GEZ, x)
8308 return true
8309 }
8310
8311
8312 for b.Controls[0].Op == OpMIPS64SGT {
8313 v_0 := b.Controls[0]
8314 _ = v_0.Args[1]
8315 x := v_0.Args[0]
8316 v_0_1 := v_0.Args[1]
8317 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8318 break
8319 }
8320 b.resetWithControl(BlockMIPS64LEZ, x)
8321 return true
8322 }
8323
8324
8325 for b.Controls[0].Op == OpMIPS64MOVVconst {
8326 v_0 := b.Controls[0]
8327 if auxIntToInt64(v_0.AuxInt) != 0 {
8328 break
8329 }
8330 b.Reset(BlockFirst)
8331 return true
8332 }
8333
8334
8335
8336 for b.Controls[0].Op == OpMIPS64MOVVconst {
8337 v_0 := b.Controls[0]
8338 c := auxIntToInt64(v_0.AuxInt)
8339 if !(c != 0) {
8340 break
8341 }
8342 b.Reset(BlockFirst)
8343 b.swapSuccessors()
8344 return true
8345 }
8346 case BlockMIPS64GEZ:
8347
8348
8349
8350 for b.Controls[0].Op == OpMIPS64MOVVconst {
8351 v_0 := b.Controls[0]
8352 c := auxIntToInt64(v_0.AuxInt)
8353 if !(c >= 0) {
8354 break
8355 }
8356 b.Reset(BlockFirst)
8357 return true
8358 }
8359
8360
8361
8362 for b.Controls[0].Op == OpMIPS64MOVVconst {
8363 v_0 := b.Controls[0]
8364 c := auxIntToInt64(v_0.AuxInt)
8365 if !(c < 0) {
8366 break
8367 }
8368 b.Reset(BlockFirst)
8369 b.swapSuccessors()
8370 return true
8371 }
8372 case BlockMIPS64GTZ:
8373
8374
8375
8376 for b.Controls[0].Op == OpMIPS64MOVVconst {
8377 v_0 := b.Controls[0]
8378 c := auxIntToInt64(v_0.AuxInt)
8379 if !(c > 0) {
8380 break
8381 }
8382 b.Reset(BlockFirst)
8383 return true
8384 }
8385
8386
8387
8388 for b.Controls[0].Op == OpMIPS64MOVVconst {
8389 v_0 := b.Controls[0]
8390 c := auxIntToInt64(v_0.AuxInt)
8391 if !(c <= 0) {
8392 break
8393 }
8394 b.Reset(BlockFirst)
8395 b.swapSuccessors()
8396 return true
8397 }
8398 case BlockIf:
8399
8400
8401 for {
8402 cond := b.Controls[0]
8403 b.resetWithControl(BlockMIPS64NE, cond)
8404 return true
8405 }
8406 case BlockMIPS64LEZ:
8407
8408
8409
8410 for b.Controls[0].Op == OpMIPS64MOVVconst {
8411 v_0 := b.Controls[0]
8412 c := auxIntToInt64(v_0.AuxInt)
8413 if !(c <= 0) {
8414 break
8415 }
8416 b.Reset(BlockFirst)
8417 return true
8418 }
8419
8420
8421
8422 for b.Controls[0].Op == OpMIPS64MOVVconst {
8423 v_0 := b.Controls[0]
8424 c := auxIntToInt64(v_0.AuxInt)
8425 if !(c > 0) {
8426 break
8427 }
8428 b.Reset(BlockFirst)
8429 b.swapSuccessors()
8430 return true
8431 }
8432 case BlockMIPS64LTZ:
8433
8434
8435
8436 for b.Controls[0].Op == OpMIPS64MOVVconst {
8437 v_0 := b.Controls[0]
8438 c := auxIntToInt64(v_0.AuxInt)
8439 if !(c < 0) {
8440 break
8441 }
8442 b.Reset(BlockFirst)
8443 return true
8444 }
8445
8446
8447
8448 for b.Controls[0].Op == OpMIPS64MOVVconst {
8449 v_0 := b.Controls[0]
8450 c := auxIntToInt64(v_0.AuxInt)
8451 if !(c >= 0) {
8452 break
8453 }
8454 b.Reset(BlockFirst)
8455 b.swapSuccessors()
8456 return true
8457 }
8458 case BlockMIPS64NE:
8459
8460
8461 for b.Controls[0].Op == OpMIPS64FPFlagTrue {
8462 v_0 := b.Controls[0]
8463 cmp := v_0.Args[0]
8464 b.resetWithControl(BlockMIPS64FPT, cmp)
8465 return true
8466 }
8467
8468
8469 for b.Controls[0].Op == OpMIPS64FPFlagFalse {
8470 v_0 := b.Controls[0]
8471 cmp := v_0.Args[0]
8472 b.resetWithControl(BlockMIPS64FPF, cmp)
8473 return true
8474 }
8475
8476
8477 for b.Controls[0].Op == OpMIPS64XORconst {
8478 v_0 := b.Controls[0]
8479 if auxIntToInt64(v_0.AuxInt) != 1 {
8480 break
8481 }
8482 cmp := v_0.Args[0]
8483 if cmp.Op != OpMIPS64SGT {
8484 break
8485 }
8486 b.resetWithControl(BlockMIPS64EQ, cmp)
8487 return true
8488 }
8489
8490
8491 for b.Controls[0].Op == OpMIPS64XORconst {
8492 v_0 := b.Controls[0]
8493 if auxIntToInt64(v_0.AuxInt) != 1 {
8494 break
8495 }
8496 cmp := v_0.Args[0]
8497 if cmp.Op != OpMIPS64SGTU {
8498 break
8499 }
8500 b.resetWithControl(BlockMIPS64EQ, cmp)
8501 return true
8502 }
8503
8504
8505 for b.Controls[0].Op == OpMIPS64XORconst {
8506 v_0 := b.Controls[0]
8507 if auxIntToInt64(v_0.AuxInt) != 1 {
8508 break
8509 }
8510 cmp := v_0.Args[0]
8511 if cmp.Op != OpMIPS64SGTconst {
8512 break
8513 }
8514 b.resetWithControl(BlockMIPS64EQ, cmp)
8515 return true
8516 }
8517
8518
8519 for b.Controls[0].Op == OpMIPS64XORconst {
8520 v_0 := b.Controls[0]
8521 if auxIntToInt64(v_0.AuxInt) != 1 {
8522 break
8523 }
8524 cmp := v_0.Args[0]
8525 if cmp.Op != OpMIPS64SGTUconst {
8526 break
8527 }
8528 b.resetWithControl(BlockMIPS64EQ, cmp)
8529 return true
8530 }
8531
8532
8533 for b.Controls[0].Op == OpMIPS64SGTUconst {
8534 v_0 := b.Controls[0]
8535 if auxIntToInt64(v_0.AuxInt) != 1 {
8536 break
8537 }
8538 x := v_0.Args[0]
8539 b.resetWithControl(BlockMIPS64EQ, x)
8540 return true
8541 }
8542
8543
8544 for b.Controls[0].Op == OpMIPS64SGTU {
8545 v_0 := b.Controls[0]
8546 _ = v_0.Args[1]
8547 x := v_0.Args[0]
8548 v_0_1 := v_0.Args[1]
8549 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8550 break
8551 }
8552 b.resetWithControl(BlockMIPS64NE, x)
8553 return true
8554 }
8555
8556
8557 for b.Controls[0].Op == OpMIPS64SGTconst {
8558 v_0 := b.Controls[0]
8559 if auxIntToInt64(v_0.AuxInt) != 0 {
8560 break
8561 }
8562 x := v_0.Args[0]
8563 b.resetWithControl(BlockMIPS64LTZ, x)
8564 return true
8565 }
8566
8567
8568 for b.Controls[0].Op == OpMIPS64SGT {
8569 v_0 := b.Controls[0]
8570 _ = v_0.Args[1]
8571 x := v_0.Args[0]
8572 v_0_1 := v_0.Args[1]
8573 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8574 break
8575 }
8576 b.resetWithControl(BlockMIPS64GTZ, x)
8577 return true
8578 }
8579
8580
8581 for b.Controls[0].Op == OpMIPS64MOVVconst {
8582 v_0 := b.Controls[0]
8583 if auxIntToInt64(v_0.AuxInt) != 0 {
8584 break
8585 }
8586 b.Reset(BlockFirst)
8587 b.swapSuccessors()
8588 return true
8589 }
8590
8591
8592
8593 for b.Controls[0].Op == OpMIPS64MOVVconst {
8594 v_0 := b.Controls[0]
8595 c := auxIntToInt64(v_0.AuxInt)
8596 if !(c != 0) {
8597 break
8598 }
8599 b.Reset(BlockFirst)
8600 return true
8601 }
8602 }
8603 return false
8604 }
8605
View as plain text