Lines Matching full:n

31 asm volatile("{\n\t" in S4_storerhnew_rr()
32 " r0 = %0\n\n" in S4_storerhnew_rr()
33 " memh(%1+%2<<#2) = r0.new\n\t" in S4_storerhnew_rr()
34 "}\n" in S4_storerhnew_rr()
43 asm volatile("{\n\t" in S4_storerbnew_ap()
44 " r0 = %1\n\n" in S4_storerbnew_ap()
45 " memb(%0 = ##data) = r0.new\n\t" in S4_storerbnew_ap()
46 "}\n" in S4_storerbnew_ap()
56 asm volatile("{\n\t" in S4_storerhnew_ap()
57 " r0 = %1\n\n" in S4_storerhnew_ap()
58 " memh(%0 = ##data) = r0.new\n\t" in S4_storerhnew_ap()
59 "}\n" in S4_storerhnew_ap()
69 asm volatile("{\n\t" in S4_storerinew_ap()
70 " r0 = %1\n\n" in S4_storerinew_ap()
71 " memw(%0 = ##data) = r0.new\n\t" in S4_storerinew_ap()
72 "}\n" in S4_storerinew_ap()
81 asm volatile("p0 = cmp.eq(%0, #1)\n\t" in S4_storeirbt_io()
82 "if (p0) memb(%1+#4)=#27\n\t" in S4_storeirbt_io()
89 asm volatile("p0 = cmp.eq(%0, #1)\n\t" in S4_storeirbf_io()
90 "if (!p0) memb(%1+#4)=#27\n\t" in S4_storeirbf_io()
97 asm volatile("{\n\t" in S4_storeirbtnew_io()
98 " p0 = cmp.eq(%0, #1)\n\t" in S4_storeirbtnew_io()
99 " if (p0.new) memb(%1+#4)=#27\n\t" in S4_storeirbtnew_io()
100 "}\n\t" in S4_storeirbtnew_io()
107 asm volatile("{\n\t" in S4_storeirbfnew_io()
108 " p0 = cmp.eq(%0, #1)\n\t" in S4_storeirbfnew_io()
109 " if (!p0.new) memb(%1+#4)=#27\n\t" in S4_storeirbfnew_io()
110 "}\n\t" in S4_storeirbfnew_io()
117 asm volatile("p0 = cmp.eq(%0, #1)\n\t" in S4_storeirht_io()
118 "if (p0) memh(%1+#4)=#27\n\t" in S4_storeirht_io()
125 asm volatile("p0 = cmp.eq(%0, #1)\n\t" in S4_storeirhf_io()
126 "if (!p0) memh(%1+#4)=#27\n\t" in S4_storeirhf_io()
133 asm volatile("{\n\t" in S4_storeirhtnew_io()
134 " p0 = cmp.eq(%0, #1)\n\t" in S4_storeirhtnew_io()
135 " if (p0.new) memh(%1+#4)=#27\n\t" in S4_storeirhtnew_io()
136 "}\n\t" in S4_storeirhtnew_io()
143 asm volatile("{\n\t" in S4_storeirhfnew_io()
144 " p0 = cmp.eq(%0, #1)\n\t" in S4_storeirhfnew_io()
145 " if (!p0.new) memh(%1+#4)=#27\n\t" in S4_storeirhfnew_io()
146 "}\n\t" in S4_storeirhfnew_io()
153 asm volatile("p0 = cmp.eq(%0, #1)\n\t" in S4_storeirit_io()
154 "if (p0) memw(%1+#4)=#27\n\t" in S4_storeirit_io()
161 asm volatile("p0 = cmp.eq(%0, #1)\n\t" in S4_storeirif_io()
162 "if (!p0) memw(%1+#4)=#27\n\t" in S4_storeirif_io()
169 asm volatile("{\n\t" in S4_storeiritnew_io()
170 " p0 = cmp.eq(%0, #1)\n\t" in S4_storeiritnew_io()
171 " if (p0.new) memw(%1+#4)=#27\n\t" in S4_storeiritnew_io()
172 "}\n\t" in S4_storeiritnew_io()
179 asm volatile("{\n\t" in S4_storeirifnew_io()
180 " p0 = cmp.eq(%0, #1)\n\t" in S4_storeirifnew_io()
181 " if (!p0.new) memw(%1+#4)=#27\n\t" in S4_storeirifnew_io()
182 "}\n\t" in S4_storeirifnew_io()
190 asm volatile("%0 = #31\n\t" in L2_ploadrifnew_pi()
191 "{\n\t" in L2_ploadrifnew_pi()
192 " p0 = cmp.eq(%2, #1)\n\t" in L2_ploadrifnew_pi()
193 " if (!p0.new) %0 = memw(%1++#4)\n\t" in L2_ploadrifnew_pi()
194 "}\n\t" in L2_ploadrifnew_pi()
209 asm ("r5 = #7\n\t" in cmpnd_cmp_jump()
210 "r6 = #9\n\t" in cmpnd_cmp_jump()
211 "{\n\t" in cmpnd_cmp_jump()
212 " p0 = cmp.eq(r5, #7)\n\t" in cmpnd_cmp_jump()
213 " if (p0.new) jump:nt 1f\n\t" in cmpnd_cmp_jump()
214 " p0 = cmp.eq(r6, #7)\n\t" in cmpnd_cmp_jump()
215 "}\n\t" in cmpnd_cmp_jump()
216 "%0 = #12\n\t" in cmpnd_cmp_jump()
217 "jump 2f\n\t" in cmpnd_cmp_jump()
218 "1:\n\t" in cmpnd_cmp_jump()
219 "%0 = #13\n\t" in cmpnd_cmp_jump()
220 "2:\n\t" in cmpnd_cmp_jump()
228 asm volatile("r5 = %2\n\t" in test_clrtnew()
229 "{\n\t" in test_clrtnew()
230 "p0 = cmp.eq(%1, #1)\n\t" in test_clrtnew()
231 "if (p0.new) r5=#0\n\t" in test_clrtnew()
232 "}\n\t" in test_clrtnew()
233 "%0 = r5\n\t" in test_clrtnew()
250 asm ("SL2_return_tnew:\n\t"
251 " allocframe(#0)\n\t"
252 " r1 = #1\n\t"
253 " memw(##early_exit) = r1\n\t"
254 " {\n\t"
255 " p0 = cmp.eq(r0, #1)\n\t"
256 " if (p0.new) dealloc_return:nt\n\t" /* SL2_return_tnew */
257 " }\n\t"
258 " r1 = #0\n\t"
259 " memw(##early_exit) = r1\n\t"
260 " dealloc_return\n\t"
266 asm ("m0 = %1\n\t" in creg_pair()
267 "m1 = %2\n\t" in creg_pair()
268 "%0 = c7:6\n\t" in creg_pair()
277 asm ("%0 = decbin(%2, %3)\n\t" in decbin()
278 "%1 = p0\n\t" in decbin()
289 asm ("r5 = #1\n\t" in auto_and()
290 "{\n\t" in auto_and()
291 " p0 = cmp.eq(r1, #1)\n\t" in auto_and()
292 " p0 = cmp.eq(r1, #2)\n\t" in auto_and()
293 "}\n\t" in auto_and()
294 "%0 = p0\n\t" in auto_and()
305 asm("r0 = #2\n\t" in test_lsbnew()
306 "r1 = #5\n\t" in test_lsbnew()
307 "{\n\t" in test_lsbnew()
308 " p0 = r0\n\t" in test_lsbnew()
309 " if (p0.new) r1 = #3\n\t" in test_lsbnew()
310 "}\n\t" in test_lsbnew()
311 "%0 = r1\n\t" in test_lsbnew()
319 asm volatile ("l2fetch(r0, r1)\n\t" in test_l2fetch()
320 "l2fetch(r0, r3:2)\n\t"); in test_l2fetch()
326 asm("%0 = ct0(%1)\n\t" : "=r"(res) : "r"(x)); in ct0()
333 asm("%0 = ct1(%1)\n\t" : "=r"(res) : "r"(x)); in ct1()
340 asm("%0 = ct0(%1)\n\t" : "=r"(res) : "r"(x)); in ct0p()
347 asm("%0 = ct1(%1)\n\t" : "=r"(res) : "r"(x)); in ct1p()
377 asm("%0 = mpy(%1, %2):rnd\n\t" : "=r"(res) : "r"(x), "r"(y)); in dpmpyss_rnd_s0()