Tue Sep 23 08:50:11 2008 UTC
stts: don't modify %cr0 if TS is already set.


(ad)
diff -r1.12 -r1.13 src/sys/arch/i386/i386/cpufunc.S

cvs diff -r1.12 -r1.13 src/sys/arch/i386/i386/cpufunc.S

--- src/sys/arch/i386/i386/cpufunc.S 2008/05/25 15:56:12 1.12
+++ src/sys/arch/i386/i386/cpufunc.S 2008/09/23 08:50:11 1.13
@@ -1,503 +1,506 @@
-/*	$NetBSD: cpufunc.S,v 1.12 2008/05/25 15:56:12 chs Exp $	*/
+/*	$NetBSD: cpufunc.S,v 1.13 2008/09/23 08:50:11 ad Exp $	*/
 
 /*-
  * Copyright (c) 1998, 2007 The NetBSD Foundation, Inc.
  * All rights reserved.
  *
  * This code is derived from software contributed to The NetBSD Foundation
  * by Charles M. Hannum, and by Andrew Doran.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
  * are met:
  * 1. Redistributions of source code must retain the above copyright
  *    notice, this list of conditions and the following disclaimer.
  * 2. Redistributions in binary form must reproduce the above copyright
  *    notice, this list of conditions and the following disclaimer in the
  *    documentation and/or other materials provided with the distribution.
  *
  * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
  * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
  * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
  * POSSIBILITY OF SUCH DAMAGE.
  */
 
 /*
  * Functions to provide access to i386-specific instructions.
  *
  * These are shared with NetBSD/xen.
  */
 
 #include <machine/asm.h>
-__KERNEL_RCSID(0, "$NetBSD: cpufunc.S,v 1.12 2008/05/25 15:56:12 chs Exp $");
+__KERNEL_RCSID(0, "$NetBSD: cpufunc.S,v 1.13 2008/09/23 08:50:11 ad Exp $");
 
 #include "opt_xen.h"
 
 #include <machine/specialreg.h>
 #include <machine/segments.h>
 
 #include "assym.h"
 
 /* Small and slow, so align less. */
 #undef _ALIGN_TEXT
 #define	_ALIGN_TEXT	.align 8
 
 ENTRY(x86_lfence)
 	lock
 	addl	$0, -4(%esp)
 	ret
 END(x86_lfence)
 
 ENTRY(x86_sfence)
 	lock
 	addl	$0, -4(%esp)
 	ret
 END(x86_sfence)
 
 ENTRY(x86_mfence)
 	lock
 	addl	$0, -4(%esp)
 	ret
 END(x86_mfence)
 
 ENTRY(lidt)
 	movl	4(%esp), %eax
 	lidt	(%eax)
 	ret
 END(lidt)
 
 ENTRY(rcr3)
 	movl	%cr3, %eax
 	ret
 END(rcr3)
 
 ENTRY(lcr4)
 	movl	4(%esp), %eax
 	movl	%eax, %cr4
 	ret
 END(lcr4)
 
 ENTRY(rcr4)
 	movl	%cr4, %eax
 	ret
 END(rcr4)
 
 ENTRY(x86_read_flags)
 	pushfl
 	popl	%eax
 	ret
 END(x86_read_flags)
 
 ENTRY(x86_write_flags)
 	movl	4(%esp), %eax
 	pushl	%eax
 	popfl
 	ret
 END(x86_write_flags)
 
 #ifndef XEN
 STRONG_ALIAS(x86_write_psl,x86_write_flags)
 STRONG_ALIAS(x86_read_psl,x86_read_flags)
 #endif	/* XEN */
 
 ENTRY(rdmsr)
 	movl	4(%esp), %ecx
 	rdmsr
 	ret
 END(rdmsr)
 
 ENTRY(wrmsr)
 	movl	4(%esp), %ecx
 	movl	8(%esp), %eax
 	movl	12(%esp), %edx
 	wrmsr
 	ret
 END(wrmsr)
 
 ENTRY(rdmsr_locked)
 	movl	4(%esp), %ecx
 	pushl	%edi
 	movl	$OPTERON_MSR_PASSCODE, %edi
 	rdmsr
 	popl	%edi
 	ret
 END(rdmsr_locked)
 
 ENTRY(wrmsr_locked)
 	movl	4(%esp), %ecx
 	movl	8(%esp), %eax
 	movl	12(%esp), %edx
 	pushl	%edi
 	movl	$OPTERON_MSR_PASSCODE, %edi
 	wrmsr
 	popl	%edi
 	ret
 END(wrmsr_locked)
 
 ENTRY(cpu_counter)
 	rdtsc
 	addl	CPUVAR(CC_SKEW), %eax
 	adcl	CPUVAR(CC_SKEW+4), %edx
 	ret
 END(cpu_counter)
 
 ENTRY(cpu_counter32)
 	rdtsc
 	addl	CPUVAR(CC_SKEW), %eax
 	ret
 END(cpu_counter32)
 
 ENTRY(rdpmc)
 	movl	4(%esp), %ecx
 	rdpmc
 	ret
 END(rdpmc)
 
 ENTRY(breakpoint)
 	pushl	%ebp
 	movl	%esp, %ebp
 	int	$0x03		/* paranoid, not 'int3' */
 	popl	%ebp
 	ret
 END(breakpoint)
 
 ENTRY(x86_atomic_testset_ul)
 	movl	4(%esp), %ecx
 	movl	8(%esp), %eax
 	xchgl	%eax, (%ecx)
 	ret
 END(x86_atomic_testset_ul)
 
 ENTRY(x86_atomic_testset_i)
 	movl	4(%esp), %ecx
 	movl	8(%esp), %eax
 	xchgl	%eax, (%ecx)
 	ret
 END(x86_atomic_testset_i)
 
 ENTRY(x86_atomic_testset_b)
 	movl	4(%esp), %ecx
 	movl	8(%esp), %eax
 	xchgb	%al, (%ecx)
 	andl	$0xff, %eax
 	ret
 END(x86_atomic_testset_b)
 
 ENTRY(x86_atomic_setbits_l)
 	movl	4(%esp), %ecx
 	movl	8(%esp), %eax
 	lock
 	orl	%eax, (%ecx)
 	ret
 END(x86_atomic_setbits_l)
 
 ENTRY(x86_atomic_clearbits_l)
 	movl	4(%esp), %ecx
 	movl	8(%esp), %eax
 	notl	%eax
 	lock
 	andl	%eax, (%ecx)
 	ret
 END(x86_atomic_clearbits_l)
 
 ENTRY(x86_curcpu)
 	movl	%fs:(CPU_INFO_SELF), %eax
 	ret
 END(x86_curcpu)
 
 ENTRY(x86_curlwp)
 	movl	%fs:(CPU_INFO_CURLWP), %eax
 	ret
 END(x86_curlwp)
 
 ENTRY(cpu_set_curpri)
 	movl	4(%esp), %eax
 	movl	%eax, %fs:(CPU_INFO_CURPRIORITY)
 	ret
 END(cpu_set_curpri)
 
 ENTRY(__byte_swap_u32_variable)
 	movl	4(%esp), %eax
 	bswapl	%eax
 	ret
 END(__byte_swap_u32_variable)
 
 ENTRY(__byte_swap_u16_variable)
 	movl	4(%esp), %eax
 	xchgb	%al, %ah
 	ret
 END(__byte_swap_u16_variable)
 
 /*
  * void x86_flush()
  *
  * Flush instruction pipelines by doing an intersegment (far) return.
  */
 ENTRY(x86_flush)
 	popl	%eax
 	pushl	$GSEL(GCODE_SEL, SEL_KPL)
 	pushl	%eax
 	lret
 END(x86_flush)
 
 /* Waits - set up stack frame. */
 ENTRY(x86_hlt)
 	pushl	%ebp
 	movl	%esp, %ebp
 	hlt
 	leave
 	ret
 END(x86_hlt)
 
 /* Waits - set up stack frame. */
 ENTRY(x86_stihlt)
 	pushl	%ebp
 	movl	%esp, %ebp
 	sti
 	hlt
 	leave
 	ret
 END(x86_stihlt)
 
 ENTRY(x86_monitor)
 	movl	4(%esp), %eax
 	movl	8(%esp), %ecx
 	movl	12(%esp), %edx
 	monitor	%eax, %ecx, %edx
 	ret
 END(x86_monitor)
 
 /* Waits - set up stack frame. */
 ENTRY(x86_mwait)
 	pushl	%ebp
 	movl	%esp, %ebp
 	movl	8(%ebp), %eax
 	movl	12(%ebp), %ecx
 	mwait	%eax, %ecx
 	leave
 	ret
 END(x86_mwait)
 
 ENTRY(x86_pause)
 	pause
 	ret
 END(x86_pause)
 
 ENTRY(x86_cpuid2)
 	pushl	%ebx
 	pushl	%edi
 	movl	12(%esp), %eax
 	movl	16(%esp), %ecx
 	movl	20(%esp), %edi
 	cpuid
 	movl	%eax, 0(%edi)
 	movl	%ebx, 4(%edi)
 	movl	%ecx, 8(%edi)
 	movl	%edx, 12(%edi)
 	popl	%edi
 	popl	%ebx
 	ret
 END(x86_cpuid2)
 
 ENTRY(x86_getss)
 	movl	%ss, %eax
 	ret
 END(x86_getss)
 
 ENTRY(fldcw)
 	movl	4(%esp), %eax
 	fldcw	(%eax)
 	ret
 END(fldcw)
 
 ENTRY(fnclex)
 	fnclex
 	ret
 END(fnclex)
 
 ENTRY(fninit)
 	fninit
 	ret
 END(fninit)
 
 ENTRY(fnsave)
 	movl	4(%esp), %eax
 	fnsave	(%eax)
 	ret
 END(fnsave)
 
 ENTRY(fnstcw)
 	movl	4(%esp), %eax
 	fnstcw	(%eax)
 	ret
 END(fnstcw)
 
 ENTRY(fnstsw)
 	movl	4(%esp), %eax
 	fnstsw	(%eax)
 	ret
 END(fnstsw)
 
 ENTRY(fp_divide_by_0)
 	fldz
 	fld1
 	fdiv	%st, %st(1)
 	fwait
 	ret
 END(fp_divide_by_0)
 
 ENTRY(frstor)
 	movl	4(%esp), %eax
 	frstor	(%eax)
 	ret
 END(frstor)
 
 ENTRY(fwait)
 	fwait
 	ret
 END(fwait)
 
 ENTRY(clts)
 	clts
 	ret
 END(clts)
 
 ENTRY(stts)
 	movl	%cr0, %eax
+	testl	$CR0_TS, %eax
+	jnz	1f
 	orl	$CR0_TS, %eax
 	movl	%eax, %cr0
+1:
 	ret
 END(stts)
 
 ENTRY(fxsave)
 	movl	4(%esp), %eax
 	fxsave	(%eax)
 	ret
 END(fxsave)
 
 ENTRY(fxrstor)
 	movl	4(%esp), %eax
 	fxrstor	(%eax)
 	ret
 END(fxrstor)
 
 ENTRY(fldummy)
 	movl	4(%esp), %eax
 	ffree	%st(7)
 	fld	(%eax)
 	ret
 END(fldummy)
 
 ENTRY(inb)
 	movl	4(%esp), %edx
 	xorl	%eax, %eax
 	inb	%dx, %al
 	ret
 END(inb)
 
 ENTRY(insb)
 	pushl	%edi
 	movl	8(%esp), %edx
 	movl	12(%esp), %edi
 	movl	16(%esp), %ecx
 	rep
 	insb
 	popl	%edi
 	ret
 END(insb)
 
 ENTRY(inw)
 	movl	4(%esp), %edx
 	xorl	%eax, %eax
 	inw	%dx, %ax
 	ret
 END(inw)
 
 ENTRY(insw)
 	pushl	%edi
 	movl	8(%esp), %edx
 	movl	12(%esp), %edi
 	movl	16(%esp), %ecx
 	rep
 	insw
 	popl	%edi
 	ret
 END(insw)
 
 ENTRY(inl)
 	movl	4(%esp), %edx
 	inl	%dx, %eax
 	ret
 END(inl)
 
 ENTRY(insl)
 	pushl	%edi
 	movl	8(%esp), %edx
 	movl	12(%esp), %edi
 	movl	16(%esp), %ecx
 	rep
 	insl
 	popl	%edi
 	ret
 END(insl)
 
 ENTRY(outb)
 	movl	4(%esp), %edx
 	movl	8(%esp), %eax
 	outb	%al, %dx
 	ret
 END(outb)
 
 ENTRY(outsb)
 	pushl	%esi
 	movl	8(%esp), %edx
 	movl	12(%esp), %esi
 	movl	16(%esp), %ecx
 	rep
 	outsb
 	popl	%esi
 	ret
 END(outsb)
 
 ENTRY(outw)
 	movl	4(%esp), %edx
 	movl	8(%esp), %eax
 	outw	%ax, %dx
 	ret
 END(outw)
 
 ENTRY(outsw)
 	pushl	%esi
 	movl	8(%esp), %edx
 	movl	12(%esp), %esi
 	movl	16(%esp), %ecx
 	rep
 	outsw
 	popl	%esi
 	ret
 END(outsw)
 
 ENTRY(outl)
 	movl	4(%esp), %edx
 	movl	8(%esp), %eax
 	outl	%eax, %dx
 	ret
 END(outl)
 
 ENTRY(outsl)
 	pushl	%esi
 	movl	8(%esp), %edx
 	movl	12(%esp), %esi
 	movl	16(%esp), %ecx
 	rep
 	outsl
 	popl	%esi
 	ret
 END(outsl)
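
The effect of the stts() change can be sketched in C: the Task Switched bit is ORed into %cr0 and the register rewritten only when TS is not already set, presumably to avoid a redundant write of %cr0 (a serializing operation). The sketch below is a stand-alone illustration, not NetBSD code: fake_cr0, cr0_writes and stts_new() are invented here to model the register, while CR0_TS carries its usual i386 value (bit 3, as defined in machine/specialreg.h).

	/*
	 * Stand-alone model of the new stts() behaviour: set CR0_TS,
	 * but skip the register write when the bit is already set.
	 */
	#include <stdio.h>
	#include <stdint.h>

	#define CR0_TS		0x00000008	/* i386 CR0 Task Switched bit */

	static uint32_t fake_cr0;	/* stands in for %cr0 */
	static int cr0_writes;		/* counts simulated "movl %eax, %cr0" */

	static void
	stts_new(void)
	{
		uint32_t cr0 = fake_cr0;	/* movl  %cr0, %eax */
		if (cr0 & CR0_TS)		/* testl $CR0_TS, %eax; jnz 1f */
			return;
		fake_cr0 = cr0 | CR0_TS;	/* orl $CR0_TS, %eax; movl %eax, %cr0 */
		cr0_writes++;
	}

	int
	main(void)
	{
		stts_new();	/* TS clear: one simulated register write */
		stts_new();	/* TS already set: no write */
		printf("cr0 writes: %d\n", cr0_writes);	/* prints 1 */
		return 0;
	}

Calling stts_new() twice performs only one simulated register write, which mirrors what the added testl/jnz pair does for the real stts() above.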