10fa1155a2
Currently we have a confused udelay implementation:

* __const_udelay does not accept usecs but xloops on i386 and x86_64
* our implementation requires usecs as its argument
* it gets an xloops count when called by asm/arch/delay.h

Bugs related to this (extremely long shutdown times) were reported by some x86_64 users, especially those using Device Mapper. To hit this bug, a compile-time-constant time parameter must be passed; that's why UML seems to work most of the time.

Fix this with a simple udelay implementation.

Signed-off-by: Paolo 'Blaisorblade' Giarrusso <blaisorblade@yahoo.it>
Acked-by: Jeff Dike <jdike@addtoit.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
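The commit shown here only touches the header; the loop body itself lives elsewhere in arch/um. As a rough, hedged sketch (not the exact code from this commit), a usecs-based __udelay built on the kernel's calibrated loops_per_jiffy could look like this:

/* Sketch only: assumes loops_per_jiffy, HZ and __delay() as provided
 * elsewhere by the kernel; the real arch/um implementation may differ. */
void __udelay(unsigned long usecs)
{
	unsigned long loops;

	/* Convert microseconds into busy-wait loop iterations. */
	loops = (loops_per_jiffy * HZ * usecs) / MILLION;
	__delay(loops);
}

The point of the fix is simply that the argument is interpreted as microseconds, matching what callers of udelay() actually pass, instead of the xloops count used by the i386/x86_64 __const_udelay.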
21 lines
495 B
C
#ifndef __UM_DELAY_H
#define __UM_DELAY_H

#define MILLION 1000000

/* Undefined on purpose */
extern void __bad_udelay(void);

extern void __udelay(unsigned long usecs);
extern void __delay(unsigned long loops);

#define udelay(n) ((__builtin_constant_p(n) && (n) > 20000) ? \
	__bad_udelay() : __udelay(n))

/* It appears that ndelay is not used at all for UML, and has never been
 * implemented. */
extern void __unimplemented_ndelay(void);
#define ndelay(n) __unimplemented_ndelay()

#endif
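The udelay macro above puts a ceiling on constant delays: if the argument is a compile-time constant greater than 20000 microseconds, the expansion calls __bad_udelay, which is declared but deliberately never defined, so the offending call fails at link time instead of silently busy-waiting for a long period. A small hypothetical caller (not part of the commit) illustrates the behaviour:

#include <asm/delay.h>		/* assumed include path for this header */

static void example(void)
{
	udelay(100);		/* fine: short, constant delay */
	udelay(50000);		/* constant > 20000: expands to __bad_udelay(),
				 * which is undefined, so the link fails */
}

Non-constant arguments always go through __udelay(), since __builtin_constant_p() evaluates to 0 for them; the trap only catches delays that are obviously too long at compile time.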