[PATCH] powerpc: Consolidate asm compatibility macros
This patch consolidates macros used to generate assembly for
compatibility across different CPUs or configs.  A new header,
asm-powerpc/asm-compat.h, contains the main compatibility macros.  It
uses some preprocessor magic to make the macros suitable both for use
in .S files and in inline asm in .c files.  Headers (bitops.h,
uaccess.h, atomic.h, bug.h) that had their own such compatibility
macros are changed to use asm-compat.h.
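
As an illustration (not from the patch itself; load_long is a
hypothetical helper): stringify_in_c() turns the mnemonic into a string
with a trailing space when compiled as C, so the same macro name can be
pasted into an inline-asm template in a .c file, or written as plain
text in a .S file:

	#include <asm/asm-compat.h>

	/* C side: PPC_LL is the string "ld " on 64-bit, "lwz " on 32-bit */
	static inline unsigned long load_long(const unsigned long *p)
	{
		unsigned long v;

		__asm__ __volatile__(PPC_LL "%0,0(%1)"
			: "=r" (v)
			: "r" (p), "m" (*p));
		return v;
	}

	/* .S side: the same macro name assembles directly, e.g.
	 *	PPC_LL	r4,0(r3)
	 */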

ppc_asm.h is now for use in .S files *only*, and a #error enforces
that.  As such, we're a lot more careless about namespace pollution
here than in asm-compat.h.
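
The enforcement is just a preprocessor check; a sketch of the idea (the
exact message in ppc_asm.h may differ):

	#ifndef __ASSEMBLY__
	#error "ppc_asm.h is for .S files only; use asm-compat.h from C code"
	#endif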

While we're at it, this patch adds a call to the PPC405_ERR77 macro in
futex.h, which should have had it already but didn't.

Built and booted on pSeries, Maple and iSeries (ARCH=powerpc).  Built
for 32-bit powermac (ARCH=powerpc) and Walnut (ARCH=ppc).

Signed-off-by: David Gibson <dwg@au1.ibm.com>
Signed-off-by: Paul Mackerras <paulus@samba.org>
dgibson authored and paulusmack committed Nov 10, 2005
1 parent f6d3577 commit 3ddfbcf
Showing 15 changed files with 210 additions and 210 deletions.
24 changes: 12 additions & 12 deletions arch/powerpc/kernel/fpu.S
@@ -41,20 +41,20 @@ _GLOBAL(load_up_fpu)
#ifndef CONFIG_SMP
LOADBASE(r3, last_task_used_math)
toreal(r3)
LDL r4,OFF(last_task_used_math)(r3)
CMPI 0,r4,0
PPC_LL r4,OFF(last_task_used_math)(r3)
PPC_LCMPI 0,r4,0
beq 1f
toreal(r4)
addi r4,r4,THREAD /* want last_task_used_math->thread */
SAVE_32FPRS(0, r4)
mffs fr0
stfd fr0,THREAD_FPSCR(r4)
LDL r5,PT_REGS(r4)
PPC_LL r5,PT_REGS(r4)
toreal(r5)
LDL r4,_MSR-STACK_FRAME_OVERHEAD(r5)
PPC_LL r4,_MSR-STACK_FRAME_OVERHEAD(r5)
li r10,MSR_FP|MSR_FE0|MSR_FE1
andc r4,r4,r10 /* disable FP for previous task */
STL r4,_MSR-STACK_FRAME_OVERHEAD(r5)
PPC_STL r4,_MSR-STACK_FRAME_OVERHEAD(r5)
1:
#endif /* CONFIG_SMP */
/* enable use of FP after return */
@@ -77,7 +77,7 @@ _GLOBAL(load_up_fpu)
#ifndef CONFIG_SMP
subi r4,r5,THREAD
fromreal(r4)
STL r4,OFF(last_task_used_math)(r3)
PPC_STL r4,OFF(last_task_used_math)(r3)
#endif /* CONFIG_SMP */
/* restore registers and return */
/* we haven't used ctr or xer or lr */
@@ -97,24 +97,24 @@ _GLOBAL(giveup_fpu)
MTMSRD(r5) /* enable use of fpu now */
SYNC_601
isync
CMPI 0,r3,0
PPC_LCMPI 0,r3,0
beqlr- /* if no previous owner, done */
addi r3,r3,THREAD /* want THREAD of task */
LDL r5,PT_REGS(r3)
CMPI 0,r5,0
PPC_LL r5,PT_REGS(r3)
PPC_LCMPI 0,r5,0
SAVE_32FPRS(0, r3)
mffs fr0
stfd fr0,THREAD_FPSCR(r3)
beq 1f
LDL r4,_MSR-STACK_FRAME_OVERHEAD(r5)
PPC_LL r4,_MSR-STACK_FRAME_OVERHEAD(r5)
li r3,MSR_FP|MSR_FE0|MSR_FE1
andc r4,r4,r3 /* disable FP for previous task */
STL r4,_MSR-STACK_FRAME_OVERHEAD(r5)
PPC_STL r4,_MSR-STACK_FRAME_OVERHEAD(r5)
1:
#ifndef CONFIG_SMP
li r5,0
LOADBASE(r4,last_task_used_math)
STL r5,OFF(last_task_used_math)(r4)
PPC_STL r5,OFF(last_task_used_math)(r4)
#endif /* CONFIG_SMP */
blr

1 change: 1 addition & 0 deletions arch/powerpc/platforms/iseries/misc.S
@@ -15,6 +15,7 @@

#include <asm/processor.h>
#include <asm/asm-offsets.h>
#include <asm/ppc_asm.h>

.text

176 changes: 88 additions & 88 deletions arch/powerpc/xmon/setjmp.S
@@ -14,61 +14,61 @@

_GLOBAL(xmon_setjmp)
mflr r0
STL r0,0(r3)
STL r1,SZL(r3)
STL r2,2*SZL(r3)
PPC_STL r0,0(r3)
PPC_STL r1,SZL(r3)
PPC_STL r2,2*SZL(r3)
mfcr r0
STL r0,3*SZL(r3)
STL r13,4*SZL(r3)
STL r14,5*SZL(r3)
STL r15,6*SZL(r3)
STL r16,7*SZL(r3)
STL r17,8*SZL(r3)
STL r18,9*SZL(r3)
STL r19,10*SZL(r3)
STL r20,11*SZL(r3)
STL r21,12*SZL(r3)
STL r22,13*SZL(r3)
STL r23,14*SZL(r3)
STL r24,15*SZL(r3)
STL r25,16*SZL(r3)
STL r26,17*SZL(r3)
STL r27,18*SZL(r3)
STL r28,19*SZL(r3)
STL r29,20*SZL(r3)
STL r30,21*SZL(r3)
STL r31,22*SZL(r3)
PPC_STL r0,3*SZL(r3)
PPC_STL r13,4*SZL(r3)
PPC_STL r14,5*SZL(r3)
PPC_STL r15,6*SZL(r3)
PPC_STL r16,7*SZL(r3)
PPC_STL r17,8*SZL(r3)
PPC_STL r18,9*SZL(r3)
PPC_STL r19,10*SZL(r3)
PPC_STL r20,11*SZL(r3)
PPC_STL r21,12*SZL(r3)
PPC_STL r22,13*SZL(r3)
PPC_STL r23,14*SZL(r3)
PPC_STL r24,15*SZL(r3)
PPC_STL r25,16*SZL(r3)
PPC_STL r26,17*SZL(r3)
PPC_STL r27,18*SZL(r3)
PPC_STL r28,19*SZL(r3)
PPC_STL r29,20*SZL(r3)
PPC_STL r30,21*SZL(r3)
PPC_STL r31,22*SZL(r3)
li r3,0
blr

_GLOBAL(xmon_longjmp)
CMPI r4,0
PPC_LCMPI r4,0
bne 1f
li r4,1
1: LDL r13,4*SZL(r3)
LDL r14,5*SZL(r3)
LDL r15,6*SZL(r3)
LDL r16,7*SZL(r3)
LDL r17,8*SZL(r3)
LDL r18,9*SZL(r3)
LDL r19,10*SZL(r3)
LDL r20,11*SZL(r3)
LDL r21,12*SZL(r3)
LDL r22,13*SZL(r3)
LDL r23,14*SZL(r3)
LDL r24,15*SZL(r3)
LDL r25,16*SZL(r3)
LDL r26,17*SZL(r3)
LDL r27,18*SZL(r3)
LDL r28,19*SZL(r3)
LDL r29,20*SZL(r3)
LDL r30,21*SZL(r3)
LDL r31,22*SZL(r3)
LDL r0,3*SZL(r3)
1: PPC_LL r13,4*SZL(r3)
PPC_LL r14,5*SZL(r3)
PPC_LL r15,6*SZL(r3)
PPC_LL r16,7*SZL(r3)
PPC_LL r17,8*SZL(r3)
PPC_LL r18,9*SZL(r3)
PPC_LL r19,10*SZL(r3)
PPC_LL r20,11*SZL(r3)
PPC_LL r21,12*SZL(r3)
PPC_LL r22,13*SZL(r3)
PPC_LL r23,14*SZL(r3)
PPC_LL r24,15*SZL(r3)
PPC_LL r25,16*SZL(r3)
PPC_LL r26,17*SZL(r3)
PPC_LL r27,18*SZL(r3)
PPC_LL r28,19*SZL(r3)
PPC_LL r29,20*SZL(r3)
PPC_LL r30,21*SZL(r3)
PPC_LL r31,22*SZL(r3)
PPC_LL r0,3*SZL(r3)
mtcrf 0x38,r0
LDL r0,0(r3)
LDL r1,SZL(r3)
LDL r2,2*SZL(r3)
PPC_LL r0,0(r3)
PPC_LL r1,SZL(r3)
PPC_LL r2,2*SZL(r3)
mtlr r0
mr r3,r4
blr
@@ -84,52 +84,52 @@ _GLOBAL(xmon_longjmp)
* different ABIs, though).
*/
_GLOBAL(xmon_save_regs)
STL r0,0*SZL(r3)
STL r2,2*SZL(r3)
STL r3,3*SZL(r3)
STL r4,4*SZL(r3)
STL r5,5*SZL(r3)
STL r6,6*SZL(r3)
STL r7,7*SZL(r3)
STL r8,8*SZL(r3)
STL r9,9*SZL(r3)
STL r10,10*SZL(r3)
STL r11,11*SZL(r3)
STL r12,12*SZL(r3)
STL r13,13*SZL(r3)
STL r14,14*SZL(r3)
STL r15,15*SZL(r3)
STL r16,16*SZL(r3)
STL r17,17*SZL(r3)
STL r18,18*SZL(r3)
STL r19,19*SZL(r3)
STL r20,20*SZL(r3)
STL r21,21*SZL(r3)
STL r22,22*SZL(r3)
STL r23,23*SZL(r3)
STL r24,24*SZL(r3)
STL r25,25*SZL(r3)
STL r26,26*SZL(r3)
STL r27,27*SZL(r3)
STL r28,28*SZL(r3)
STL r29,29*SZL(r3)
STL r30,30*SZL(r3)
STL r31,31*SZL(r3)
PPC_STL r0,0*SZL(r3)
PPC_STL r2,2*SZL(r3)
PPC_STL r3,3*SZL(r3)
PPC_STL r4,4*SZL(r3)
PPC_STL r5,5*SZL(r3)
PPC_STL r6,6*SZL(r3)
PPC_STL r7,7*SZL(r3)
PPC_STL r8,8*SZL(r3)
PPC_STL r9,9*SZL(r3)
PPC_STL r10,10*SZL(r3)
PPC_STL r11,11*SZL(r3)
PPC_STL r12,12*SZL(r3)
PPC_STL r13,13*SZL(r3)
PPC_STL r14,14*SZL(r3)
PPC_STL r15,15*SZL(r3)
PPC_STL r16,16*SZL(r3)
PPC_STL r17,17*SZL(r3)
PPC_STL r18,18*SZL(r3)
PPC_STL r19,19*SZL(r3)
PPC_STL r20,20*SZL(r3)
PPC_STL r21,21*SZL(r3)
PPC_STL r22,22*SZL(r3)
PPC_STL r23,23*SZL(r3)
PPC_STL r24,24*SZL(r3)
PPC_STL r25,25*SZL(r3)
PPC_STL r26,26*SZL(r3)
PPC_STL r27,27*SZL(r3)
PPC_STL r28,28*SZL(r3)
PPC_STL r29,29*SZL(r3)
PPC_STL r30,30*SZL(r3)
PPC_STL r31,31*SZL(r3)
/* go up one stack frame for SP */
LDL r4,0(r1)
STL r4,1*SZL(r3)
PPC_LL r4,0(r1)
PPC_STL r4,1*SZL(r3)
/* get caller's LR */
LDL r0,LRSAVE(r4)
STL r0,_NIP-STACK_FRAME_OVERHEAD(r3)
STL r0,_LINK-STACK_FRAME_OVERHEAD(r3)
PPC_LL r0,LRSAVE(r4)
PPC_STL r0,_NIP-STACK_FRAME_OVERHEAD(r3)
PPC_STL r0,_LINK-STACK_FRAME_OVERHEAD(r3)
mfmsr r0
STL r0,_MSR-STACK_FRAME_OVERHEAD(r3)
PPC_STL r0,_MSR-STACK_FRAME_OVERHEAD(r3)
mfctr r0
STL r0,_CTR-STACK_FRAME_OVERHEAD(r3)
PPC_STL r0,_CTR-STACK_FRAME_OVERHEAD(r3)
mfxer r0
STL r0,_XER-STACK_FRAME_OVERHEAD(r3)
PPC_STL r0,_XER-STACK_FRAME_OVERHEAD(r3)
mfcr r0
STL r0,_CCR-STACK_FRAME_OVERHEAD(r3)
PPC_STL r0,_CCR-STACK_FRAME_OVERHEAD(r3)
li r0,0
STL r0,_TRAP-STACK_FRAME_OVERHEAD(r3)
PPC_STL r0,_TRAP-STACK_FRAME_OVERHEAD(r3)
blr
3 changes: 1 addition & 2 deletions arch/ppc/boot/openfirmware/Makefile
@@ -80,8 +80,7 @@ $(obj)/note: $(utils)/mknote FORCE
$(call if_changed,mknote)


$(obj)/coffcrt0.o: EXTRA_AFLAGS := -traditional -DXCOFF
$(obj)/crt0.o: EXTRA_AFLAGS := -traditional
$(obj)/coffcrt0.o: EXTRA_AFLAGS := -DXCOFF
targets += coffcrt0.o crt0.o
$(obj)/coffcrt0.o $(obj)/crt0.o: $(common)/crt0.S FORCE
$(call if_changed_dep,as_o_S)
55 changes: 55 additions & 0 deletions include/asm-powerpc/asm-compat.h
@@ -0,0 +1,55 @@
#ifndef _ASM_POWERPC_ASM_COMPAT_H
#define _ASM_POWERPC_ASM_COMPAT_H

#include <linux/config.h>
#include <asm/types.h>

#ifdef __ASSEMBLY__
# define stringify_in_c(...) __VA_ARGS__
# define ASM_CONST(x) x
#else
/* This version of stringify will deal with commas... */
# define __stringify_in_c(...) #__VA_ARGS__
# define stringify_in_c(...) __stringify_in_c(__VA_ARGS__) " "
# define __ASM_CONST(x) x##UL
# define ASM_CONST(x) __ASM_CONST(x)
#endif

#ifdef __powerpc64__

/* operations for longs and pointers */
#define PPC_LL stringify_in_c(ld)
#define PPC_STL stringify_in_c(std)
#define PPC_LCMPI stringify_in_c(cmpdi)
#define PPC_LONG stringify_in_c(.llong)
#define PPC_TLNEI stringify_in_c(tdnei)
#define PPC_LLARX stringify_in_c(ldarx)
#define PPC_STLCX stringify_in_c(stdcx.)
#define PPC_CNTLZL stringify_in_c(cntlzd)

#else /* 32-bit */

/* operations for longs and pointers */
#define PPC_LL stringify_in_c(lwz)
#define PPC_STL stringify_in_c(stw)
#define PPC_LCMPI stringify_in_c(cmpwi)
#define PPC_LONG stringify_in_c(.long)
#define PPC_TLNEI stringify_in_c(twnei)
#define PPC_LLARX stringify_in_c(lwarx)
#define PPC_STLCX stringify_in_c(stwcx.)
#define PPC_CNTLZL stringify_in_c(cntlzw)

#endif

#ifdef CONFIG_IBM405_ERR77
/* Erratum #77 on the 405 means we need a sync or dcbt before every
* stwcx. The old ATOMIC_SYNC_FIX covered some but not all of this.
*/
#define PPC405_ERR77(ra,rb) stringify_in_c(dcbt ra, rb;)
#define PPC405_ERR77_SYNC stringify_in_c(sync;)
#else
#define PPC405_ERR77(ra,rb)
#define PPC405_ERR77_SYNC
#endif

#endif /* _ASM_POWERPC_ASM_COMPAT_H */
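
As a usage sketch (xchg_long is hypothetical, not code from this
commit): the pointer-sized reservation macros let one inline-asm body
serve both word sizes, and PPC405_ERR77 folds the erratum workaround in
only when CONFIG_IBM405_ERR77 is set:

	#include <asm/asm-compat.h>

	/* Atomically swap *p with val and return the old value.
	 * PPC_LLARX/PPC_STLCX become ldarx/stdcx. on 64-bit and
	 * lwarx/stwcx. on 32-bit; PPC405_ERR77 emits the dcbt (or
	 * nothing) right before the store-conditional. */
	static inline unsigned long xchg_long(volatile unsigned long *p,
					      unsigned long val)
	{
		unsigned long old;

		__asm__ __volatile__(
	"1:	" PPC_LLARX "%0,0,%2\n"		/* load and reserve */
		PPC405_ERR77(0, %2)
	"	" PPC_STLCX "%3,0,%2\n"		/* store iff reservation held */
	"	bne-	1b"			/* lost reservation: retry */
		: "=&r" (old), "+m" (*p)
		: "r" (p), "r" (val)
		: "cc", "memory");

		return old;
	}
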
10 changes: 1 addition & 9 deletions include/asm-powerpc/atomic.h
@@ -9,21 +9,13 @@ typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__
#include <asm/synch.h>
#include <asm/asm-compat.h>

#define ATOMIC_INIT(i) { (i) }

#define atomic_read(v) ((v)->counter)
#define atomic_set(v,i) (((v)->counter) = (i))

/* Erratum #77 on the 405 means we need a sync or dcbt before every stwcx.
* The old ATOMIC_SYNC_FIX covered some but not all of this.
*/
#ifdef CONFIG_IBM405_ERR77
#define PPC405_ERR77(ra,rb) "dcbt " #ra "," #rb ";"
#else
#define PPC405_ERR77(ra,rb)
#endif

static __inline__ void atomic_add(int a, atomic_t *v)
{
int t;
