2009-12-01	Till Straumann <strauman@slac.stanford.edu>

	* new-exceptions/cpu.c, new-exceptions/cpu_asm.S,
	new-exceptions/bspsupport/ppc_exc_asm_macros.h,
	new-exceptions/bspsupport/ppc_exc_initialize.c,
	new-exceptions/bspsupport/vectors.h:
	Added AltiVec support (save/restore volatile vregs
	across exceptions).
commit c7f8408d31 (parent fbee4ffdde)
Author: Till Straumann
Date:   2009-12-02 01:41:57 +00:00

6 changed files with 117 additions and 33 deletions
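Background for the entry above: under the AltiVec ABI, vector registers
v0..v19 are volatile (caller-saved), so an exception wrapper that may
dispatch C handlers has to spill exactly those, plus VRSAVE and VSCR,
before calling out. A hypothetical C view of the save area this commit
reserves (the real code works on raw offsets, see the vectors.h hunk
below; the struct and field names here are illustrative only):

    #include <stdint.h>

    #define PPC_CACHE_ALIGNMENT 32  /* assumed; vectors.h requires it */

    typedef struct {
      uint8_t vr[20][16];                /* v0..v19, 16 bytes each    */
      uint8_t ctl[PPC_CACHE_ALIGNMENT];  /* cache-aligned VSCR+VRSAVE */
      uint8_t pad[PPC_CACHE_ALIGNMENT];  /* slack to 16-byte align vr */
    } exc_altivec_area;  /* 16*20 + 2*32 == 384 == EXC_VEC_SIZE below */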

ChangeLog:

@@ -1,3 +1,12 @@
+2009-12-01	Till Straumann <strauman@slac.stanford.edu>
+
+	* new-exceptions/cpu.c, new-exceptions/cpu_asm.S,
+	new-exceptions/bspsupport/ppc_exc_asm_macros.h,
+	new-exceptions/bspsupport/ppc_exc_initialize.c,
+	new-exceptions/bspsupport/vectors.h:
+	Added AltiVec support (save/restore volatile vregs
+	across exceptions).
+
 2009-12-01	Till Straumann <strauman@slac.stanford.edu>
 
 	* Makefile.am, mpc6xx/altivec: new directory implementing

new-exceptions/bspsupport/ppc_exc_asm_macros.h:

@@ -520,6 +520,27 @@ wrap_disable_thread_dispatching_done_\_FLVR:
 wrap_change_msr_done_\_FLVR:
 
+#ifdef __ALTIVEC__
+	LA	SCRATCH_REGISTER_0, _CPU_save_altivec_volatile
+	mtctr	SCRATCH_REGISTER_0
+	addi	r3, FRAME_REGISTER, EXC_VEC_OFFSET
+	bctrl
+	/*
+	 * Establish defaults for vrsave and vscr
+	 */
+	li	SCRATCH_REGISTER_0, 0
+	mtvrsave	SCRATCH_REGISTER_0
+	/*
+	 * Use java/c9x mode; clear saturation bit
+	 */
+	vxor	0, 0, 0
+	mtvscr	0
+	/*
+	 * Reload VECTOR_REGISTER
+	 */
+	lwz	VECTOR_REGISTER, EXCEPTION_NUMBER_OFFSET(FRAME_REGISTER)
+#endif
+
 	/*
 	 * Call high level exception handler
 	 */
@@ -619,6 +640,13 @@ wrap_handler_done_\_FLVR:
 
 wrap_thread_dispatching_done_\_FLVR:
 
+#ifdef __ALTIVEC__
+	LA	SCRATCH_REGISTER_0, _CPU_load_altivec_volatile
+	mtctr	SCRATCH_REGISTER_0
+	addi	r3, FRAME_REGISTER, EXC_VEC_OFFSET
+	bctrl
+#endif
+
 	/* Restore MSR? */
 	bne	CR_MSR, wrap_restore_msr_\_FLVR
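A note on the wrapper code above: the helper's address goes through CTR
and is called with bctrl (LR is already saved in the frame), with r3
pointing at FRAME_REGISTER + EXC_VEC_OFFSET, i.e. the vector save area
inside the exception frame. After the save, VRSAVE is zeroed and VSCR
is cleared through v0 (mtvscr takes a vector operand), which selects
Java/C9X-compliant mode and clears the saturation bit; VECTOR_REGISTER
is then reloaded because the helper may clobber it. The helpers
themselves come from the new mpc6xx/altivec code (see the companion
ChangeLog entry); their contract, inferred from this call site only and
stated here as an assumption:

    /* Inferred prototypes -- r3 carries the single argument under the
     * PowerPC EABI, so each helper takes the save-area pointer that
     * the addi above computes. */
    void _CPU_save_altivec_volatile(void *vec_area); /* spill v0..v19,
                                                        VSCR, VRSAVE  */
    void _CPU_load_altivec_volatile(void *vec_area); /* reload them on
                                                        the way out   */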

new-exceptions/bspsupport/ppc_exc_initialize.c:

@@ -135,6 +135,11 @@ rtems_status_code ppc_exc_initialize(
   /* Use current MMU / RI settings when running C exception handlers */
   ppc_exc_msr_bits = ppc_machine_state_register() & (MSR_DR | MSR_IR | MSR_RI);
 
+#ifdef __ALTIVEC__
+  /* Need vector unit enabled to save/restore altivec context */
+  ppc_exc_msr_bits |= MSR_VE;
+#endif
+
   if (ppc_cpu_is(PPC_e200z6)) {
     ppc_exc_initialize_e200();
   } else if (ppc_cpu_is_bookE() == PPC_BOOKE_STD || ppc_cpu_is_bookE() == PPC_BOOKE_E500) {
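Why this bit matters: the save/restore paths and the mtvrsave/mtvscr
instructions in the wrapper execute under the MSR that ppc_exc_msr_bits
describes, and any AltiVec instruction issued while MSR[VE] is off
raises an AltiVec-unavailable exception. A minimal sketch of applying
the collected bits, assuming a hypothetical ppc_set_msr() helper (the
bit position of MSR_VE is also an assumption here):

    #include <stdint.h>

    #define MSR_VE (1u << 25)   /* AltiVec enable; assumed position */

    static inline void ppc_set_msr(uint32_t msr) /* hypothetical */
    {
      __asm__ volatile ("mtmsr %0; isync" : : "r" (msr) : "memory");
    }

    /* Before entering a C handler the wrapper is assumed to do, in
     * effect: ppc_set_msr(current_msr | ppc_exc_msr_bits); with this
     * commit that keeps the vector unit enabled across the call. */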

new-exceptions/bspsupport/vectors.h:

@@ -189,6 +189,22 @@
 #define EXC_XER_OFFSET 156
 #define EXC_LR_OFFSET 160
 
+#define EXC_GENERIC_SIZE 176
+
+#ifdef __ALTIVEC__
+#define EXC_VEC_OFFSET EXC_GENERIC_SIZE
+#ifndef PPC_CACHE_ALIGNMENT
+#error "Missing include file!"
+#endif
+/* 20 volatile registers
+ * + cache-aligned area for vscr, vrsave
+ * + area for alignment
+ */
+#define EXC_VEC_SIZE (16*20 + 2*PPC_CACHE_ALIGNMENT)
+#else
+#define EXC_VEC_SIZE (0)
+#endif
+
 /* Exception stack frame -> BSP_Exception_frame */
 #define FRAME_LINK_SPACE 8
@@ -197,7 +213,7 @@
  * As SVR4 ABI requires 16, make it 16 (as some
  * exception may need more registers to be processed...)
  */
-#define EXCEPTION_FRAME_END 176
+#define EXCEPTION_FRAME_END (EXC_GENERIC_SIZE + EXC_VEC_SIZE)
 
 /** @} */
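Worked numbers for the sizes above, assuming PPC_CACHE_ALIGNMENT == 32
(which the cpu_asm.S hunk below enforces for data-cache builds):
EXC_VEC_SIZE is 16*20 + 2*32 = 384 bytes, so EXCEPTION_FRAME_END grows
from 176 to 176 + 384 = 560 bytes on AltiVec builds, and stays at 176
otherwise. The algebra can be pinned down with the classic
negative-array-size compile-time check (illustrative only, not part of
the commit):

    /* Fails to compile if the frame-size relation ever breaks. */
    typedef char exc_frame_end_check
      [(EXCEPTION_FRAME_END == EXC_GENERIC_SIZE + EXC_VEC_SIZE) ? 1 : -1];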

new-exceptions/cpu.c:

@@ -33,6 +33,7 @@
 #include <rtems/score/context.h>
 #include <rtems/score/thread.h>
 #include <rtems/score/interr.h>
+#include <rtems/score/cpu.h>
 #include <rtems/powerpc/powerpc.h>
 
 /* _CPU_Initialize
@@ -45,6 +46,9 @@
 void _CPU_Initialize(void)
 {
   /* Do nothing */
+#ifdef __ALTIVEC__
+  _CPU_Initialize_altivec();
+#endif
 }
 
 /*PAGE
@@ -144,6 +148,10 @@ void _CPU_Context_Initialize(
 #else
 #error unsupported PPC_ABI
 #endif
+
+#ifdef __ALTIVEC__
+  _CPU_Context_initialize_altivec(the_context);
+#endif
 }
 
 /*PAGE
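Both hooks are expected to come from the new mpc6xx/altivec code in the
companion commit: _CPU_Initialize_altivec() sets up the vector unit
once at startup, and _CPU_Context_initialize_altivec() seeds the
per-thread vector state whenever a context is built. The added
<rtems/score/cpu.h> include is what makes their declarations visible
in this file. Prototypes as inferred from the call sites only:

    /* Inferred; the actual declarations are assumed to live in
     * <rtems/score/cpu.h> after this commit. */
    void _CPU_Initialize_altivec(void);
    void _CPU_Context_initialize_altivec(Context_Control *the_context);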

new-exceptions/cpu_asm.S:

@@ -293,52 +293,67 @@ PROC (_CPU_Context_switch):
 	sync
 	isync
 	/* This assumes that all the registers are in the given order */
-	li	r5, 32
-	addi	r3,r3,-4
 #if ( PPC_USE_DATA_CACHE )
-	dcbz	r5, r3
+#if PPC_CACHE_ALIGNMENT != 32
+#error "code assumes PPC_CACHE_ALIGNMENT == 32!"
 #endif
-	stw	r1, GP_1+4(r3)
-	stw	r2, GP_2+4(r3)
+	li	r5, PPC_CACHE_ALIGNMENT
+#endif
+	addi	r9,r3,-4
+#if ( PPC_USE_DATA_CACHE )
+	dcbz	r5, r9
+#endif
+	stw	r1, GP_1+4(r9)
+	stw	r2, GP_2+4(r9)
 #if (PPC_USE_MULTIPLE == 1)
-	addi	r3, r3, GP_18+4
+	addi	r9, r9, GP_18+4
 #if ( PPC_USE_DATA_CACHE )
-	dcbz	r5, r3
+	dcbz	r5, r9
 #endif
-	stmw	r13, GP_13-GP_18(r3)
+	stmw	r13, GP_13-GP_18(r9)
 #else
-	stw	r13, GP_13+4(r3)
-	stw	r14, GP_14+4(r3)
-	stw	r15, GP_15+4(r3)
-	stw	r16, GP_16+4(r3)
-	stw	r17, GP_17+4(r3)
-	stwu	r18, GP_18+4(r3)
+	stw	r13, GP_13+4(r9)
+	stw	r14, GP_14+4(r9)
+	stw	r15, GP_15+4(r9)
+	stw	r16, GP_16+4(r9)
+	stw	r17, GP_17+4(r9)
+	stwu	r18, GP_18+4(r9)
 #if ( PPC_USE_DATA_CACHE )
-	dcbz	r5, r3
+	dcbz	r5, r9
 #endif
-	stw	r19, GP_19-GP_18(r3)
-	stw	r20, GP_20-GP_18(r3)
-	stw	r21, GP_21-GP_18(r3)
-	stw	r22, GP_22-GP_18(r3)
-	stw	r23, GP_23-GP_18(r3)
-	stw	r24, GP_24-GP_18(r3)
-	stw	r25, GP_25-GP_18(r3)
-	stw	r26, GP_26-GP_18(r3)
-	stw	r27, GP_27-GP_18(r3)
-	stw	r28, GP_28-GP_18(r3)
-	stw	r29, GP_29-GP_18(r3)
-	stw	r30, GP_30-GP_18(r3)
-	stw	r31, GP_31-GP_18(r3)
+	stw	r19, GP_19-GP_18(r9)
+	stw	r20, GP_20-GP_18(r9)
+	stw	r21, GP_21-GP_18(r9)
+	stw	r22, GP_22-GP_18(r9)
+	stw	r23, GP_23-GP_18(r9)
+	stw	r24, GP_24-GP_18(r9)
+	stw	r25, GP_25-GP_18(r9)
+	stw	r26, GP_26-GP_18(r9)
+	stw	r27, GP_27-GP_18(r9)
+	stw	r28, GP_28-GP_18(r9)
+	stw	r29, GP_29-GP_18(r9)
+	stw	r30, GP_30-GP_18(r9)
+	stw	r31, GP_31-GP_18(r9)
 #endif
 #if ( PPC_USE_DATA_CACHE )
 	dcbt	r0, r4
 #endif
 	mfcr	r6
-	stw	r6, GP_CR-GP_18(r3)
+	stw	r6, GP_CR-GP_18(r9)
 	mflr	r7
-	stw	r7, GP_PC-GP_18(r3)
+	stw	r7, GP_PC-GP_18(r9)
 	mfmsr	r8
-	stw	r8, GP_MSR-GP_18(r3)
+	stw	r8, GP_MSR-GP_18(r9)
+
+#ifdef __ALTIVEC__
+	mr	r14, r4
+	EXTERN_PROC(_CPU_Context_switch_altivec)
+	bl	_CPU_Context_switch_altivec
+	mr	r4, r14
+#if ( PPC_USE_DATA_CACHE )
+	li	r5, PPC_CACHE_ALIGNMENT
+#endif
+#endif
 
 #if ( PPC_USE_DATA_CACHE )
 	dcbt	r5, r4
@@ -431,5 +446,8 @@ PROC (_CPU_Context_restore):
 	lwz	r30, GP_30(r3)
 	lwz	r31, GP_31(r3)
 #endif
+#ifdef __ALTIVEC__
+	EXTERN_PROC(_CPU_Context_restore_altivec)
+	b	_CPU_Context_restore_altivec
+#endif
 	blr
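Two calling-convention details in the hunks above are worth spelling
out. First, _CPU_Context_switch stashes r4 (the heir context) in r14
around the bl: r14 is non-volatile, so the callee must preserve it, and
it was already written into the outgoing context a few lines earlier,
so clobbering it is harmless; r5 is volatile and is reloaded afterwards
for the later dcbt. Second, _CPU_Context_restore ends in a plain b (not
bl) to _CPU_Context_restore_altivec: LR still holds the original
caller's return address, so the helper restores the vector state and
returns straight to that caller, taking the place of the blr, which is
never reached on AltiVec builds. The helpers' contract, inferred from
these sites and stated as an assumption only:

    /* r3 = executing (outgoing) context, r4 = heir (incoming) context */
    void _CPU_Context_switch_altivec(void *executing, void *heir);
    /* tail-called with r3 = context to restore; returns via caller LR */
    void _CPU_Context_restore_altivec(void *restored);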