powerpc: AltiVec and FPU context support

Add AltiVec and FPU support to the Context_Control in case we use the
e6500 multilib.

Add PPC_MULTILIB_ALTIVEC and PPC_MULTILIB_FPU multilib defines.  Add
non-volatile AltiVec and FPU context to Context_Control.  Add save/restore of
non-volatile AltiVec and FPU to _CPU_Context_switch().  Add save/restore
of volatile AltiVec and FPU context to the exception code.  Adjust data
cache optimizations for the new context and cache line size.
This commit is contained in:
Sebastian Huber
2014-12-23 14:18:06 +01:00
parent c279d0a33f
commit 3e2647a714
12 changed files with 1521 additions and 20 deletions

View File

@@ -426,6 +426,19 @@ wrap_no_save_frame_register_\_FLVR:
/* Check exception type and remember it in non-volatile CR_TYPE */ /* Check exception type and remember it in non-volatile CR_TYPE */
cmpwi CR_TYPE, VECTOR_REGISTER, 0 cmpwi CR_TYPE, VECTOR_REGISTER, 0
#if defined(PPC_MULTILIB_FPU) || defined(PPC_MULTILIB_ALTIVEC)
/* Enable FPU and/or AltiVec */
mfmsr SCRATCH_REGISTER_0
#ifdef PPC_MULTILIB_FPU
ori SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, MSR_FP
#endif
#ifdef PPC_MULTILIB_ALTIVEC
oris SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, MSR_VE >> 16
#endif
mtmsr SCRATCH_REGISTER_0
isync
#endif
/* /*
* Depending on the exception type we do now save the non-volatile * Depending on the exception type we do now save the non-volatile
* registers or disable thread dispatching and switch to the ISR stack. * registers or disable thread dispatching and switch to the ISR stack.
@@ -545,7 +558,7 @@ wrap_change_msr_done_\_FLVR:
#endif /* PPC_EXC_CONFIG_BOOKE_ONLY */ #endif /* PPC_EXC_CONFIG_BOOKE_ONLY */
#ifdef __ALTIVEC__ #if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC)
LA SCRATCH_REGISTER_0, _CPU_save_altivec_volatile LA SCRATCH_REGISTER_0, _CPU_save_altivec_volatile
mtctr SCRATCH_REGISTER_0 mtctr SCRATCH_REGISTER_0
addi r3, FRAME_REGISTER, EXC_VEC_OFFSET addi r3, FRAME_REGISTER, EXC_VEC_OFFSET
@@ -566,6 +579,71 @@ wrap_change_msr_done_\_FLVR:
lwz VECTOR_REGISTER, EXCEPTION_NUMBER_OFFSET(FRAME_REGISTER) lwz VECTOR_REGISTER, EXCEPTION_NUMBER_OFFSET(FRAME_REGISTER)
#endif #endif
#ifdef PPC_MULTILIB_ALTIVEC
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(0)
stvx v0, FRAME_REGISTER, SCRATCH_REGISTER_0
mfvscr v0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(1)
stvx v1, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(2)
stvx v2, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(3)
stvx v3, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(4)
stvx v4, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(5)
stvx v5, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(6)
stvx v6, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(7)
stvx v7, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(8)
stvx v8, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(9)
stvx v9, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(10)
stvx v10, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(11)
stvx v11, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(12)
stvx v12, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(13)
stvx v13, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(14)
stvx v14, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(15)
stvx v15, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(16)
stvx v16, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(17)
stvx v17, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(18)
stvx v18, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(19)
stvx v19, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VSCR_OFFSET
stvewx v0, FRAME_REGISTER, SCRATCH_REGISTER_0
#endif
#ifdef PPC_MULTILIB_FPU
stfd f0, PPC_EXC_FR_OFFSET(0)(FRAME_REGISTER)
mffs f0
stfd f1, PPC_EXC_FR_OFFSET(1)(FRAME_REGISTER)
stfd f2, PPC_EXC_FR_OFFSET(2)(FRAME_REGISTER)
stfd f3, PPC_EXC_FR_OFFSET(3)(FRAME_REGISTER)
stfd f4, PPC_EXC_FR_OFFSET(4)(FRAME_REGISTER)
stfd f5, PPC_EXC_FR_OFFSET(5)(FRAME_REGISTER)
stfd f6, PPC_EXC_FR_OFFSET(6)(FRAME_REGISTER)
stfd f7, PPC_EXC_FR_OFFSET(7)(FRAME_REGISTER)
stfd f8, PPC_EXC_FR_OFFSET(8)(FRAME_REGISTER)
stfd f9, PPC_EXC_FR_OFFSET(9)(FRAME_REGISTER)
stfd f10, PPC_EXC_FR_OFFSET(10)(FRAME_REGISTER)
stfd f11, PPC_EXC_FR_OFFSET(11)(FRAME_REGISTER)
stfd f12, PPC_EXC_FR_OFFSET(12)(FRAME_REGISTER)
stfd f13, PPC_EXC_FR_OFFSET(13)(FRAME_REGISTER)
stfd f0, PPC_EXC_FPSCR_OFFSET(FRAME_REGISTER)
#endif
/* /*
* Call high level exception handler * Call high level exception handler
*/ */
@@ -666,13 +744,78 @@ wrap_handler_done_\_FLVR:
wrap_thread_dispatching_done_\_FLVR: wrap_thread_dispatching_done_\_FLVR:
#ifdef __ALTIVEC__ #if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC)
LA SCRATCH_REGISTER_0, _CPU_load_altivec_volatile LA SCRATCH_REGISTER_0, _CPU_load_altivec_volatile
mtctr SCRATCH_REGISTER_0 mtctr SCRATCH_REGISTER_0
addi r3, FRAME_REGISTER, EXC_VEC_OFFSET addi r3, FRAME_REGISTER, EXC_VEC_OFFSET
bctrl bctrl
#endif #endif
#ifdef PPC_MULTILIB_ALTIVEC
li SCRATCH_REGISTER_0, PPC_EXC_VSCR_OFFSET
lvewx v0, FRAME_REGISTER, SCRATCH_REGISTER_0
mtvscr v0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(0)
lvx v0, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(1)
lvx v1, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(2)
lvx v2, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(3)
lvx v3, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(4)
lvx v4, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(5)
lvx v5, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(6)
lvx v6, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(7)
lvx v7, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(8)
lvx v8, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(9)
lvx v9, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(10)
lvx v10, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(11)
lvx v11, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(12)
lvx v12, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(13)
lvx v13, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(14)
lvx v14, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(15)
lvx v15, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(16)
lvx v16, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(17)
lvx v17, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(18)
lvx v18, FRAME_REGISTER, SCRATCH_REGISTER_0
li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(19)
lvx v19, FRAME_REGISTER, SCRATCH_REGISTER_0
#endif
#ifdef PPC_MULTILIB_FPU
lfd f0, PPC_EXC_FPSCR_OFFSET(FRAME_REGISTER)
mtfsf 0xff, f0
lfd f0, PPC_EXC_FR_OFFSET(0)(FRAME_REGISTER)
lfd f1, PPC_EXC_FR_OFFSET(1)(FRAME_REGISTER)
lfd f2, PPC_EXC_FR_OFFSET(2)(FRAME_REGISTER)
lfd f3, PPC_EXC_FR_OFFSET(3)(FRAME_REGISTER)
lfd f4, PPC_EXC_FR_OFFSET(4)(FRAME_REGISTER)
lfd f5, PPC_EXC_FR_OFFSET(5)(FRAME_REGISTER)
lfd f6, PPC_EXC_FR_OFFSET(6)(FRAME_REGISTER)
lfd f7, PPC_EXC_FR_OFFSET(7)(FRAME_REGISTER)
lfd f8, PPC_EXC_FR_OFFSET(8)(FRAME_REGISTER)
lfd f9, PPC_EXC_FR_OFFSET(9)(FRAME_REGISTER)
lfd f10, PPC_EXC_FR_OFFSET(10)(FRAME_REGISTER)
lfd f11, PPC_EXC_FR_OFFSET(11)(FRAME_REGISTER)
lfd f12, PPC_EXC_FR_OFFSET(12)(FRAME_REGISTER)
lfd f13, PPC_EXC_FR_OFFSET(13)(FRAME_REGISTER)
#endif
#ifndef PPC_EXC_CONFIG_BOOKE_ONLY #ifndef PPC_EXC_CONFIG_BOOKE_ONLY
/* Restore MSR? */ /* Restore MSR? */
@@ -801,6 +944,56 @@ wrap_save_non_volatile_regs_\_FLVR:
stw r31, GPR31_OFFSET(FRAME_REGISTER) stw r31, GPR31_OFFSET(FRAME_REGISTER)
#endif #endif
#ifdef PPC_MULTILIB_ALTIVEC
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(20)
stvx v20, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(21)
stvx v21, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(22)
stvx v22, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(23)
stvx v23, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(24)
stvx v24, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(25)
stvx v25, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(26)
stvx v26, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(27)
stvx v27, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(28)
stvx v28, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(29)
stvx v29, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(30)
stvx v30, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(31)
stvx v31, FRAME_REGISTER, SCRATCH_REGISTER_1
mfvrsave SCRATCH_REGISTER_1
stw SCRATCH_REGISTER_1, PPC_EXC_VRSAVE_OFFSET(FRAME_REGISTER)
#endif
#ifdef PPC_MULTILIB_FPU
stfd f14, PPC_EXC_FR_OFFSET(14)(FRAME_REGISTER)
stfd f15, PPC_EXC_FR_OFFSET(15)(FRAME_REGISTER)
stfd f16, PPC_EXC_FR_OFFSET(16)(FRAME_REGISTER)
stfd f17, PPC_EXC_FR_OFFSET(17)(FRAME_REGISTER)
stfd f18, PPC_EXC_FR_OFFSET(18)(FRAME_REGISTER)
stfd f19, PPC_EXC_FR_OFFSET(19)(FRAME_REGISTER)
stfd f20, PPC_EXC_FR_OFFSET(20)(FRAME_REGISTER)
stfd f21, PPC_EXC_FR_OFFSET(21)(FRAME_REGISTER)
stfd f22, PPC_EXC_FR_OFFSET(22)(FRAME_REGISTER)
stfd f23, PPC_EXC_FR_OFFSET(23)(FRAME_REGISTER)
stfd f24, PPC_EXC_FR_OFFSET(24)(FRAME_REGISTER)
stfd f25, PPC_EXC_FR_OFFSET(25)(FRAME_REGISTER)
stfd f26, PPC_EXC_FR_OFFSET(26)(FRAME_REGISTER)
stfd f27, PPC_EXC_FR_OFFSET(27)(FRAME_REGISTER)
stfd f28, PPC_EXC_FR_OFFSET(28)(FRAME_REGISTER)
stfd f29, PPC_EXC_FR_OFFSET(29)(FRAME_REGISTER)
stfd f30, PPC_EXC_FR_OFFSET(30)(FRAME_REGISTER)
stfd f31, PPC_EXC_FR_OFFSET(31)(FRAME_REGISTER)
#endif
b wrap_disable_thread_dispatching_done_\_FLVR b wrap_disable_thread_dispatching_done_\_FLVR
wrap_restore_non_volatile_regs_\_FLVR: wrap_restore_non_volatile_regs_\_FLVR:
@@ -839,6 +1032,56 @@ wrap_restore_non_volatile_regs_\_FLVR:
/* Restore stack pointer */ /* Restore stack pointer */
stw SCRATCH_REGISTER_0, 0(r1) stw SCRATCH_REGISTER_0, 0(r1)
#ifdef PPC_MULTILIB_ALTIVEC
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(20)
lvx v20, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(21)
lvx v21, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(22)
lvx v22, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(23)
lvx v23, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(24)
lvx v24, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(25)
lvx v25, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(26)
lvx v26, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(27)
lvx v27, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(28)
lvx v28, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(29)
lvx v29, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(30)
lvx v30, FRAME_REGISTER, SCRATCH_REGISTER_1
li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(31)
lvx v31, FRAME_REGISTER, SCRATCH_REGISTER_1
lwz SCRATCH_REGISTER_1, PPC_EXC_VRSAVE_OFFSET(FRAME_REGISTER)
mtvrsave SCRATCH_REGISTER_1
#endif
#ifdef PPC_MULTILIB_FPU
lfd f14, PPC_EXC_FR_OFFSET(14)(FRAME_REGISTER)
lfd f15, PPC_EXC_FR_OFFSET(15)(FRAME_REGISTER)
lfd f16, PPC_EXC_FR_OFFSET(16)(FRAME_REGISTER)
lfd f17, PPC_EXC_FR_OFFSET(17)(FRAME_REGISTER)
lfd f18, PPC_EXC_FR_OFFSET(18)(FRAME_REGISTER)
lfd f19, PPC_EXC_FR_OFFSET(19)(FRAME_REGISTER)
lfd f20, PPC_EXC_FR_OFFSET(20)(FRAME_REGISTER)
lfd f21, PPC_EXC_FR_OFFSET(21)(FRAME_REGISTER)
lfd f22, PPC_EXC_FR_OFFSET(22)(FRAME_REGISTER)
lfd f23, PPC_EXC_FR_OFFSET(23)(FRAME_REGISTER)
lfd f24, PPC_EXC_FR_OFFSET(24)(FRAME_REGISTER)
lfd f25, PPC_EXC_FR_OFFSET(25)(FRAME_REGISTER)
lfd f26, PPC_EXC_FR_OFFSET(26)(FRAME_REGISTER)
lfd f27, PPC_EXC_FR_OFFSET(27)(FRAME_REGISTER)
lfd f28, PPC_EXC_FR_OFFSET(28)(FRAME_REGISTER)
lfd f29, PPC_EXC_FR_OFFSET(29)(FRAME_REGISTER)
lfd f30, PPC_EXC_FR_OFFSET(30)(FRAME_REGISTER)
lfd f31, PPC_EXC_FR_OFFSET(31)(FRAME_REGISTER)
#endif
b wrap_thread_dispatching_done_\_FLVR b wrap_thread_dispatching_done_\_FLVR
wrap_call_global_handler_\_FLVR: wrap_call_global_handler_\_FLVR:

View File

@@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2011-2014 embedded brains GmbH. All rights reserved. * Copyright (c) 2011-2015 embedded brains GmbH. All rights reserved.
* *
* embedded brains GmbH * embedded brains GmbH
* Dornierstr. 4 * Dornierstr. 4
@@ -105,6 +105,19 @@ ppc_exc_wrap_async_normal:
isync isync
#endif #endif
#if defined(PPC_MULTILIB_FPU) || defined(PPC_MULTILIB_ALTIVEC)
/* Enable FPU and/or AltiVec */
mfmsr FRAME_REGISTER
#ifdef PPC_MULTILIB_FPU
ori FRAME_REGISTER, FRAME_REGISTER, MSR_FP
#endif
#ifdef PPC_MULTILIB_ALTIVEC
oris FRAME_REGISTER, FRAME_REGISTER, MSR_VE >> 16
#endif
mtmsr FRAME_REGISTER
isync
#endif
/* Move frame pointer to non-volatile FRAME_REGISTER */ /* Move frame pointer to non-volatile FRAME_REGISTER */
mr FRAME_REGISTER, r1 mr FRAME_REGISTER, r1
@@ -176,6 +189,73 @@ ppc_exc_wrap_async_normal:
evstdd SCRATCH_1_REGISTER, PPC_EXC_ACC_OFFSET(r1) evstdd SCRATCH_1_REGISTER, PPC_EXC_ACC_OFFSET(r1)
#endif #endif
#ifdef PPC_MULTILIB_ALTIVEC
/* Save volatile AltiVec context */
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(0)
stvx v0, r1, SCRATCH_0_REGISTER
mfvscr v0
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(1)
stvx v1, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(2)
stvx v2, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(3)
stvx v3, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(4)
stvx v4, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(5)
stvx v5, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(6)
stvx v6, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(7)
stvx v7, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(8)
stvx v8, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(9)
stvx v9, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(10)
stvx v10, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(11)
stvx v11, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(12)
stvx v12, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(13)
stvx v13, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(14)
stvx v14, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(15)
stvx v15, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(16)
stvx v16, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(17)
stvx v17, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(18)
stvx v18, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(19)
stvx v19, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VSCR_OFFSET
stvewx v0, r1, SCRATCH_0_REGISTER
#endif
#ifdef PPC_MULTILIB_FPU
/* Save volatile FPU context */
stfd f0, PPC_EXC_MIN_FR_OFFSET(0)(r1)
mffs f0
stfd f1, PPC_EXC_MIN_FR_OFFSET(1)(r1)
stfd f2, PPC_EXC_MIN_FR_OFFSET(2)(r1)
stfd f3, PPC_EXC_MIN_FR_OFFSET(3)(r1)
stfd f4, PPC_EXC_MIN_FR_OFFSET(4)(r1)
stfd f5, PPC_EXC_MIN_FR_OFFSET(5)(r1)
stfd f6, PPC_EXC_MIN_FR_OFFSET(6)(r1)
stfd f7, PPC_EXC_MIN_FR_OFFSET(7)(r1)
stfd f8, PPC_EXC_MIN_FR_OFFSET(8)(r1)
stfd f9, PPC_EXC_MIN_FR_OFFSET(9)(r1)
stfd f10, PPC_EXC_MIN_FR_OFFSET(10)(r1)
stfd f11, PPC_EXC_MIN_FR_OFFSET(11)(r1)
stfd f12, PPC_EXC_MIN_FR_OFFSET(12)(r1)
stfd f13, PPC_EXC_MIN_FR_OFFSET(13)(r1)
stfd f0, PPC_EXC_MIN_FPSCR_OFFSET(r1)
#endif
/* Increment ISR nest level and thread dispatch disable level */ /* Increment ISR nest level and thread dispatch disable level */
cmpwi ISR_NEST_REGISTER, 0 cmpwi ISR_NEST_REGISTER, 0
addi ISR_NEST_REGISTER, ISR_NEST_REGISTER, 1 addi ISR_NEST_REGISTER, ISR_NEST_REGISTER, 1
@@ -246,6 +326,73 @@ profiling_done:
bl _Thread_Dispatch bl _Thread_Dispatch
thread_dispatching_done: thread_dispatching_done:
#ifdef PPC_MULTILIB_ALTIVEC
/* Restore volatile AltiVec context */
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VSCR_OFFSET
lvewx v0, r1, SCRATCH_0_REGISTER
mtvscr v0
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(0)
lvx v0, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(1)
lvx v1, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(2)
lvx v2, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(3)
lvx v3, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(4)
lvx v4, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(5)
lvx v5, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(6)
lvx v6, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(7)
lvx v7, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(8)
lvx v8, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(9)
lvx v9, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(10)
lvx v10, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(11)
lvx v11, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(12)
lvx v12, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(13)
lvx v13, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(14)
lvx v14, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(15)
lvx v15, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(16)
lvx v16, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(17)
lvx v17, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(18)
lvx v18, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(19)
lvx v19, r1, SCRATCH_0_REGISTER
#endif
#ifdef PPC_MULTILIB_FPU
/* Restore volatile FPU context */
lfd f0, PPC_EXC_MIN_FPSCR_OFFSET(r1)
mtfsf 0xff, f0
lfd f0, PPC_EXC_MIN_FR_OFFSET(0)(r1)
lfd f1, PPC_EXC_MIN_FR_OFFSET(1)(r1)
lfd f2, PPC_EXC_MIN_FR_OFFSET(2)(r1)
lfd f3, PPC_EXC_MIN_FR_OFFSET(3)(r1)
lfd f4, PPC_EXC_MIN_FR_OFFSET(4)(r1)
lfd f5, PPC_EXC_MIN_FR_OFFSET(5)(r1)
lfd f6, PPC_EXC_MIN_FR_OFFSET(6)(r1)
lfd f7, PPC_EXC_MIN_FR_OFFSET(7)(r1)
lfd f8, PPC_EXC_MIN_FR_OFFSET(8)(r1)
lfd f9, PPC_EXC_MIN_FR_OFFSET(9)(r1)
lfd f10, PPC_EXC_MIN_FR_OFFSET(10)(r1)
lfd f11, PPC_EXC_MIN_FR_OFFSET(11)(r1)
lfd f12, PPC_EXC_MIN_FR_OFFSET(12)(r1)
lfd f13, PPC_EXC_MIN_FR_OFFSET(13)(r1)
#endif
#ifdef __SPE__ #ifdef __SPE__
/* Load SPEFSCR and ACC */ /* Load SPEFSCR and ACC */
lwz DISPATCH_LEVEL_REGISTER, PPC_EXC_SPEFSCR_OFFSET(r1) lwz DISPATCH_LEVEL_REGISTER, PPC_EXC_SPEFSCR_OFFSET(r1)

View File

@@ -36,6 +36,15 @@
#define PPC_EXC_ASSERT_CANONIC_OFFSET(field) \ #define PPC_EXC_ASSERT_CANONIC_OFFSET(field) \
PPC_EXC_ASSERT_OFFSET(field, field ## _OFFSET) PPC_EXC_ASSERT_OFFSET(field, field ## _OFFSET)
#define PPC_EXC_MIN_ASSERT_OFFSET(field, off) \
RTEMS_STATIC_ASSERT( \
offsetof(ppc_exc_min_frame, field) + FRAME_LINK_SPACE == off, \
ppc_exc_min_frame_offset_ ## field \
)
#define PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(field) \
PPC_EXC_MIN_ASSERT_OFFSET(field, field ## _OFFSET)
PPC_EXC_ASSERT_OFFSET(EXC_SRR0, SRR0_FRAME_OFFSET); PPC_EXC_ASSERT_OFFSET(EXC_SRR0, SRR0_FRAME_OFFSET);
PPC_EXC_ASSERT_OFFSET(EXC_SRR1, SRR1_FRAME_OFFSET); PPC_EXC_ASSERT_OFFSET(EXC_SRR1, SRR1_FRAME_OFFSET);
PPC_EXC_ASSERT_OFFSET(_EXC_number, EXCEPTION_NUMBER_OFFSET); PPC_EXC_ASSERT_OFFSET(_EXC_number, EXCEPTION_NUMBER_OFFSET);
@@ -80,6 +89,145 @@ PPC_EXC_ASSERT_CANONIC_OFFSET(GPR29);
PPC_EXC_ASSERT_CANONIC_OFFSET(GPR30); PPC_EXC_ASSERT_CANONIC_OFFSET(GPR30);
PPC_EXC_ASSERT_CANONIC_OFFSET(GPR31); PPC_EXC_ASSERT_CANONIC_OFFSET(GPR31);
PPC_EXC_MIN_ASSERT_OFFSET(EXC_SRR0, SRR0_FRAME_OFFSET);
PPC_EXC_MIN_ASSERT_OFFSET(EXC_SRR1, SRR1_FRAME_OFFSET);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(EXC_CR);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(EXC_CTR);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(EXC_XER);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(EXC_LR);
#ifdef __SPE__
PPC_EXC_MIN_ASSERT_OFFSET(EXC_SPEFSCR, PPC_EXC_SPEFSCR_OFFSET);
PPC_EXC_MIN_ASSERT_OFFSET(EXC_ACC, PPC_EXC_ACC_OFFSET);
#endif
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR0);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR1);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR2);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR3);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR4);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR5);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR6);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR7);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR8);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR9);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR10);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR11);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR12);
#ifdef PPC_MULTILIB_ALTIVEC
PPC_EXC_ASSERT_OFFSET(VSCR, PPC_EXC_VSCR_OFFSET);
PPC_EXC_ASSERT_OFFSET(VRSAVE, PPC_EXC_VRSAVE_OFFSET);
RTEMS_STATIC_ASSERT(PPC_EXC_VR_OFFSET(0) % 16 == 0, PPC_EXC_VR_OFFSET);
PPC_EXC_ASSERT_OFFSET(V0, PPC_EXC_VR_OFFSET(0));
PPC_EXC_ASSERT_OFFSET(V1, PPC_EXC_VR_OFFSET(1));
PPC_EXC_ASSERT_OFFSET(V2, PPC_EXC_VR_OFFSET(2));
PPC_EXC_ASSERT_OFFSET(V3, PPC_EXC_VR_OFFSET(3));
PPC_EXC_ASSERT_OFFSET(V4, PPC_EXC_VR_OFFSET(4));
PPC_EXC_ASSERT_OFFSET(V5, PPC_EXC_VR_OFFSET(5));
PPC_EXC_ASSERT_OFFSET(V6, PPC_EXC_VR_OFFSET(6));
PPC_EXC_ASSERT_OFFSET(V7, PPC_EXC_VR_OFFSET(7));
PPC_EXC_ASSERT_OFFSET(V8, PPC_EXC_VR_OFFSET(8));
PPC_EXC_ASSERT_OFFSET(V9, PPC_EXC_VR_OFFSET(9));
PPC_EXC_ASSERT_OFFSET(V10, PPC_EXC_VR_OFFSET(10));
PPC_EXC_ASSERT_OFFSET(V11, PPC_EXC_VR_OFFSET(11));
PPC_EXC_ASSERT_OFFSET(V12, PPC_EXC_VR_OFFSET(12));
PPC_EXC_ASSERT_OFFSET(V13, PPC_EXC_VR_OFFSET(13));
PPC_EXC_ASSERT_OFFSET(V14, PPC_EXC_VR_OFFSET(14));
PPC_EXC_ASSERT_OFFSET(V15, PPC_EXC_VR_OFFSET(15));
PPC_EXC_ASSERT_OFFSET(V16, PPC_EXC_VR_OFFSET(16));
PPC_EXC_ASSERT_OFFSET(V17, PPC_EXC_VR_OFFSET(17));
PPC_EXC_ASSERT_OFFSET(V18, PPC_EXC_VR_OFFSET(18));
PPC_EXC_ASSERT_OFFSET(V19, PPC_EXC_VR_OFFSET(19));
PPC_EXC_ASSERT_OFFSET(V20, PPC_EXC_VR_OFFSET(20));
PPC_EXC_ASSERT_OFFSET(V21, PPC_EXC_VR_OFFSET(21));
PPC_EXC_ASSERT_OFFSET(V22, PPC_EXC_VR_OFFSET(22));
PPC_EXC_ASSERT_OFFSET(V23, PPC_EXC_VR_OFFSET(23));
PPC_EXC_ASSERT_OFFSET(V24, PPC_EXC_VR_OFFSET(24));
PPC_EXC_ASSERT_OFFSET(V25, PPC_EXC_VR_OFFSET(25));
PPC_EXC_ASSERT_OFFSET(V26, PPC_EXC_VR_OFFSET(26));
PPC_EXC_ASSERT_OFFSET(V27, PPC_EXC_VR_OFFSET(27));
PPC_EXC_ASSERT_OFFSET(V28, PPC_EXC_VR_OFFSET(28));
PPC_EXC_ASSERT_OFFSET(V29, PPC_EXC_VR_OFFSET(29));
PPC_EXC_ASSERT_OFFSET(V30, PPC_EXC_VR_OFFSET(30));
PPC_EXC_ASSERT_OFFSET(V31, PPC_EXC_VR_OFFSET(31));
PPC_EXC_MIN_ASSERT_OFFSET(VSCR, PPC_EXC_MIN_VSCR_OFFSET);
RTEMS_STATIC_ASSERT(PPC_EXC_MIN_VR_OFFSET(0) % 16 == 0, PPC_EXC_MIN_VR_OFFSET);
PPC_EXC_MIN_ASSERT_OFFSET(V0, PPC_EXC_MIN_VR_OFFSET(0));
PPC_EXC_MIN_ASSERT_OFFSET(V1, PPC_EXC_MIN_VR_OFFSET(1));
PPC_EXC_MIN_ASSERT_OFFSET(V2, PPC_EXC_MIN_VR_OFFSET(2));
PPC_EXC_MIN_ASSERT_OFFSET(V3, PPC_EXC_MIN_VR_OFFSET(3));
PPC_EXC_MIN_ASSERT_OFFSET(V4, PPC_EXC_MIN_VR_OFFSET(4));
PPC_EXC_MIN_ASSERT_OFFSET(V5, PPC_EXC_MIN_VR_OFFSET(5));
PPC_EXC_MIN_ASSERT_OFFSET(V6, PPC_EXC_MIN_VR_OFFSET(6));
PPC_EXC_MIN_ASSERT_OFFSET(V7, PPC_EXC_MIN_VR_OFFSET(7));
PPC_EXC_MIN_ASSERT_OFFSET(V8, PPC_EXC_MIN_VR_OFFSET(8));
PPC_EXC_MIN_ASSERT_OFFSET(V9, PPC_EXC_MIN_VR_OFFSET(9));
PPC_EXC_MIN_ASSERT_OFFSET(V10, PPC_EXC_MIN_VR_OFFSET(10));
PPC_EXC_MIN_ASSERT_OFFSET(V11, PPC_EXC_MIN_VR_OFFSET(11));
PPC_EXC_MIN_ASSERT_OFFSET(V12, PPC_EXC_MIN_VR_OFFSET(12));
PPC_EXC_MIN_ASSERT_OFFSET(V13, PPC_EXC_MIN_VR_OFFSET(13));
PPC_EXC_MIN_ASSERT_OFFSET(V14, PPC_EXC_MIN_VR_OFFSET(14));
PPC_EXC_MIN_ASSERT_OFFSET(V15, PPC_EXC_MIN_VR_OFFSET(15));
PPC_EXC_MIN_ASSERT_OFFSET(V16, PPC_EXC_MIN_VR_OFFSET(16));
PPC_EXC_MIN_ASSERT_OFFSET(V17, PPC_EXC_MIN_VR_OFFSET(17));
PPC_EXC_MIN_ASSERT_OFFSET(V18, PPC_EXC_MIN_VR_OFFSET(18));
PPC_EXC_MIN_ASSERT_OFFSET(V19, PPC_EXC_MIN_VR_OFFSET(19));
#endif
#ifdef PPC_MULTILIB_FPU
RTEMS_STATIC_ASSERT(PPC_EXC_FR_OFFSET(0) % 8 == 0, PPC_EXC_FR_OFFSET);
PPC_EXC_ASSERT_OFFSET(F0, PPC_EXC_FR_OFFSET(0));
PPC_EXC_ASSERT_OFFSET(F1, PPC_EXC_FR_OFFSET(1));
PPC_EXC_ASSERT_OFFSET(F2, PPC_EXC_FR_OFFSET(2));
PPC_EXC_ASSERT_OFFSET(F3, PPC_EXC_FR_OFFSET(3));
PPC_EXC_ASSERT_OFFSET(F4, PPC_EXC_FR_OFFSET(4));
PPC_EXC_ASSERT_OFFSET(F5, PPC_EXC_FR_OFFSET(5));
PPC_EXC_ASSERT_OFFSET(F6, PPC_EXC_FR_OFFSET(6));
PPC_EXC_ASSERT_OFFSET(F7, PPC_EXC_FR_OFFSET(7));
PPC_EXC_ASSERT_OFFSET(F8, PPC_EXC_FR_OFFSET(8));
PPC_EXC_ASSERT_OFFSET(F9, PPC_EXC_FR_OFFSET(9));
PPC_EXC_ASSERT_OFFSET(F10, PPC_EXC_FR_OFFSET(10));
PPC_EXC_ASSERT_OFFSET(F11, PPC_EXC_FR_OFFSET(11));
PPC_EXC_ASSERT_OFFSET(F12, PPC_EXC_FR_OFFSET(12));
PPC_EXC_ASSERT_OFFSET(F13, PPC_EXC_FR_OFFSET(13));
PPC_EXC_ASSERT_OFFSET(F14, PPC_EXC_FR_OFFSET(14));
PPC_EXC_ASSERT_OFFSET(F15, PPC_EXC_FR_OFFSET(15));
PPC_EXC_ASSERT_OFFSET(F16, PPC_EXC_FR_OFFSET(16));
PPC_EXC_ASSERT_OFFSET(F17, PPC_EXC_FR_OFFSET(17));
PPC_EXC_ASSERT_OFFSET(F18, PPC_EXC_FR_OFFSET(18));
PPC_EXC_ASSERT_OFFSET(F19, PPC_EXC_FR_OFFSET(19));
PPC_EXC_ASSERT_OFFSET(F20, PPC_EXC_FR_OFFSET(20));
PPC_EXC_ASSERT_OFFSET(F21, PPC_EXC_FR_OFFSET(21));
PPC_EXC_ASSERT_OFFSET(F22, PPC_EXC_FR_OFFSET(22));
PPC_EXC_ASSERT_OFFSET(F23, PPC_EXC_FR_OFFSET(23));
PPC_EXC_ASSERT_OFFSET(F24, PPC_EXC_FR_OFFSET(24));
PPC_EXC_ASSERT_OFFSET(F25, PPC_EXC_FR_OFFSET(25));
PPC_EXC_ASSERT_OFFSET(F26, PPC_EXC_FR_OFFSET(26));
PPC_EXC_ASSERT_OFFSET(F27, PPC_EXC_FR_OFFSET(27));
PPC_EXC_ASSERT_OFFSET(F28, PPC_EXC_FR_OFFSET(28));
PPC_EXC_ASSERT_OFFSET(F29, PPC_EXC_FR_OFFSET(29));
PPC_EXC_ASSERT_OFFSET(F30, PPC_EXC_FR_OFFSET(30));
PPC_EXC_ASSERT_OFFSET(F31, PPC_EXC_FR_OFFSET(31));
PPC_EXC_ASSERT_OFFSET(FPSCR, PPC_EXC_FPSCR_OFFSET);
RTEMS_STATIC_ASSERT(PPC_EXC_MIN_FR_OFFSET(0) % 8 == 0, PPC_EXC_MIN_FR_OFFSET);
PPC_EXC_MIN_ASSERT_OFFSET(F0, PPC_EXC_MIN_FR_OFFSET(0));
PPC_EXC_MIN_ASSERT_OFFSET(F1, PPC_EXC_MIN_FR_OFFSET(1));
PPC_EXC_MIN_ASSERT_OFFSET(F2, PPC_EXC_MIN_FR_OFFSET(2));
PPC_EXC_MIN_ASSERT_OFFSET(F3, PPC_EXC_MIN_FR_OFFSET(3));
PPC_EXC_MIN_ASSERT_OFFSET(F4, PPC_EXC_MIN_FR_OFFSET(4));
PPC_EXC_MIN_ASSERT_OFFSET(F5, PPC_EXC_MIN_FR_OFFSET(5));
PPC_EXC_MIN_ASSERT_OFFSET(F6, PPC_EXC_MIN_FR_OFFSET(6));
PPC_EXC_MIN_ASSERT_OFFSET(F7, PPC_EXC_MIN_FR_OFFSET(7));
PPC_EXC_MIN_ASSERT_OFFSET(F8, PPC_EXC_MIN_FR_OFFSET(8));
PPC_EXC_MIN_ASSERT_OFFSET(F9, PPC_EXC_MIN_FR_OFFSET(9));
PPC_EXC_MIN_ASSERT_OFFSET(F10, PPC_EXC_MIN_FR_OFFSET(10));
PPC_EXC_MIN_ASSERT_OFFSET(F11, PPC_EXC_MIN_FR_OFFSET(11));
PPC_EXC_MIN_ASSERT_OFFSET(F12, PPC_EXC_MIN_FR_OFFSET(12));
PPC_EXC_MIN_ASSERT_OFFSET(F13, PPC_EXC_MIN_FR_OFFSET(13));
PPC_EXC_MIN_ASSERT_OFFSET(FPSCR, PPC_EXC_MIN_FPSCR_OFFSET);
#endif
RTEMS_STATIC_ASSERT( RTEMS_STATIC_ASSERT(
PPC_EXC_MINIMAL_FRAME_SIZE % CPU_STACK_ALIGNMENT == 0, PPC_EXC_MINIMAL_FRAME_SIZE % CPU_STACK_ALIGNMENT == 0,
PPC_EXC_MINIMAL_FRAME_SIZE PPC_EXC_MINIMAL_FRAME_SIZE

View File

@@ -179,6 +179,42 @@ void _CPU_Exception_frame_print(const CPU_Exception_frame *excPtr)
printk(" MCSR = 0x%08x\n", mcsr); printk(" MCSR = 0x%08x\n", mcsr);
} }
#ifdef PPC_MULTILIB_ALTIVEC
{
unsigned char *v = (unsigned char *) &excPtr->V0;
int i;
int j;
printk(" VSCR = 0x%08x\n", excPtr->VSCR);
printk("VRSAVE = 0x%08x\n", excPtr->VRSAVE);
for (i = 0; i < 32; ++i) {
printk(" V%02i = 0x", i);
for (j = 0; j < 16; ++j) {
printk("%02x", v[j]);
}
printk("\n");
v += 16;
}
}
#endif
#ifdef PPC_MULTILIB_FPU
{
unsigned long long *f = (unsigned long long *) &excPtr->F0;
int i;
printk("FPSCR = 0x%08llx\n", excPtr->FPSCR);
for (i = 0; i < 32; ++i) {
printk(" F%02i = 0x%016llx\n", i, f[i]);
}
}
#endif
if (executing != NULL) { if (executing != NULL) {
const char *name = (const char *) &executing->Object.name; const char *name = (const char *) &executing->Object.name;

View File

@@ -146,8 +146,37 @@ extern "C" {
#ifndef __SPE__ #ifndef __SPE__
#define PPC_EXC_GPR_OFFSET(gpr) ((gpr) * PPC_GPR_SIZE + 36) #define PPC_EXC_GPR_OFFSET(gpr) ((gpr) * PPC_GPR_SIZE + 36)
#define PPC_EXC_VECTOR_PROLOGUE_OFFSET PPC_EXC_GPR_OFFSET(4) #define PPC_EXC_VECTOR_PROLOGUE_OFFSET PPC_EXC_GPR_OFFSET(4)
#define PPC_EXC_MINIMAL_FRAME_SIZE 96 #if defined(PPC_MULTILIB_ALTIVEC) && defined(PPC_MULTILIB_FPU)
#define PPC_EXC_FRAME_SIZE 176 #define PPC_EXC_VSCR_OFFSET 168
#define PPC_EXC_VRSAVE_OFFSET 172
#define PPC_EXC_VR_OFFSET(v) ((v) * 16 + 176)
#define PPC_EXC_FR_OFFSET(f) ((f) * 8 + 688)
#define PPC_EXC_FPSCR_OFFSET 944
#define PPC_EXC_FRAME_SIZE 960
#define PPC_EXC_MIN_VSCR_OFFSET 92
#define PPC_EXC_MIN_VR_OFFSET(v) ((v) * 16 + 96)
#define PPC_EXC_MIN_FR_OFFSET(f) ((f) * 8 + 416)
#define PPC_EXC_MIN_FPSCR_OFFSET 528
#define PPC_EXC_MINIMAL_FRAME_SIZE 544
#elif defined(PPC_MULTILIB_ALTIVEC)
#define PPC_EXC_VSCR_OFFSET 168
#define PPC_EXC_VRSAVE_OFFSET 172
#define PPC_EXC_VR_OFFSET(v) ((v) * 16 + 176)
#define PPC_EXC_FRAME_SIZE 688
#define PPC_EXC_MIN_VSCR_OFFSET 92
#define PPC_EXC_MIN_VR_OFFSET(v) ((v) * 16 + 96)
#define PPC_EXC_MINIMAL_FRAME_SIZE 416
#elif defined(PPC_MULTILIB_FPU)
#define PPC_EXC_FR_OFFSET(f) ((f) * 8 + 168)
#define PPC_EXC_FPSCR_OFFSET 424
#define PPC_EXC_FRAME_SIZE 448
#define PPC_EXC_MIN_FR_OFFSET(f) ((f) * 8 + 96)
#define PPC_EXC_MIN_FPSCR_OFFSET 208
#define PPC_EXC_MINIMAL_FRAME_SIZE 224
#else
#define PPC_EXC_FRAME_SIZE 176
#define PPC_EXC_MINIMAL_FRAME_SIZE 96
#endif
#else #else
#define PPC_EXC_SPEFSCR_OFFSET 36 #define PPC_EXC_SPEFSCR_OFFSET 36
#define PPC_EXC_ACC_OFFSET 40 #define PPC_EXC_ACC_OFFSET 40
@@ -214,7 +243,7 @@ extern "C" {
#define EXC_GENERIC_SIZE PPC_EXC_FRAME_SIZE #define EXC_GENERIC_SIZE PPC_EXC_FRAME_SIZE
#ifdef __ALTIVEC__ #if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC)
#define EXC_VEC_OFFSET EXC_GENERIC_SIZE #define EXC_VEC_OFFSET EXC_GENERIC_SIZE
#ifndef PPC_CACHE_ALIGNMENT #ifndef PPC_CACHE_ALIGNMENT
#error "Missing include file!" #error "Missing include file!"
@@ -248,6 +277,77 @@ extern "C" {
* @{ * @{
*/ */
/*
 * Minimal exception stack frame used by the asynchronous-normal exception
 * entry path.  Field offsets (plus FRAME_LINK_SPACE) must agree with the
 * PPC_EXC_MIN_*_OFFSET defines; this is enforced by the
 * PPC_EXC_MIN_ASSERT_OFFSET()/PPC_EXC_MIN_ASSERT_CANONIC_OFFSET() static
 * assertions.  Do not reorder or resize fields without updating those
 * offsets.
 */
typedef struct {
  /* Machine state at the interrupt: save/restore registers 0 and 1 */
  uint32_t EXC_SRR0;
  uint32_t EXC_SRR1;
  /* Padding word (keeps the following fields at their asserted offsets) */
  uint32_t unused;
  /* Volatile special-purpose registers */
  uint32_t EXC_CR;
  uint32_t EXC_CTR;
  uint32_t EXC_XER;
  uint32_t EXC_LR;
#ifdef __SPE__
  /* SPE status/control and 64-bit accumulator (SPE multilib only) */
  uint32_t EXC_SPEFSCR;
  uint64_t EXC_ACC;
#endif
  /* Volatile general-purpose registers (r0-r12) */
  PPC_GPR_TYPE GPR0;
  PPC_GPR_TYPE GPR1;
  PPC_GPR_TYPE GPR2;
  PPC_GPR_TYPE GPR3;
  PPC_GPR_TYPE GPR4;
  PPC_GPR_TYPE GPR5;
  PPC_GPR_TYPE GPR6;
  PPC_GPR_TYPE GPR7;
  PPC_GPR_TYPE GPR8;
  PPC_GPR_TYPE GPR9;
  PPC_GPR_TYPE GPR10;
  PPC_GPR_TYPE GPR11;
  PPC_GPR_TYPE GPR12;
  /* NOTE(review): presumably the profiling entry instant — confirm */
  uint32_t EARLY_INSTANT;
#ifdef PPC_MULTILIB_ALTIVEC
  /* Vector status/control; the 16-byte V fields that follow also pad the
     frame so that V0 lands on a 16-byte boundary (asserted via
     PPC_EXC_MIN_VR_OFFSET(0) % 16 == 0) */
  uint32_t VSCR;
  /* Volatile AltiVec vector registers v0-v19 */
  uint8_t V0[16];
  uint8_t V1[16];
  uint8_t V2[16];
  uint8_t V3[16];
  uint8_t V4[16];
  uint8_t V5[16];
  uint8_t V6[16];
  uint8_t V7[16];
  uint8_t V8[16];
  uint8_t V9[16];
  uint8_t V10[16];
  uint8_t V11[16];
  uint8_t V12[16];
  uint8_t V13[16];
  uint8_t V14[16];
  uint8_t V15[16];
  uint8_t V16[16];
  uint8_t V17[16];
  uint8_t V18[16];
  uint8_t V19[16];
#endif
#ifdef PPC_MULTILIB_FPU
#ifndef PPC_MULTILIB_ALTIVEC
  /* Without the (16-byte aligned) AltiVec area above, this word keeps F0
     on an 8-byte boundary (asserted via PPC_EXC_MIN_FR_OFFSET(0) % 8 == 0) */
  uint32_t reserved_for_alignment;
#endif
  /* Volatile floating-point registers f0-f13 */
  double F0;
  double F1;
  double F2;
  double F3;
  double F4;
  double F5;
  double F6;
  double F7;
  double F8;
  double F9;
  double F10;
  double F11;
  double F12;
  double F13;
  /* FP status/control register (stored via stfd, hence 64 bits) */
  uint64_t FPSCR;
#endif
} ppc_exc_min_frame;
typedef CPU_Exception_frame BSP_Exception_frame; typedef CPU_Exception_frame BSP_Exception_frame;
/** @} */ /** @} */

View File

@@ -45,7 +45,7 @@
*/ */
void _CPU_Initialize(void) void _CPU_Initialize(void)
{ {
#ifdef __ALTIVEC__ #if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC)
_CPU_Initialize_altivec(); _CPU_Initialize_altivec();
#endif #endif
} }
@@ -75,6 +75,8 @@ void _CPU_Context_Initialize(
_CPU_MSR_GET( msr_value ); _CPU_MSR_GET( msr_value );
the_ppc_context = ppc_get_context( the_context );
/* /*
* Setting the interrupt mask here is not strictly necessary * Setting the interrupt mask here is not strictly necessary
* since the IRQ level will be established from _Thread_Handler() * since the IRQ level will be established from _Thread_Handler()
@@ -95,6 +97,9 @@ void _CPU_Context_Initialize(
msr_value &= ~ppc_interrupt_get_disable_mask(); msr_value &= ~ppc_interrupt_get_disable_mask();
} }
#ifdef PPC_MULTILIB_FPU
msr_value |= MSR_FP;
#else
/* /*
* The FP bit of the MSR should only be enabled if this is a floating * The FP bit of the MSR should only be enabled if this is a floating
* point task. Unfortunately, the vfprintf_r routine in newlib * point task. Unfortunately, the vfprintf_r routine in newlib
@@ -118,13 +123,19 @@ void _CPU_Context_Initialize(
msr_value |= PPC_MSR_FP; msr_value |= PPC_MSR_FP;
else else
msr_value &= ~PPC_MSR_FP; msr_value &= ~PPC_MSR_FP;
#endif
#ifdef PPC_MULTILIB_ALTIVEC
msr_value |= MSR_VE;
the_ppc_context->vrsave = 0;
#endif
the_ppc_context = ppc_get_context( the_context );
the_ppc_context->gpr1 = sp; the_ppc_context->gpr1 = sp;
the_ppc_context->msr = msr_value; the_ppc_context->msr = msr_value;
the_ppc_context->lr = (uint32_t) entry_point; the_ppc_context->lr = (uint32_t) entry_point;
#ifdef __ALTIVEC__ #if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC)
_CPU_Context_initialize_altivec( the_ppc_context ); _CPU_Context_initialize_altivec( the_ppc_context );
#endif #endif

View File

@@ -23,7 +23,7 @@
* COPYRIGHT (c) 1989-1997. * COPYRIGHT (c) 1989-1997.
* On-Line Applications Research Corporation (OAR). * On-Line Applications Research Corporation (OAR).
* *
* Copyright (c) 2011-2014 embedded brains GmbH * Copyright (c) 2011-2015 embedded brains GmbH
* *
* The license and distribution terms for this file may in * The license and distribution terms for this file may in
* the file LICENSE in this distribution or at * the file LICENSE in this distribution or at
@@ -55,6 +55,7 @@
#define PPC_CONTEXT_CACHE_LINE_2 (3 * PPC_DEFAULT_CACHE_LINE_SIZE) #define PPC_CONTEXT_CACHE_LINE_2 (3 * PPC_DEFAULT_CACHE_LINE_SIZE)
#define PPC_CONTEXT_CACHE_LINE_3 (4 * PPC_DEFAULT_CACHE_LINE_SIZE) #define PPC_CONTEXT_CACHE_LINE_3 (4 * PPC_DEFAULT_CACHE_LINE_SIZE)
#define PPC_CONTEXT_CACHE_LINE_4 (5 * PPC_DEFAULT_CACHE_LINE_SIZE) #define PPC_CONTEXT_CACHE_LINE_4 (5 * PPC_DEFAULT_CACHE_LINE_SIZE)
#define PPC_CONTEXT_CACHE_LINE_5 (6 * PPC_DEFAULT_CACHE_LINE_SIZE)
BEGIN_CODE BEGIN_CODE
@@ -257,7 +258,10 @@ PROC (_CPU_Context_switch):
clrrwi r5, r4, PPC_DEFAULT_CACHE_LINE_POWER clrrwi r5, r4, PPC_DEFAULT_CACHE_LINE_POWER
DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_0) DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_0)
#if PPC_CONTEXT_CACHE_LINE_2 <= PPC_CONTEXT_VOLATILE_SIZE
DATA_CACHE_ZERO_AND_TOUCH(r11, PPC_CONTEXT_CACHE_LINE_1) DATA_CACHE_ZERO_AND_TOUCH(r11, PPC_CONTEXT_CACHE_LINE_1)
#endif
/* Save context to r3 */ /* Save context to r3 */
@@ -317,6 +321,11 @@ PROC (_CPU_Context_switch):
PPC_GPR_STORE r24, PPC_CONTEXT_OFFSET_GPR24(r3) PPC_GPR_STORE r24, PPC_CONTEXT_OFFSET_GPR24(r3)
PPC_GPR_STORE r25, PPC_CONTEXT_OFFSET_GPR25(r3) PPC_GPR_STORE r25, PPC_CONTEXT_OFFSET_GPR25(r3)
#if PPC_CONTEXT_OFFSET_V22 == PPC_CONTEXT_CACHE_LINE_2
DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_2)
#endif
PPC_GPR_STORE r26, PPC_CONTEXT_OFFSET_GPR26(r3) PPC_GPR_STORE r26, PPC_CONTEXT_OFFSET_GPR26(r3)
PPC_GPR_STORE r27, PPC_CONTEXT_OFFSET_GPR27(r3) PPC_GPR_STORE r27, PPC_CONTEXT_OFFSET_GPR27(r3)
@@ -327,6 +336,71 @@ PROC (_CPU_Context_switch):
stw r2, PPC_CONTEXT_OFFSET_GPR2(r3) stw r2, PPC_CONTEXT_OFFSET_GPR2(r3)
#ifdef PPC_MULTILIB_ALTIVEC
li r9, PPC_CONTEXT_OFFSET_V20
stvx v20, r3, r9
li r9, PPC_CONTEXT_OFFSET_V21
stvx v21, r3, r9
#if PPC_CONTEXT_OFFSET_V26 == PPC_CONTEXT_CACHE_LINE_3
DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_3)
#endif
li r9, PPC_CONTEXT_OFFSET_V22
stvx v22, r3, r9
li r9, PPC_CONTEXT_OFFSET_V23
stvx v23, r3, r9
li r9, PPC_CONTEXT_OFFSET_V24
stvx v24, r3, r9
li r9, PPC_CONTEXT_OFFSET_V25
stvx v25, r3, r9
#if PPC_CONTEXT_OFFSET_V30 == PPC_CONTEXT_CACHE_LINE_4
DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_4)
#endif
li r9, PPC_CONTEXT_OFFSET_V26
stvx v26, r3, r9
li r9, PPC_CONTEXT_OFFSET_V27
stvx v27, r3, r9
li r9, PPC_CONTEXT_OFFSET_V28
stvx v28, r3, r9
li r9, PPC_CONTEXT_OFFSET_V29
stvx v29, r3, r9
#if PPC_CONTEXT_OFFSET_F17 == PPC_CONTEXT_CACHE_LINE_5
DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_5)
#endif
li r9, PPC_CONTEXT_OFFSET_V30
stvx v30, r3, r9
li r9, PPC_CONTEXT_OFFSET_V31
stvx v31, r3, r9
mfvrsave r9
stw r9, PPC_CONTEXT_OFFSET_VRSAVE(r3)
#endif
#ifdef PPC_MULTILIB_FPU
stfd f14, PPC_CONTEXT_OFFSET_F14(r3)
stfd f15, PPC_CONTEXT_OFFSET_F15(r3)
stfd f16, PPC_CONTEXT_OFFSET_F16(r3)
stfd f17, PPC_CONTEXT_OFFSET_F17(r3)
stfd f18, PPC_CONTEXT_OFFSET_F18(r3)
stfd f19, PPC_CONTEXT_OFFSET_F19(r3)
stfd f20, PPC_CONTEXT_OFFSET_F20(r3)
stfd f21, PPC_CONTEXT_OFFSET_F21(r3)
stfd f22, PPC_CONTEXT_OFFSET_F22(r3)
stfd f23, PPC_CONTEXT_OFFSET_F23(r3)
stfd f24, PPC_CONTEXT_OFFSET_F24(r3)
stfd f25, PPC_CONTEXT_OFFSET_F25(r3)
stfd f26, PPC_CONTEXT_OFFSET_F26(r3)
stfd f27, PPC_CONTEXT_OFFSET_F27(r3)
stfd f28, PPC_CONTEXT_OFFSET_F28(r3)
stfd f29, PPC_CONTEXT_OFFSET_F29(r3)
stfd f30, PPC_CONTEXT_OFFSET_F30(r3)
stfd f31, PPC_CONTEXT_OFFSET_F31(r3)
#endif
#ifdef RTEMS_SMP #ifdef RTEMS_SMP
/* The executing context no longer executes on this processor */ /* The executing context no longer executes on this processor */
msync msync
@@ -351,7 +425,7 @@ check_is_executing:
/* Restore context from r5 */ /* Restore context from r5 */
restore_context: restore_context:
#ifdef __ALTIVEC__ #if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC)
mr r14, r5 mr r14, r5
.extern _CPU_Context_switch_altivec .extern _CPU_Context_switch_altivec
bl _CPU_Context_switch_altivec bl _CPU_Context_switch_altivec
@@ -390,6 +464,56 @@ restore_context:
lwz r2, PPC_CONTEXT_OFFSET_GPR2(r5) lwz r2, PPC_CONTEXT_OFFSET_GPR2(r5)
#ifdef PPC_MULTILIB_ALTIVEC
li r9, PPC_CONTEXT_OFFSET_V20
lvx v20, r5, r9
li r9, PPC_CONTEXT_OFFSET_V21
lvx v21, r5, r9
li r9, PPC_CONTEXT_OFFSET_V22
lvx v22, r5, r9
li r9, PPC_CONTEXT_OFFSET_V23
lvx v23, r5, r9
li r9, PPC_CONTEXT_OFFSET_V24
lvx v24, r5, r9
li r9, PPC_CONTEXT_OFFSET_V25
lvx v25, r5, r9
li r9, PPC_CONTEXT_OFFSET_V26
lvx v26, r5, r9
li r9, PPC_CONTEXT_OFFSET_V27
lvx v27, r5, r9
li r9, PPC_CONTEXT_OFFSET_V28
lvx v28, r5, r9
li r9, PPC_CONTEXT_OFFSET_V29
lvx v29, r5, r9
li r9, PPC_CONTEXT_OFFSET_V30
lvx v30, r5, r9
li r9, PPC_CONTEXT_OFFSET_V31
lvx v31, r5, r9
lwz r9, PPC_CONTEXT_OFFSET_VRSAVE(r5)
mtvrsave r9
#endif
#ifdef PPC_MULTILIB_FPU
lfd f14, PPC_CONTEXT_OFFSET_F14(r5)
lfd f15, PPC_CONTEXT_OFFSET_F15(r5)
lfd f16, PPC_CONTEXT_OFFSET_F16(r5)
lfd f17, PPC_CONTEXT_OFFSET_F17(r5)
lfd f18, PPC_CONTEXT_OFFSET_F18(r5)
lfd f19, PPC_CONTEXT_OFFSET_F19(r5)
lfd f20, PPC_CONTEXT_OFFSET_F20(r5)
lfd f21, PPC_CONTEXT_OFFSET_F21(r5)
lfd f22, PPC_CONTEXT_OFFSET_F22(r5)
lfd f23, PPC_CONTEXT_OFFSET_F23(r5)
lfd f24, PPC_CONTEXT_OFFSET_F24(r5)
lfd f25, PPC_CONTEXT_OFFSET_F25(r5)
lfd f26, PPC_CONTEXT_OFFSET_F26(r5)
lfd f27, PPC_CONTEXT_OFFSET_F27(r5)
lfd f28, PPC_CONTEXT_OFFSET_F28(r5)
lfd f29, PPC_CONTEXT_OFFSET_F29(r5)
lfd f30, PPC_CONTEXT_OFFSET_F30(r5)
lfd f31, PPC_CONTEXT_OFFSET_F31(r5)
#endif
mtcr r8 mtcr r8
mtlr r7 mtlr r7
mtmsr r6 mtmsr r6
@@ -405,7 +529,7 @@ PROC (_CPU_Context_restore):
/* Align to a cache line */ /* Align to a cache line */
clrrwi r5, r3, PPC_DEFAULT_CACHE_LINE_POWER clrrwi r5, r3, PPC_DEFAULT_CACHE_LINE_POWER
#ifdef __ALTIVEC__ #if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC)
li r3, 0 li r3, 0
#endif #endif

View File

@@ -57,6 +57,47 @@ PPC_ASSERT_OFFSET(gpr2, GPR2);
PPC_ASSERT_OFFSET(is_executing, IS_EXECUTING); PPC_ASSERT_OFFSET(is_executing, IS_EXECUTING);
#endif #endif
#ifdef PPC_MULTILIB_ALTIVEC
RTEMS_STATIC_ASSERT(
PPC_CONTEXT_OFFSET_V20 % 16 == 0,
ppc_context_altivec
);
PPC_ASSERT_OFFSET(v20, V20);
PPC_ASSERT_OFFSET(v21, V21);
PPC_ASSERT_OFFSET(v22, V22);
PPC_ASSERT_OFFSET(v23, V23);
PPC_ASSERT_OFFSET(v24, V24);
PPC_ASSERT_OFFSET(v25, V25);
PPC_ASSERT_OFFSET(v26, V26);
PPC_ASSERT_OFFSET(v27, V27);
PPC_ASSERT_OFFSET(v28, V28);
PPC_ASSERT_OFFSET(v29, V29);
PPC_ASSERT_OFFSET(v30, V30);
PPC_ASSERT_OFFSET(v31, V31);
PPC_ASSERT_OFFSET(vrsave, VRSAVE);
#endif
#ifdef PPC_MULTILIB_FPU
PPC_ASSERT_OFFSET(f14, F14);
PPC_ASSERT_OFFSET(f15, F15);
PPC_ASSERT_OFFSET(f16, F16);
PPC_ASSERT_OFFSET(f17, F17);
PPC_ASSERT_OFFSET(f18, F18);
PPC_ASSERT_OFFSET(f19, F19);
PPC_ASSERT_OFFSET(f20, F20);
PPC_ASSERT_OFFSET(f21, F21);
PPC_ASSERT_OFFSET(f22, F22);
PPC_ASSERT_OFFSET(f23, F23);
PPC_ASSERT_OFFSET(f24, F24);
PPC_ASSERT_OFFSET(f25, F25);
PPC_ASSERT_OFFSET(f26, F26);
PPC_ASSERT_OFFSET(f27, F27);
PPC_ASSERT_OFFSET(f28, F28);
PPC_ASSERT_OFFSET(f29, F29);
PPC_ASSERT_OFFSET(f30, F30);
PPC_ASSERT_OFFSET(f31, F31);
#endif
RTEMS_STATIC_ASSERT( RTEMS_STATIC_ASSERT(
sizeof(Context_Control) % PPC_DEFAULT_CACHE_LINE_SIZE == 0, sizeof(Context_Control) % PPC_DEFAULT_CACHE_LINE_SIZE == 0,
ppc_context_size ppc_context_size

View File

@@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2013 embedded brains GmbH. All rights reserved. * Copyright (c) 2013-2015 embedded brains GmbH. All rights reserved.
* *
* embedded brains GmbH * embedded brains GmbH
* Dornierstr. 4 * Dornierstr. 4
@@ -40,8 +40,60 @@
#define GPR29_OFFSET OFFSET(15) #define GPR29_OFFSET OFFSET(15)
#define GPR30_OFFSET OFFSET(16) #define GPR30_OFFSET OFFSET(16)
#define GPR31_OFFSET OFFSET(17) #define GPR31_OFFSET OFFSET(17)
#ifdef PPC_MULTILIB_FPU
#define FOFFSET(i) ((i) * 8 + OFFSET(18))
#define F14_OFFSET FOFFSET(0)
#define F15_OFFSET FOFFSET(1)
#define F16_OFFSET FOFFSET(2)
#define F17_OFFSET FOFFSET(3)
#define F18_OFFSET FOFFSET(4)
#define F19_OFFSET FOFFSET(5)
#define F20_OFFSET FOFFSET(6)
#define F21_OFFSET FOFFSET(7)
#define F22_OFFSET FOFFSET(8)
#define F23_OFFSET FOFFSET(9)
#define F24_OFFSET FOFFSET(10)
#define F25_OFFSET FOFFSET(11)
#define F26_OFFSET FOFFSET(12)
#define F27_OFFSET FOFFSET(13)
#define F28_OFFSET FOFFSET(14)
#define F29_OFFSET FOFFSET(15)
#define F30_OFFSET FOFFSET(16)
#define F31_OFFSET FOFFSET(17)
#define FPSCR_OFFSET FOFFSET(18)
#define FTMP_OFFSET FOFFSET(19)
#define FTMP2_OFFSET FOFFSET(20)
#define FPUEND FOFFSET(21)
#else
#define FPUEND OFFSET(18)
#endif
#ifdef PPC_MULTILIB_ALTIVEC
#define VOFFSET(i) ((i) * 16 + ((FPUEND + 16 - 1) & ~(16 - 1)))
#define V20_OFFSET VOFFSET(0)
#define V21_OFFSET VOFFSET(1)
#define V22_OFFSET VOFFSET(2)
#define V23_OFFSET VOFFSET(3)
#define V24_OFFSET VOFFSET(4)
#define V25_OFFSET VOFFSET(5)
#define V26_OFFSET VOFFSET(6)
#define V27_OFFSET VOFFSET(7)
#define V28_OFFSET VOFFSET(8)
#define V29_OFFSET VOFFSET(9)
#define V30_OFFSET VOFFSET(10)
#define V31_OFFSET VOFFSET(11)
#define VTMP_OFFSET VOFFSET(12)
#define VTMP2_OFFSET VOFFSET(13)
#define VSCR_OFFSET VOFFSET(14)
#define VRSAVE_OFFSET (VSCR_OFFSET + 4)
#define ALTIVECEND (VRSAVE_OFFSET + 4)
#else
#define ALTIVECEND FPUEND
#endif
#define FRAME_SIZE \ #define FRAME_SIZE \
((OFFSET(18) + CPU_STACK_ALIGNMENT - 1) & ~(CPU_STACK_ALIGNMENT - 1)) ((ALTIVECEND + CPU_STACK_ALIGNMENT - 1) & ~(CPU_STACK_ALIGNMENT - 1))
.global _CPU_Context_validate .global _CPU_Context_validate
@@ -72,6 +124,61 @@ _CPU_Context_validate:
stw r30, GPR30_OFFSET(r1) stw r30, GPR30_OFFSET(r1)
stw r31, GPR31_OFFSET(r1) stw r31, GPR31_OFFSET(r1)
#ifdef PPC_MULTILIB_FPU
stfd f14, F14_OFFSET(r1)
stfd f15, F15_OFFSET(r1)
stfd f16, F16_OFFSET(r1)
stfd f17, F17_OFFSET(r1)
stfd f18, F18_OFFSET(r1)
stfd f19, F19_OFFSET(r1)
stfd f20, F20_OFFSET(r1)
stfd f21, F21_OFFSET(r1)
stfd f22, F22_OFFSET(r1)
stfd f23, F23_OFFSET(r1)
stfd f24, F24_OFFSET(r1)
stfd f25, F25_OFFSET(r1)
stfd f26, F26_OFFSET(r1)
stfd f27, F27_OFFSET(r1)
stfd f28, F28_OFFSET(r1)
stfd f29, F29_OFFSET(r1)
stfd f30, F30_OFFSET(r1)
stfd f31, F31_OFFSET(r1)
mffs f0
stfd f0, FPSCR_OFFSET(r1)
#endif
#ifdef PPC_MULTILIB_ALTIVEC
li r0, V20_OFFSET
stvx v20, r1, r0
li r0, V21_OFFSET
stvx v21, r1, r0
li r0, V22_OFFSET
stvx v22, r1, r0
li r0, V23_OFFSET
stvx v23, r1, r0
li r0, V24_OFFSET
stvx v24, r1, r0
li r0, V25_OFFSET
stvx v25, r1, r0
li r0, V26_OFFSET
stvx v26, r1, r0
li r0, V27_OFFSET
stvx v27, r1, r0
li r0, V28_OFFSET
stvx v28, r1, r0
li r0, V29_OFFSET
stvx v29, r1, r0
li r0, V30_OFFSET
stvx v30, r1, r0
li r0, V31_OFFSET
stvx v31, r1, r0
mfvscr v0
li r0, VSCR_OFFSET
stvewx v0, r1, r0
mfvrsave r0
stw r0, VRSAVE_OFFSET(r1)
#endif
/* Fill */ /* Fill */
/* CR and GPR29 are equal most of the time */ /* CR and GPR29 are equal most of the time */
@@ -124,6 +231,99 @@ _CPU_Context_validate:
/* GPR31 contains the stack pointer */ /* GPR31 contains the stack pointer */
mr r31, r1 mr r31, r1
#ifdef PPC_MULTILIB_FPU
.macro FILL_F i
addi r4, r3, 0x100 + \i
stw r4, FTMP_OFFSET(r1)
addi r4, r3, 0x200 + \i
stw r4, FTMP_OFFSET + 4(r1)
lfd \i, FTMP_OFFSET(r1)
.endm
FILL_F 0
FILL_F 1
FILL_F 2
FILL_F 3
FILL_F 4
FILL_F 5
FILL_F 6
FILL_F 7
FILL_F 8
FILL_F 9
FILL_F 10
FILL_F 11
FILL_F 12
FILL_F 13
FILL_F 14
FILL_F 15
FILL_F 16
FILL_F 17
FILL_F 18
FILL_F 19
FILL_F 20
FILL_F 21
FILL_F 22
FILL_F 23
FILL_F 24
FILL_F 25
FILL_F 26
FILL_F 27
FILL_F 28
FILL_F 29
FILL_F 30
FILL_F 31
#endif
#ifdef PPC_MULTILIB_ALTIVEC
.macro FILL_V i
addi r4, r3, 0x300 + \i
stw r4, VTMP_OFFSET(r1)
addi r4, r3, 0x400 + \i
stw r4, VTMP_OFFSET + 4(r1)
addi r4, r3, 0x500 + \i
stw r4, VTMP_OFFSET + 8(r1)
addi r4, r3, 0x600 + \i
stw r4, VTMP_OFFSET + 12(r1)
li r4, VTMP_OFFSET
lvx \i, r1, r4
.endm
FILL_V 0
FILL_V 1
FILL_V 2
FILL_V 3
FILL_V 4
FILL_V 5
FILL_V 6
FILL_V 7
FILL_V 8
FILL_V 9
FILL_V 10
FILL_V 11
FILL_V 12
FILL_V 13
FILL_V 14
FILL_V 15
FILL_V 16
FILL_V 17
FILL_V 18
FILL_V 19
FILL_V 20
FILL_V 21
FILL_V 22
FILL_V 23
FILL_V 24
FILL_V 25
FILL_V 26
FILL_V 27
FILL_V 28
FILL_V 29
FILL_V 30
FILL_V 31
addi r4, r3, 0x700
mtvrsave r4
#endif
/* Check */ /* Check */
check: check:
mfcr r4 mfcr r4
@@ -226,12 +426,194 @@ check:
bne restore bne restore
cmpw r31, r1 cmpw r31, r1
bne restore bne restore
#ifdef PPC_MULTILIB_FPU
.macro CHECK_F i
stfd \i, FTMP_OFFSET(r1)
lwz r5, FTMP_OFFSET(r1)
addi r4, r3, 0x100 + \i
cmpw r5, r4
bne restore
lwz r5, FTMP_OFFSET + 4(r1)
addi r4, r3, 0x200 + \i
cmpw r5, r4
bne restore
.endm
/* Check FPSCR */
stfd f0, FTMP_OFFSET(r1)
mffs f0
stfd f0, FTMP2_OFFSET(r1)
lwz r4, FTMP2_OFFSET + 4(r1)
lwz r5, FPSCR_OFFSET + 4(r1)
cmpw r5, r4
bne restore
lfd f0, FTMP_OFFSET(r1)
CHECK_F 0
CHECK_F 1
CHECK_F 2
CHECK_F 3
CHECK_F 4
CHECK_F 5
CHECK_F 6
CHECK_F 7
CHECK_F 8
CHECK_F 9
CHECK_F 10
CHECK_F 11
CHECK_F 12
CHECK_F 13
CHECK_F 14
CHECK_F 15
CHECK_F 16
CHECK_F 17
CHECK_F 18
CHECK_F 19
CHECK_F 20
CHECK_F 21
CHECK_F 22
CHECK_F 23
CHECK_F 24
CHECK_F 25
CHECK_F 26
CHECK_F 27
CHECK_F 28
CHECK_F 29
CHECK_F 30
CHECK_F 31
#endif
#ifdef PPC_MULTILIB_ALTIVEC
.macro CHECK_V i
li r4, VTMP_OFFSET
stvx \i, r1, r4
lwz r5, VTMP_OFFSET(r1)
addi r4, r3, 0x300 + \i
cmpw r5, r4
bne restore
lwz r5, VTMP_OFFSET + 4(r1)
addi r4, r3, 0x400 + \i
cmpw r5, r4
bne restore
lwz r5, VTMP_OFFSET + 8(r1)
addi r4, r3, 0x500 + \i
cmpw r5, r4
bne restore
lwz r5, VTMP_OFFSET + 12(r1)
addi r4, r3, 0x600 + \i
cmpw r5, r4
bne restore
.endm
/* Check VSCR */
li r4, VTMP_OFFSET
stvx v0, r1, r4
mfvscr v0
li r4, VTMP2_OFFSET
stvewx v0, r1, r4
lwz r4, VTMP2_OFFSET(r1)
lwz r5, VSCR_OFFSET(r1)
cmpw r5, r4
bne restore
li r4, VTMP_OFFSET
lvx v0, r1, r4
CHECK_V 0
CHECK_V 1
CHECK_V 2
CHECK_V 3
CHECK_V 4
CHECK_V 5
CHECK_V 6
CHECK_V 7
CHECK_V 8
CHECK_V 9
CHECK_V 10
CHECK_V 11
CHECK_V 12
CHECK_V 13
CHECK_V 14
CHECK_V 15
CHECK_V 16
CHECK_V 17
CHECK_V 18
CHECK_V 19
CHECK_V 20
CHECK_V 21
CHECK_V 22
CHECK_V 23
CHECK_V 24
CHECK_V 25
CHECK_V 26
CHECK_V 27
CHECK_V 28
CHECK_V 29
CHECK_V 30
CHECK_V 31
mfvrsave r5
addi r4, r3, 0x700
cmpw r5, r4
bne restore
#endif
mtcr r29 mtcr r29
addi r5, r3, 1 addi r5, r3, 1
b check b check
/* Restore */ /* Restore */
restore: restore:
#ifdef PPC_MULTILIB_ALTIVEC
lwz r0, VRSAVE_OFFSET(r1)
mtvrsave r0
li r0, V31_OFFSET
lvx v31, r1, r0
li r0, V30_OFFSET
lvx v30, r1, r0
li r0, V29_OFFSET
lvx v29, r1, r0
li r0, V28_OFFSET
lvx v28, r1, r0
li r0, V27_OFFSET
lvx v27, r1, r0
li r0, V26_OFFSET
lvx v26, r1, r0
li r0, V25_OFFSET
lvx v25, r1, r0
li r0, V24_OFFSET
lvx v24, r1, r0
li r0, V23_OFFSET
lvx v23, r1, r0
li r0, V22_OFFSET
lvx v22, r1, r0
li r0, V21_OFFSET
lvx v21, r1, r0
li r0, V20_OFFSET
lvx v20, r1, r0
#endif
#ifdef PPC_MULTILIB_FPU
lfd f31, F31_OFFSET(r1)
lfd f30, F30_OFFSET(r1)
lfd f29, F29_OFFSET(r1)
lfd f28, F28_OFFSET(r1)
lfd f27, F27_OFFSET(r1)
lfd f26, F26_OFFSET(r1)
lfd f25, F25_OFFSET(r1)
lfd f24, F24_OFFSET(r1)
lfd f23, F23_OFFSET(r1)
lfd f22, F22_OFFSET(r1)
lfd f21, F21_OFFSET(r1)
lfd f20, F20_OFFSET(r1)
lfd f19, F19_OFFSET(r1)
lfd f18, F18_OFFSET(r1)
lfd f17, F17_OFFSET(r1)
lfd f16, F16_OFFSET(r1)
lfd f15, F15_OFFSET(r1)
lfd f14, F14_OFFSET(r1)
#endif
lwz r31, GPR31_OFFSET(r1) lwz r31, GPR31_OFFSET(r1)
lwz r30, GPR30_OFFSET(r1) lwz r30, GPR30_OFFSET(r1)
lwz r29, GPR29_OFFSET(r1) lwz r29, GPR29_OFFSET(r1)

View File

@@ -22,6 +22,99 @@
_CPU_Context_volatile_clobber: _CPU_Context_volatile_clobber:
#ifdef PPC_MULTILIB_FPU
.macro CLOBBER_F i
addi r4, r3, 0x100 + \i
stw r4, 16(r1)
addi r4, r3, 0x200 + \i
stw r4, 16 + 4(r1)
lfd \i, 16(r1)
.endm
stwu r1, -32(r1)
/* Negate FPSCR[FPRF] bits */
mffs f0
stfd f0, 16(r1)
lwz r0, 20(r1)
nor r3, r0, r0
rlwinm r0, r0, 0, 20, 14
rlwinm r3, r3, 0, 15, 19
or r0, r3, r0
stw r0, 20(r1)
lfd f0, 16(r1)
mtfsf 0xff, f0
CLOBBER_F 0
CLOBBER_F 1
CLOBBER_F 2
CLOBBER_F 3
CLOBBER_F 4
CLOBBER_F 5
CLOBBER_F 6
CLOBBER_F 7
CLOBBER_F 8
CLOBBER_F 9
CLOBBER_F 10
CLOBBER_F 11
CLOBBER_F 12
CLOBBER_F 13
addi r1, r1, 32
#endif
#ifdef PPC_MULTILIB_ALTIVEC
.macro CLOBBER_V i
addi r4, r3, 0x300 + \i
stw r4, 16(r1)
addi r4, r3, 0x400 + \i
stw r4, 16 + 4(r1)
addi r4, r3, 0x500 + \i
stw r4, 16 + 8(r1)
addi r4, r3, 0x600 + \i
stw r4, 16 + 12(r1)
li r4, 16
lvx \i, r1, r4
.endm
stwu r1, -32(r1)
/* Negate VSCR[SAT] bit */
mfvscr v0
li r3, 16
stvewx v0, r1, r3
lwz r0, 16(r1)
nor r3, r0, r0
rlwinm r0, r0, 0, 0, 30
rlwinm r3, r3, 0, 31, 31
or r0, r3, r0
stw r0, 16(r1)
li r3, 16
lvewx v0, r1, r3
mtvscr v0
CLOBBER_V 0
CLOBBER_V 1
CLOBBER_V 2
CLOBBER_V 3
CLOBBER_V 4
CLOBBER_V 5
CLOBBER_V 6
CLOBBER_V 7
CLOBBER_V 8
CLOBBER_V 9
CLOBBER_V 10
CLOBBER_V 11
CLOBBER_V 12
CLOBBER_V 13
CLOBBER_V 14
CLOBBER_V 15
CLOBBER_V 16
CLOBBER_V 17
CLOBBER_V 18
CLOBBER_V 19
addi r1, r1, 32
#endif
addi r4, r3, 10 addi r4, r3, 10
rlwinm r4, r4, 0, 20, 7 rlwinm r4, r4, 0, 20, 7
mfcr r5 mfcr r5

View File

@@ -300,10 +300,22 @@ typedef struct {
PPC_GPR_TYPE gpr30; PPC_GPR_TYPE gpr30;
PPC_GPR_TYPE gpr31; PPC_GPR_TYPE gpr31;
uint32_t gpr2; uint32_t gpr2;
#ifdef RTEMS_SMP #if defined(PPC_MULTILIB_ALTIVEC)
volatile uint32_t is_executing; uint32_t reserved_for_alignment;
#endif uint8_t v20[16];
#ifdef __ALTIVEC__ uint8_t v21[16];
uint8_t v22[16];
uint8_t v23[16];
uint8_t v24[16];
uint8_t v25[16];
uint8_t v26[16];
uint8_t v27[16];
uint8_t v28[16];
uint8_t v29[16];
uint8_t v30[16];
uint8_t v31[16];
uint32_t vrsave;
#elif defined(__ALTIVEC__)
/* /*
* 12 non-volatile vector registers, cache-aligned area for vscr/vrsave * 12 non-volatile vector registers, cache-aligned area for vscr/vrsave
* and padding to ensure cache-alignment. Unfortunately, we can't verify * and padding to ensure cache-alignment. Unfortunately, we can't verify
@@ -315,6 +327,34 @@ typedef struct {
*/ */
uint8_t altivec[16*12 + 32 + PPC_DEFAULT_CACHE_LINE_SIZE]; uint8_t altivec[16*12 + 32 + PPC_DEFAULT_CACHE_LINE_SIZE];
#endif #endif
#if defined(PPC_MULTILIB_FPU)
double f14;
double f15;
double f16;
double f17;
double f18;
double f19;
double f20;
double f21;
double f22;
double f23;
double f24;
double f25;
double f26;
double f27;
double f28;
double f29;
double f30;
double f31;
#endif
#if defined(RTEMS_SMP)
/*
* This item is at the structure end, so that we can use dcbz for the
* previous items to optimize the context switch. We must not set this
* item to zero via the dcbz.
*/
volatile uint32_t is_executing;
#endif
} ppc_context; } ppc_context;
typedef struct { typedef struct {
@@ -386,8 +426,60 @@ static inline ppc_context *ppc_get_context( const Context_Control *context )
#define PPC_CONTEXT_OFFSET_GPR31 PPC_CONTEXT_GPR_OFFSET( 31 ) #define PPC_CONTEXT_OFFSET_GPR31 PPC_CONTEXT_GPR_OFFSET( 31 )
#define PPC_CONTEXT_OFFSET_GPR2 PPC_CONTEXT_GPR_OFFSET( 32 ) #define PPC_CONTEXT_OFFSET_GPR2 PPC_CONTEXT_GPR_OFFSET( 32 )
#ifdef PPC_MULTILIB_ALTIVEC
#define PPC_CONTEXT_OFFSET_V( v ) \
( ( ( v ) - 20 ) * 16 + PPC_DEFAULT_CACHE_LINE_SIZE + 96 )
#define PPC_CONTEXT_OFFSET_V20 PPC_CONTEXT_OFFSET_V( 20 )
#define PPC_CONTEXT_OFFSET_V21 PPC_CONTEXT_OFFSET_V( 21 )
#define PPC_CONTEXT_OFFSET_V22 PPC_CONTEXT_OFFSET_V( 22 )
#define PPC_CONTEXT_OFFSET_V23 PPC_CONTEXT_OFFSET_V( 23 )
#define PPC_CONTEXT_OFFSET_V24 PPC_CONTEXT_OFFSET_V( 24 )
#define PPC_CONTEXT_OFFSET_V25 PPC_CONTEXT_OFFSET_V( 25 )
#define PPC_CONTEXT_OFFSET_V26 PPC_CONTEXT_OFFSET_V( 26 )
#define PPC_CONTEXT_OFFSET_V27 PPC_CONTEXT_OFFSET_V( 27 )
#define PPC_CONTEXT_OFFSET_V28 PPC_CONTEXT_OFFSET_V( 28 )
#define PPC_CONTEXT_OFFSET_V29 PPC_CONTEXT_OFFSET_V( 29 )
#define PPC_CONTEXT_OFFSET_V30 PPC_CONTEXT_OFFSET_V( 30 )
#define PPC_CONTEXT_OFFSET_V31 PPC_CONTEXT_OFFSET_V( 31 )
#define PPC_CONTEXT_OFFSET_VRSAVE PPC_CONTEXT_OFFSET_V( 32 )
#define PPC_CONTEXT_OFFSET_F( f ) \
( ( ( f ) - 14 ) * 8 + PPC_DEFAULT_CACHE_LINE_SIZE + 296 )
#else
#define PPC_CONTEXT_OFFSET_F( f ) \
( ( ( f ) - 14 ) * 8 + PPC_DEFAULT_CACHE_LINE_SIZE + 96 )
#endif
#ifdef PPC_MULTILIB_FPU
#define PPC_CONTEXT_OFFSET_F14 PPC_CONTEXT_OFFSET_F( 14 )
#define PPC_CONTEXT_OFFSET_F15 PPC_CONTEXT_OFFSET_F( 15 )
#define PPC_CONTEXT_OFFSET_F16 PPC_CONTEXT_OFFSET_F( 16 )
#define PPC_CONTEXT_OFFSET_F17 PPC_CONTEXT_OFFSET_F( 17 )
#define PPC_CONTEXT_OFFSET_F18 PPC_CONTEXT_OFFSET_F( 18 )
#define PPC_CONTEXT_OFFSET_F19 PPC_CONTEXT_OFFSET_F( 19 )
#define PPC_CONTEXT_OFFSET_F20 PPC_CONTEXT_OFFSET_F( 20 )
#define PPC_CONTEXT_OFFSET_F21 PPC_CONTEXT_OFFSET_F( 21 )
#define PPC_CONTEXT_OFFSET_F22 PPC_CONTEXT_OFFSET_F( 22 )
#define PPC_CONTEXT_OFFSET_F23 PPC_CONTEXT_OFFSET_F( 23 )
#define PPC_CONTEXT_OFFSET_F24 PPC_CONTEXT_OFFSET_F( 24 )
#define PPC_CONTEXT_OFFSET_F25 PPC_CONTEXT_OFFSET_F( 25 )
#define PPC_CONTEXT_OFFSET_F26 PPC_CONTEXT_OFFSET_F( 26 )
#define PPC_CONTEXT_OFFSET_F27 PPC_CONTEXT_OFFSET_F( 27 )
#define PPC_CONTEXT_OFFSET_F28 PPC_CONTEXT_OFFSET_F( 28 )
#define PPC_CONTEXT_OFFSET_F29 PPC_CONTEXT_OFFSET_F( 29 )
#define PPC_CONTEXT_OFFSET_F30 PPC_CONTEXT_OFFSET_F( 30 )
#define PPC_CONTEXT_OFFSET_F31 PPC_CONTEXT_OFFSET_F( 31 )
#endif
#if defined(PPC_MULTILIB_FPU)
#define PPC_CONTEXT_VOLATILE_SIZE PPC_CONTEXT_OFFSET_F( 32 )
#elif defined(PPC_MULTILIB_ALTIVEC)
#define PPC_CONTEXT_VOLATILE_SIZE (PPC_CONTEXT_OFFSET_VRSAVE + 4)
#else
#define PPC_CONTEXT_VOLATILE_SIZE (PPC_CONTEXT_GPR_OFFSET( 32 ) + 4)
#endif
#ifdef RTEMS_SMP #ifdef RTEMS_SMP
#define PPC_CONTEXT_OFFSET_IS_EXECUTING (PPC_CONTEXT_GPR_OFFSET( 32 ) + 4) #define PPC_CONTEXT_OFFSET_IS_EXECUTING PPC_CONTEXT_VOLATILE_SIZE
#endif #endif
#ifndef ASM #ifndef ASM
@@ -1101,6 +1193,80 @@ typedef struct {
PPC_GPR_TYPE GPR29; PPC_GPR_TYPE GPR29;
PPC_GPR_TYPE GPR30; PPC_GPR_TYPE GPR30;
PPC_GPR_TYPE GPR31; PPC_GPR_TYPE GPR31;
#if defined(PPC_MULTILIB_ALTIVEC) || defined(PPC_MULTILIB_FPU)
uint32_t reserved_for_alignment;
#endif
#ifdef PPC_MULTILIB_ALTIVEC
uint32_t VSCR;
uint32_t VRSAVE;
uint8_t V0[16];
uint8_t V1[16];
uint8_t V2[16];
uint8_t V3[16];
uint8_t V4[16];
uint8_t V5[16];
uint8_t V6[16];
uint8_t V7[16];
uint8_t V8[16];
uint8_t V9[16];
uint8_t V10[16];
uint8_t V11[16];
uint8_t V12[16];
uint8_t V13[16];
uint8_t V14[16];
uint8_t V15[16];
uint8_t V16[16];
uint8_t V17[16];
uint8_t V18[16];
uint8_t V19[16];
uint8_t V20[16];
uint8_t V21[16];
uint8_t V22[16];
uint8_t V23[16];
uint8_t V24[16];
uint8_t V25[16];
uint8_t V26[16];
uint8_t V27[16];
uint8_t V28[16];
uint8_t V29[16];
uint8_t V30[16];
uint8_t V31[16];
#endif
#ifdef PPC_MULTILIB_FPU
double F0;
double F1;
double F2;
double F3;
double F4;
double F5;
double F6;
double F7;
double F8;
double F9;
double F10;
double F11;
double F12;
double F13;
double F14;
double F15;
double F16;
double F17;
double F18;
double F19;
double F20;
double F21;
double F22;
double F23;
double F24;
double F25;
double F26;
double F27;
double F28;
double F29;
double F30;
double F31;
uint64_t FPSCR;
#endif
} CPU_Exception_frame; } CPU_Exception_frame;
void _CPU_Exception_frame_print( const CPU_Exception_frame *frame ); void _CPU_Exception_frame_print( const CPU_Exception_frame *frame );

View File

@@ -120,12 +120,22 @@ extern "C" {
* Assume PPC_HAS_FPU to be a synonym for _SOFT_FLOAT. * Assume PPC_HAS_FPU to be a synonym for _SOFT_FLOAT.
*/ */
#if defined(_SOFT_FLOAT) || defined(__NO_FPRS__) /* e500 has unified integer/FP registers */ #if defined(_SOFT_FLOAT) \
|| defined(__NO_FPRS__) /* e500 has unified integer/FP registers */ \
|| defined(__PPC_CPU_E6500__)
#define PPC_HAS_FPU 0 #define PPC_HAS_FPU 0
#else #else
#define PPC_HAS_FPU 1 #define PPC_HAS_FPU 1
#endif #endif
#if defined(__PPC_CPU_E6500__) && defined(__ALTIVEC__)
#define PPC_MULTILIB_ALTIVEC
#endif
#if defined(__PPC_CPU_E6500__) && !defined(_SOFT_FLOAT)
#define PPC_MULTILIB_FPU
#endif
/* /*
* Unless specified above, If the model has FP support, it is assumed to * Unless specified above, If the model has FP support, it is assumed to
* support doubles (8-byte floating point numbers). * support doubles (8-byte floating point numbers).