score: Fix scheduler helping implementation

Do not extract the idle threads from the ready set so that there is
always a thread available for comparison.
Sebastian Huber
2014-07-10 14:27:42 +02:00
parent 333f9426f6
commit 27783f6ca8
8 changed files with 294 additions and 360 deletions
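
In outline, the fix keeps the idle threads in the ready set and removes the per-scheduler extract/insert callbacks from the idle-thread handling: obtaining or releasing an idle thread now only touches the Idle_threads chain. The following standalone sketch (invented list and thread types, not the RTEMS sources) models that pattern:

/*
 * Minimal sketch, not RTEMS code: the list type and names below are made up.
 * It models the simplified idle-thread handling after this commit: getting
 * and releasing an idle thread only manipulates the idle-thread chain, while
 * the idle threads remain in the ready set.
 */
#include <assert.h>
#include <stddef.h>
#include <stdio.h>

typedef struct idle_node {
  struct idle_node *next;
  int id;
} idle_node;

typedef struct {
  idle_node *first;   /* stand-in for Chain_Control Idle_threads */
} idle_chain;

/* Analogous to _Scheduler_SMP_Get_idle_thread(): take the first idle thread
 * from the chain; the ready set is left untouched. */
static idle_node *get_idle_thread( idle_chain *chain )
{
  idle_node *idle = chain->first;

  assert( idle != NULL );
  chain->first = idle->next;
  return idle;
}

/* Analogous to _Scheduler_SMP_Release_idle_thread(): prepend the idle thread
 * to the chain; no ready-set insert is needed any more. */
static void release_idle_thread( idle_chain *chain, idle_node *idle )
{
  idle->next = chain->first;
  chain->first = idle;
}

int main( void )
{
  idle_node idle0 = { NULL, 0 };
  idle_chain chain = { &idle0 };

  idle_node *idle = get_idle_thread( &chain );
  printf( "got idle %d\n", idle->id );
  release_idle_thread( &chain, idle );
  return 0;
}

The simplified _Scheduler_SMP_Get_idle_thread() and _Scheduler_SMP_Release_idle_thread() in the diff below follow the same shape, using _Chain_Get_first_unprotected() and _Chain_Prepend_unprotected() on the Idle_threads chain.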

View File

@@ -148,28 +148,6 @@ static inline void _Scheduler_priority_SMP_Extract_from_ready(
   );
 }
 
-static inline Thread_Control *_Scheduler_priority_SMP_Get_idle_thread(
-  Scheduler_Context *context
-)
-{
-  return _Scheduler_SMP_Get_idle_thread(
-    context,
-    _Scheduler_priority_SMP_Extract_from_ready
-  );
-}
-
-static void _Scheduler_priority_SMP_Release_idle_thread(
-  Scheduler_Context *context,
-  Thread_Control *idle
-)
-{
-  _Scheduler_SMP_Release_idle_thread(
-    context,
-    idle,
-    _Scheduler_priority_SMP_Insert_ready_fifo
-  );
-}
-
 static inline void _Scheduler_priority_SMP_Do_update(
   Scheduler_Context *context,
   Scheduler_Node *node_to_update,

View File

@@ -54,6 +54,10 @@ typedef struct {
   /**
    * @brief Chain of the available idle threads.
+   *
+   * Idle threads are used for the scheduler helping protocol. It is crucial
+   * that the idle threads preserve their relative order. This is the case for
+   * this priority based scheduler.
    */
   Chain_Control Idle_threads;
 } Scheduler_SMP_Context;
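
The added comment explains why the idle threads may stay in the ready set; together with the commit message it implies that the ready set is never empty, so the scheduler always has a node to compare against. A toy model of that property (invented names, not RTEMS code):

/*
 * Toy model, not RTEMS code: a ready set that always contains an idle entry
 * at the lowest priority never yields an empty result when the scheduler
 * looks for a candidate to compare against.
 */
#include <assert.h>
#include <limits.h>
#include <stddef.h>
#include <stdio.h>

typedef struct {
  const char *name;
  int priority;          /* lower value = higher priority */
} toy_thread;

/* Return the highest priority entry; with the idle entry always present the
 * set is never empty, so callers need no special empty-set handling. */
static const toy_thread *highest_ready( const toy_thread *ready, size_t n )
{
  const toy_thread *best = NULL;
  size_t i;

  assert( n > 0 );
  for ( i = 0; i < n; ++i ) {
    if ( best == NULL || ready[ i ].priority < best->priority ) {
      best = &ready[ i ];
    }
  }
  return best;
}

int main( void )
{
  /* The idle entry stays in the set even when no real thread is ready. */
  toy_thread ready[] = { { "idle", INT_MAX } };

  printf( "%s\n", highest_ready( ready, 1 )->name );
  return 0;
}

In the sketch the idle entry carries the lowest possible priority, so it never displaces a real ready thread; it merely guarantees that a query such as "highest ready" has a well-defined answer.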

View File

@@ -425,16 +425,12 @@ static inline bool _Scheduler_SMP_Is_processor_owned_by_us(
 }
 
 static inline Thread_Control *_Scheduler_SMP_Get_idle_thread(
-  Scheduler_Context *context,
-  Scheduler_SMP_Extract extract_from_ready
+  Scheduler_Context *context
 )
 {
   Scheduler_SMP_Context *self = _Scheduler_SMP_Get_self( context );
   Thread_Control *idle = (Thread_Control *)
     _Chain_Get_first_unprotected( &self->Idle_threads );
-  Scheduler_Node *own_node = _Scheduler_Thread_get_own_node( idle );
-
-  ( *extract_from_ready )( &self->Base, own_node );
 
   _Assert( &idle->Object.Node != _Chain_Tail( &self->Idle_threads ) );
@@ -443,15 +439,12 @@ static inline Thread_Control *_Scheduler_SMP_Get_idle_thread(
 
 static inline void _Scheduler_SMP_Release_idle_thread(
   Scheduler_Context *context,
-  Thread_Control *idle,
-  Scheduler_SMP_Insert insert_ready
+  Thread_Control *idle
 )
 {
   Scheduler_SMP_Context *self = _Scheduler_SMP_Get_self( context );
-  Scheduler_Node *own_node = _Scheduler_Thread_get_own_node( idle );
 
   _Chain_Prepend_unprotected( &self->Idle_threads, &idle->Object.Node );
-  ( *insert_ready )( context, own_node );
 }
 
 static inline void _Scheduler_SMP_Allocate_processor_lazy(
@@ -536,8 +529,7 @@ static inline Thread_Control *_Scheduler_SMP_Enqueue_to_scheduled(
   Scheduler_Node *lowest_scheduled,
   Scheduler_SMP_Insert insert_scheduled,
   Scheduler_SMP_Move move_from_scheduled_to_ready,
-  Scheduler_SMP_Allocate_processor allocate_processor,
-  Scheduler_Release_idle_thread release_idle_thread
+  Scheduler_SMP_Allocate_processor allocate_processor
 )
 {
   Thread_Control *user = _Scheduler_Node_get_user( node );
@@ -570,7 +562,7 @@ static inline Thread_Control *_Scheduler_SMP_Enqueue_to_scheduled(
     idle = _Scheduler_Release_idle_thread(
       context,
       lowest_scheduled,
-      release_idle_thread
+      _Scheduler_SMP_Release_idle_thread
     );
     if ( idle == NULL ) {
       needs_help = lowest_scheduled_user;
@@ -603,7 +595,6 @@ static inline Thread_Control *_Scheduler_SMP_Enqueue_to_scheduled(
  * if this pointer is passed as the second argument to the order function.
  * @param[in] allocate_processor Function to allocate a processor to a node
  * based on the rules of the scheduler.
- * @param[in] release_idle_thread Function to release an idle thread.
  */
 static inline Thread_Control *_Scheduler_SMP_Enqueue_ordered(
   Scheduler_Context *context,
@@ -614,8 +605,7 @@ static inline Thread_Control *_Scheduler_SMP_Enqueue_ordered(
   Scheduler_SMP_Insert insert_scheduled,
   Scheduler_SMP_Move move_from_scheduled_to_ready,
   Scheduler_SMP_Get_lowest_scheduled get_lowest_scheduled,
-  Scheduler_SMP_Allocate_processor allocate_processor,
-  Scheduler_Release_idle_thread release_idle_thread
+  Scheduler_SMP_Allocate_processor allocate_processor
 )
 {
   Scheduler_Node *lowest_scheduled =
@@ -628,8 +618,7 @@ static inline Thread_Control *_Scheduler_SMP_Enqueue_ordered(
       lowest_scheduled,
       insert_scheduled,
       move_from_scheduled_to_ready,
-      allocate_processor,
-      release_idle_thread
+      allocate_processor
     );
   } else {
     ( *insert_ready )( context, node );
@@ -656,8 +645,6 @@ static inline Thread_Control *_Scheduler_SMP_Enqueue_ordered(
  * of ready nodes to the set of scheduled nodes.
  * @param[in] allocate_processor Function to allocate a processor to a node
  * based on the rules of the scheduler.
- * @param[in] get_idle_thread Function to get an idle thread.
- * @param[in] release_idle_thread Function to release an idle thread.
  */
 static inline Thread_Control *_Scheduler_SMP_Enqueue_scheduled_ordered(
   Scheduler_Context *context,
@@ -668,9 +655,7 @@ static inline Thread_Control *_Scheduler_SMP_Enqueue_scheduled_ordered(
   Scheduler_SMP_Insert insert_ready,
   Scheduler_SMP_Insert insert_scheduled,
   Scheduler_SMP_Move move_from_ready_to_scheduled,
-  Scheduler_SMP_Allocate_processor allocate_processor,
-  Scheduler_Get_idle_thread get_idle_thread,
-  Scheduler_Release_idle_thread release_idle_thread
+  Scheduler_SMP_Allocate_processor allocate_processor
 )
 {
   Thread_Control *needs_help;
@@ -692,7 +677,7 @@ static inline Thread_Control *_Scheduler_SMP_Enqueue_scheduled_ordered(
       _Scheduler_Try_to_schedule_node(
         context,
         highest_ready,
-        get_idle_thread
+        _Scheduler_SMP_Get_idle_thread
       )
     ) {
       Thread_Control *user = _Scheduler_Node_get_user( node );
@@ -717,7 +702,7 @@ static inline Thread_Control *_Scheduler_SMP_Enqueue_scheduled_ordered(
       idle = _Scheduler_Release_idle_thread(
         context,
         node,
-        release_idle_thread
+        _Scheduler_SMP_Release_idle_thread
       );
       if ( idle == NULL ) {
         needs_help = user;
@@ -752,8 +737,7 @@ static inline void _Scheduler_SMP_Schedule_highest_ready(
   Scheduler_SMP_Extract extract_from_ready,
   Scheduler_SMP_Get_highest_ready get_highest_ready,
   Scheduler_SMP_Move move_from_ready_to_scheduled,
-  Scheduler_SMP_Allocate_processor allocate_processor,
-  Scheduler_Get_idle_thread get_idle_thread
+  Scheduler_SMP_Allocate_processor allocate_processor
 )
 {
   while ( true ) {
@@ -763,7 +747,7 @@ static inline void _Scheduler_SMP_Schedule_highest_ready(
       _Scheduler_Try_to_schedule_node(
         context,
         highest_ready,
-        get_idle_thread
+        _Scheduler_SMP_Get_idle_thread
       )
     ) {
       _Scheduler_SMP_Allocate_processor(
@@ -797,7 +781,6 @@ static inline void _Scheduler_SMP_Schedule_highest_ready(
  * @param[in] get_highest_ready Function to get the highest ready node.
  * @param[in] move_from_ready_to_scheduled Function to move a node from the set
  * of ready nodes to the set of scheduled nodes.
- * @param[in] get_idle_thread Function to get an idle thread.
  */
 static inline void _Scheduler_SMP_Block(
   Scheduler_Context *context,
@@ -805,8 +788,7 @@ static inline void _Scheduler_SMP_Block(
   Scheduler_SMP_Extract extract_from_ready,
   Scheduler_SMP_Get_highest_ready get_highest_ready,
   Scheduler_SMP_Move move_from_ready_to_scheduled,
-  Scheduler_SMP_Allocate_processor allocate_processor,
-  Scheduler_Get_idle_thread get_idle_thread
+  Scheduler_SMP_Allocate_processor allocate_processor
 )
 {
   Scheduler_SMP_Node *node = _Scheduler_SMP_Thread_get_node( thread );
@@ -815,7 +797,7 @@ static inline void _Scheduler_SMP_Block(
     context,
     &node->Base,
     is_scheduled,
-    get_idle_thread
+    _Scheduler_SMP_Get_idle_thread
   );
 
   if ( block ) {
@@ -830,8 +812,7 @@ static inline void _Scheduler_SMP_Block(
       extract_from_ready,
       get_highest_ready,
       move_from_ready_to_scheduled,
-      allocate_processor,
-      get_idle_thread
+      allocate_processor
     );
   } else {
     ( *extract_from_ready )( context, &node->Base );
@@ -842,8 +823,7 @@ static inline void _Scheduler_SMP_Block(
 static inline Thread_Control *_Scheduler_SMP_Unblock(
   Scheduler_Context *context,
   Thread_Control *thread,
-  Scheduler_SMP_Enqueue enqueue_fifo,
-  Scheduler_Release_idle_thread release_idle_thread
+  Scheduler_SMP_Enqueue enqueue_fifo
 )
 {
   Scheduler_SMP_Node *node = _Scheduler_SMP_Thread_get_node( thread );
@@ -853,7 +833,7 @@ static inline Thread_Control *_Scheduler_SMP_Unblock(
     thread,
     &node->Base,
     is_scheduled,
-    release_idle_thread
+    _Scheduler_SMP_Release_idle_thread
   );
   Thread_Control *needs_help;
@@ -917,8 +897,7 @@ static inline Thread_Control *_Scheduler_SMP_Ask_for_help(
   Scheduler_Context *context,
   Thread_Control *offers_help,
   Thread_Control *needs_help,
-  Scheduler_SMP_Enqueue enqueue_fifo,
-  Scheduler_Release_idle_thread release_idle_thread
+  Scheduler_SMP_Enqueue enqueue_fifo
 )
 {
   Scheduler_SMP_Node *node = _Scheduler_SMP_Thread_get_own_node( offers_help );
@@ -940,7 +919,7 @@ static inline Thread_Control *_Scheduler_SMP_Ask_for_help(
         offers_help,
         needs_help,
         previous_accepts_help,
-        release_idle_thread
+        _Scheduler_SMP_Release_idle_thread
       );
       break;
     case SCHEDULER_SMP_NODE_BLOCKED:

View File

@@ -231,8 +231,7 @@ void _Scheduler_priority_affinity_SMP_Block(
     _Scheduler_priority_SMP_Extract_from_ready,
     _Scheduler_priority_affinity_SMP_Get_highest_ready,
     _Scheduler_priority_SMP_Move_from_ready_to_scheduled,
-    _Scheduler_SMP_Allocate_processor_exact,
-    _Scheduler_priority_SMP_Get_idle_thread
+    _Scheduler_SMP_Allocate_processor_exact
   );
 
   /*
@@ -314,8 +313,7 @@ static Thread_Control *_Scheduler_priority_affinity_SMP_Enqueue_fifo(
     _Scheduler_SMP_Insert_scheduled_fifo,
     _Scheduler_priority_SMP_Move_from_scheduled_to_ready,
     _Scheduler_priority_affinity_SMP_Get_lowest_scheduled,
-    _Scheduler_SMP_Allocate_processor_exact,
-    _Scheduler_priority_SMP_Release_idle_thread
+    _Scheduler_SMP_Allocate_processor_exact
   );
 }
@@ -399,8 +397,7 @@ Thread_Control *_Scheduler_priority_affinity_SMP_Unblock(
   needs_help = _Scheduler_SMP_Unblock(
     context,
     thread,
-    _Scheduler_priority_affinity_SMP_Enqueue_fifo,
-    _Scheduler_priority_SMP_Release_idle_thread
+    _Scheduler_priority_affinity_SMP_Enqueue_fifo
   );
 
   /*
@@ -433,8 +430,7 @@ static Thread_Control *_Scheduler_priority_affinity_SMP_Enqueue_ordered(
     insert_scheduled,
     _Scheduler_priority_SMP_Move_from_scheduled_to_ready,
     _Scheduler_priority_affinity_SMP_Get_lowest_scheduled,
-    _Scheduler_SMP_Allocate_processor_exact,
-    _Scheduler_priority_SMP_Release_idle_thread
+    _Scheduler_SMP_Allocate_processor_exact
   );
 }
@@ -482,9 +478,7 @@ _Scheduler_priority_affinity_SMP_Enqueue_scheduled_ordered(
     insert_ready,
     insert_scheduled,
     _Scheduler_priority_SMP_Move_from_ready_to_scheduled,
-    _Scheduler_SMP_Allocate_processor_exact,
-    _Scheduler_priority_SMP_Get_idle_thread,
-    _Scheduler_priority_SMP_Release_idle_thread
+    _Scheduler_SMP_Allocate_processor_exact
   );
 }
@@ -572,8 +566,7 @@ Thread_Control *_Scheduler_priority_affinity_SMP_Ask_for_help(
     context,
     offers_help,
     needs_help,
-    _Scheduler_priority_affinity_SMP_Enqueue_fifo,
-    _Scheduler_priority_SMP_Release_idle_thread
+    _Scheduler_priority_affinity_SMP_Enqueue_fifo
   );
 
   _Scheduler_priority_affinity_SMP_Check_for_migrations( context );

View File

@@ -93,8 +93,7 @@ void _Scheduler_priority_SMP_Block(
     _Scheduler_priority_SMP_Extract_from_ready,
     _Scheduler_priority_SMP_Get_highest_ready,
     _Scheduler_priority_SMP_Move_from_ready_to_scheduled,
-    _Scheduler_SMP_Allocate_processor_lazy,
-    _Scheduler_priority_SMP_Get_idle_thread
+    _Scheduler_SMP_Allocate_processor_lazy
   );
 }
@@ -116,8 +115,7 @@ static Thread_Control *_Scheduler_priority_SMP_Enqueue_ordered(
     insert_scheduled,
     _Scheduler_priority_SMP_Move_from_scheduled_to_ready,
     _Scheduler_SMP_Get_lowest_scheduled,
-    _Scheduler_SMP_Allocate_processor_lazy,
-    _Scheduler_priority_SMP_Release_idle_thread
+    _Scheduler_SMP_Allocate_processor_lazy
   );
 }
@@ -170,9 +168,7 @@ static Thread_Control *_Scheduler_priority_SMP_Enqueue_scheduled_ordered(
     insert_ready,
     insert_scheduled,
     _Scheduler_priority_SMP_Move_from_ready_to_scheduled,
-    _Scheduler_SMP_Allocate_processor_lazy,
-    _Scheduler_priority_SMP_Get_idle_thread,
-    _Scheduler_priority_SMP_Release_idle_thread
+    _Scheduler_SMP_Allocate_processor_lazy
   );
 }
@@ -214,8 +210,7 @@ Thread_Control *_Scheduler_priority_SMP_Unblock(
   return _Scheduler_SMP_Unblock(
     context,
     thread,
-    _Scheduler_priority_SMP_Enqueue_fifo,
-    _Scheduler_priority_SMP_Release_idle_thread
+    _Scheduler_priority_SMP_Enqueue_fifo
   );
 }
@@ -254,8 +249,7 @@ Thread_Control *_Scheduler_priority_SMP_Ask_for_help(
     context,
     offers_help,
     needs_help,
-    _Scheduler_priority_SMP_Enqueue_fifo,
-    _Scheduler_priority_SMP_Release_idle_thread
+    _Scheduler_priority_SMP_Enqueue_fifo
   );
 }

View File

@@ -165,28 +165,6 @@ static void _Scheduler_simple_SMP_Extract_from_ready(
   _Chain_Extract_unprotected( &node_to_extract->Node );
 }
 
-static Thread_Control *_Scheduler_simple_SMP_Get_idle_thread(
-  Scheduler_Context *context
-)
-{
-  return _Scheduler_SMP_Get_idle_thread(
-    context,
-    _Scheduler_simple_SMP_Extract_from_ready
-  );
-}
-
-static void _Scheduler_simple_SMP_Release_idle_thread(
-  Scheduler_Context *context,
-  Thread_Control *idle
-)
-{
-  _Scheduler_SMP_Release_idle_thread(
-    context,
-    idle,
-    _Scheduler_simple_SMP_Insert_ready_fifo
-  );
-}
-
 void _Scheduler_simple_SMP_Block(
   const Scheduler_Control *scheduler,
   Thread_Control *thread
@@ -200,8 +178,7 @@ void _Scheduler_simple_SMP_Block(
     _Scheduler_simple_SMP_Extract_from_ready,
     _Scheduler_simple_SMP_Get_highest_ready,
     _Scheduler_simple_SMP_Move_from_ready_to_scheduled,
-    _Scheduler_SMP_Allocate_processor_lazy,
-    _Scheduler_simple_SMP_Get_idle_thread
+    _Scheduler_SMP_Allocate_processor_lazy
   );
 }
@@ -223,8 +200,7 @@ static Thread_Control *_Scheduler_simple_SMP_Enqueue_ordered(
     insert_scheduled,
     _Scheduler_simple_SMP_Move_from_scheduled_to_ready,
     _Scheduler_SMP_Get_lowest_scheduled,
-    _Scheduler_SMP_Allocate_processor_lazy,
-    _Scheduler_simple_SMP_Release_idle_thread
+    _Scheduler_SMP_Allocate_processor_lazy
   );
 }
@@ -277,9 +253,7 @@ static Thread_Control *_Scheduler_simple_SMP_Enqueue_scheduled_ordered(
     insert_ready,
     insert_scheduled,
     _Scheduler_simple_SMP_Move_from_ready_to_scheduled,
-    _Scheduler_SMP_Allocate_processor_lazy,
-    _Scheduler_simple_SMP_Get_idle_thread,
-    _Scheduler_simple_SMP_Release_idle_thread
+    _Scheduler_SMP_Allocate_processor_lazy
   );
 }
@@ -321,8 +295,7 @@ Thread_Control *_Scheduler_simple_SMP_Unblock(
   return _Scheduler_SMP_Unblock(
     context,
     thread,
-    _Scheduler_simple_SMP_Enqueue_fifo,
-    _Scheduler_simple_SMP_Release_idle_thread
+    _Scheduler_simple_SMP_Enqueue_fifo
   );
 }
@@ -361,8 +334,7 @@ Thread_Control *_Scheduler_simple_SMP_Ask_for_help(
     context,
     offers_help,
     needs_help,
-    _Scheduler_simple_SMP_Enqueue_fifo,
-    _Scheduler_simple_SMP_Release_idle_thread
+    _Scheduler_simple_SMP_Enqueue_fifo
   );
 }

View File

@@ -900,9 +900,23 @@ static void test_mrsp_obtain_and_release_with_help(test_context *ctx)
   rtems_test_assert(rtems_get_current_processor() == 1);
 
+  /*
+   * With this operation the scheduler instance 0 has now only the main and the
+   * idle threads in the ready set.
+   */
+  sc = rtems_task_suspend(run_task_id);
+  rtems_test_assert(sc == RTEMS_SUCCESSFUL);
+
+  rtems_test_assert(rtems_get_current_processor() == 1);
+
+  change_prio(RTEMS_SELF, 1);
+  change_prio(RTEMS_SELF, 3);
+
   sc = rtems_semaphore_release(ctx->mrsp_ids[0]);
   rtems_test_assert(sc == RTEMS_SUCCESSFUL);
 
+  rtems_test_assert(rtems_get_current_processor() == 0);
+
   assert_prio(RTEMS_SELF, 3);
 
   wait_for_prio(help_task_id, 3);
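
change_prio() is a helper defined elsewhere in this test program. As an assumption (not the actual smpmrsp01 code), it can be thought of as a thin wrapper around rtems_task_set_priority(), roughly:

#include <rtems.h>
#include <assert.h>

/* Hypothetical stand-in for the test's change_prio() helper: set the
 * priority of a task (RTEMS_SELF for the calling task) and discard the
 * previous priority. */
static void change_prio( rtems_id task, rtems_task_priority prio )
{
  rtems_status_code sc;
  rtems_task_priority old;

  sc = rtems_task_set_priority( task, prio, &old );
  assert( sc == RTEMS_SUCCESSFUL );
  (void) old;
}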

View File

@@ -21,283 +21,283 @@ test MrsP obtain and release with help
 [0] RUN -> IDLE (prio 2, node MAIN)
 [1] MAIN -> HELP (prio 2, node HELP)
 [1] HELP -> MAIN (prio 2, node HELP)
-[1] MAIN -> HELP (prio 2, node HELP)
 [0] IDLE -> MAIN (prio 3, node MAIN)
+[1] MAIN -> HELP (prio 2, node HELP)
 test MrsP obtain and release
 test MrsP load
 worker[0]
-sleep = 16
-timeout = 3420
-obtain[0] = 2503
-obtain[1] = 1671
-obtain[2] = 4446
-obtain[3] = 2850
-obtain[4] = 5003
-obtain[5] = 3793
-obtain[6] = 5575
-obtain[7] = 4468
-obtain[8] = 5326
-obtain[9] = 4645
-obtain[10] = 4565
-obtain[11] = 5082
-obtain[12] = 4050
-obtain[13] = 5203
-obtain[14] = 2945
-obtain[15] = 5704
-obtain[16] = 1652
-obtain[17] = 5870
-obtain[18] = 1472
-obtain[19] = 4933
-obtain[20] = 1136
-obtain[21] = 3463
-obtain[22] = 1257
-obtain[23] = 3230
-obtain[24] = 823
-obtain[25] = 2860
-obtain[26] = 736
-obtain[27] = 1270
-obtain[28] = 438
-obtain[29] = 1273
-obtain[30] = 378
-obtain[31] = 422
-cpu[0] = 116428
-cpu[1] = 16133
-cpu[2] = 32982
-cpu[3] = 32557
+sleep = 53
+timeout = 3445
+obtain[0] = 7240
+obtain[1] = 5484
+obtain[2] = 12983
+obtain[3] = 9453
+obtain[4] = 16142
+obtain[5] = 12509
+obtain[6] = 16471
+obtain[7] = 14380
+obtain[8] = 16566
+obtain[9] = 16192
+obtain[10] = 14868
+obtain[11] = 18208
+obtain[12] = 12505
+obtain[13] = 19995
+obtain[14] = 11155
+obtain[15] = 20684
+obtain[16] = 7288
+obtain[17] = 22252
+obtain[18] = 6476
+obtain[19] = 18299
+obtain[20] = 5711
+obtain[21] = 17063
+obtain[22] = 4791
+obtain[23] = 14655
+obtain[24] = 3452
+obtain[25] = 10565
+obtain[26] = 2912
+obtain[27] = 8142
+obtain[28] = 2090
+obtain[29] = 5086
+obtain[30] = 1145
+obtain[31] = 1946
+cpu[0] = 378475
+cpu[1] = 64814
+cpu[2] = 132133
+cpu[3] = 138047
 worker[1]
 sleep = 1
-timeout = 4
-obtain[0] = 2
-obtain[1] = 0
-obtain[2] = 3
-obtain[3] = 8
-obtain[4] = 10
-obtain[5] = 0
-obtain[6] = 7
-obtain[7] = 0
-obtain[8] = 0
-obtain[9] = 0
-obtain[10] = 11
-obtain[11] = 0
-obtain[12] = 26
-obtain[13] = 14
-obtain[14] = 5
-obtain[15] = 0
-obtain[16] = 0
-obtain[17] = 18
-obtain[18] = 12
-obtain[19] = 0
-obtain[20] = 0
-obtain[21] = 0
-obtain[22] = 0
+timeout = 6
+obtain[0] = 19
+obtain[1] = 8
+obtain[2] = 15
+obtain[3] = 24
+obtain[4] = 20
+obtain[5] = 19
+obtain[6] = 14
+obtain[7] = 40
+obtain[8] = 45
+obtain[9] = 20
+obtain[10] = 0
+obtain[11] = 48
+obtain[12] = 13
+obtain[13] = 57
+obtain[14] = 30
+obtain[15] = 48
+obtain[16] = 36
+obtain[17] = 36
+obtain[18] = 19
+obtain[19] = 20
+obtain[20] = 42
+obtain[21] = 44
+obtain[22] = 23
 obtain[23] = 0
 obtain[24] = 0
-obtain[25] = 0
+obtain[25] = 26
 obtain[26] = 0
 obtain[27] = 0
 obtain[28] = 0
 obtain[29] = 0
 obtain[30] = 0
 obtain[31] = 0
-cpu[0] = 140
-cpu[1] = 24
-cpu[2] = 33
-cpu[3] = 36
+cpu[0] = 650
+cpu[1] = 92
+cpu[2] = 379
+cpu[3] = 212
 worker[2]
-sleep = 14
-timeout = 3513
-obtain[0] = 2474
-obtain[1] = 1793
-obtain[2] = 4551
-obtain[3] = 2833
-obtain[4] = 5293
-obtain[5] = 3681
-obtain[6] = 5309
-obtain[7] = 4565
-obtain[8] = 5270
-obtain[9] = 4610
-obtain[10] = 4817
-obtain[11] = 4760
-obtain[12] = 3858
-obtain[13] = 5919
-obtain[14] = 3172
-obtain[15] = 5286
-obtain[16] = 1968
-obtain[17] = 5800
-obtain[18] = 1768
-obtain[19] = 4629
-obtain[20] = 1446
-obtain[21] = 4109
-obtain[22] = 1046
-obtain[23] = 3119
-obtain[24] = 1012
-obtain[25] = 2338
-obtain[26] = 790
-obtain[27] = 1628
-obtain[28] = 432
-obtain[29] = 1281
-obtain[30] = 270
-obtain[31] = 418
-cpu[0] = 15589
-cpu[1] = 121473
-cpu[2] = 31797
-cpu[3] = 31645
+sleep = 51
+timeout = 3731
+obtain[0] = 7182
+obtain[1] = 5663
+obtain[2] = 12945
+obtain[3] = 9229
+obtain[4] = 15592
+obtain[5] = 12125
+obtain[6] = 16767
+obtain[7] = 14480
+obtain[8] = 16620
+obtain[9] = 16098
+obtain[10] = 16409
+obtain[11] = 18109
+obtain[12] = 12995
+obtain[13] = 19452
+obtain[14] = 10719
+obtain[15] = 20024
+obtain[16] = 7769
+obtain[17] = 21913
+obtain[18] = 6636
+obtain[19] = 18524
+obtain[20] = 5952
+obtain[21] = 16411
+obtain[22] = 5228
+obtain[23] = 14456
+obtain[24] = 4292
+obtain[25] = 11143
+obtain[26] = 3019
+obtain[27] = 8023
+obtain[28] = 2006
+obtain[29] = 4664
+obtain[30] = 1109
+obtain[31] = 1976
+cpu[0] = 65356
+cpu[1] = 381723
+cpu[2] = 133444
+cpu[3] = 134588
 worker[3]
 sleep = 1
-timeout = 3
-obtain[0] = 4
-obtain[1] = 0
-obtain[2] = 9
-obtain[3] = 0
-obtain[4] = 5
-obtain[5] = 0
-obtain[6] = 0
-obtain[7] = 8
-obtain[8] = 0
-obtain[9] = 10
-obtain[10] = 0
-obtain[11] = 0
-obtain[12] = 0
+timeout = 11
+obtain[0] = 11
+obtain[1] = 6
+obtain[2] = 33
+obtain[3] = 20
+obtain[4] = 10
+obtain[5] = 10
+obtain[6] = 28
+obtain[7] = 18
+obtain[8] = 27
+obtain[9] = 40
+obtain[10] = 33
+obtain[11] = 36
+obtain[12] = 26
 obtain[13] = 0
-obtain[14] = 0
+obtain[14] = 15
 obtain[15] = 16
 obtain[16] = 0
 obtain[17] = 18
 obtain[18] = 0
-obtain[19] = 0
+obtain[19] = 42
 obtain[20] = 0
-obtain[21] = 1
+obtain[21] = 88
 obtain[22] = 0
 obtain[23] = 24
 obtain[24] = 0
 obtain[25] = 0
 obtain[26] = 0
-obtain[27] = 0
+obtain[27] = 28
 obtain[28] = 0
 obtain[29] = 0
-obtain[30] = 0
+obtain[30] = 31
 obtain[31] = 0
-cpu[0] = 22
-cpu[1] = 123
-cpu[2] = 7
-cpu[3] = 39
+cpu[0] = 136
+cpu[1] = 573
+cpu[2] = 291
+cpu[3] = 121
 worker[4]
-sleep = 19
-timeout = 3025
-obtain[0] = 2574
-obtain[1] = 1845
-obtain[2] = 4423
-obtain[3] = 2985
-obtain[4] = 5086
-obtain[5] = 3679
-obtain[6] = 5286
-obtain[7] = 4447
-obtain[8] = 5885
-obtain[9] = 4771
-obtain[10] = 4857
-obtain[11] = 5467
-obtain[12] = 4554
-obtain[13] = 5210
-obtain[14] = 3547
-obtain[15] = 6169
-obtain[16] = 2337
-obtain[17] = 6109
-obtain[18] = 1797
-obtain[19] = 5136
-obtain[20] = 1646
-obtain[21] = 4251
-obtain[22] = 1041
-obtain[23] = 3235
-obtain[24] = 975
-obtain[25] = 2961
-obtain[26] = 739
-obtain[27] = 1704
-obtain[28] = 554
-obtain[29] = 1344
-obtain[30] = 182
-obtain[31] = 559
-cpu[0] = 5617
-cpu[1] = 5834
-cpu[2] = 100129
-cpu[3] = 99149
+sleep = 47
+timeout = 3278
+obtain[0] = 7397
+obtain[1] = 5723
+obtain[2] = 13399
+obtain[3] = 9018
+obtain[4] = 16575
+obtain[5] = 12731
+obtain[6] = 16571
+obtain[7] = 14376
+obtain[8] = 16786
+obtain[9] = 17022
+obtain[10] = 15889
+obtain[11] = 19338
+obtain[12] = 13240
+obtain[13] = 19055
+obtain[14] = 11533
+obtain[15] = 22667
+obtain[16] = 7521
+obtain[17] = 21826
+obtain[18] = 6320
+obtain[19] = 18522
+obtain[20] = 6874
+obtain[21] = 16498
+obtain[22] = 4983
+obtain[23] = 14210
+obtain[24] = 4019
+obtain[25] = 11510
+obtain[26] = 3425
+obtain[27] = 8809
+obtain[28] = 2002
+obtain[29] = 5197
+obtain[30] = 996
+obtain[31] = 2276
+cpu[0] = 20729
+cpu[1] = 19760
+cpu[2] = 343613
+cpu[3] = 348561
 worker[5]
-sleep = 18
-timeout = 3123
-obtain[0] = 2439
-obtain[1] = 1878
-obtain[2] = 4576
-obtain[3] = 2938
-obtain[4] = 5088
-obtain[5] = 3723
-obtain[6] = 5611
-obtain[7] = 4411
-obtain[8] = 5522
-obtain[9] = 4893
-obtain[10] = 4877
-obtain[11] = 4932
-obtain[12] = 4263
-obtain[13] = 5608
-obtain[14] = 2791
-obtain[15] = 5905
-obtain[16] = 1739
-obtain[17] = 5322
-obtain[18] = 1892
-obtain[19] = 5118
-obtain[20] = 1360
-obtain[21] = 4764
-obtain[22] = 1099
-obtain[23] = 3180
-obtain[24] = 913
-obtain[25] = 2612
-obtain[26] = 807
-obtain[27] = 1588
-obtain[28] = 493
-obtain[29] = 1348
-obtain[30] = 389
-obtain[31] = 471
-cpu[0] = 5554
-cpu[1] = 6139
-cpu[2] = 97852
-cpu[3] = 95573
+sleep = 61
+timeout = 3183
+obtain[0] = 7291
+obtain[1] = 5782
+obtain[2] = 13633
+obtain[3] = 9864
+obtain[4] = 16465
+obtain[5] = 12581
+obtain[6] = 17135
+obtain[7] = 14616
+obtain[8] = 16524
+obtain[9] = 16472
+obtain[10] = 15194
+obtain[11] = 18038
+obtain[12] = 13801
+obtain[13] = 19959
+obtain[14] = 11693
+obtain[15] = 20770
+obtain[16] = 7328
+obtain[17] = 23222
+obtain[18] = 7186
+obtain[19] = 19739
+obtain[20] = 6584
+obtain[21] = 17450
+obtain[22] = 5241
+obtain[23] = 14808
+obtain[24] = 4287
+obtain[25] = 11387
+obtain[26] = 3367
+obtain[27] = 8149
+obtain[28] = 1887
+obtain[29] = 4969
+obtain[30] = 1123
+obtain[31] = 1695
+cpu[0] = 19504
+cpu[1] = 20069
+cpu[2] = 346015
+cpu[3] = 350953
 worker[6]
 sleep = 1
-timeout = 11
-obtain[0] = 2
-obtain[1] = 2
-obtain[2] = 9
-obtain[3] = 4
-obtain[4] = 15
-obtain[5] = 12
-obtain[6] = 7
-obtain[7] = 16
-obtain[8] = 10
-obtain[9] = 20
-obtain[10] = 11
-obtain[11] = 5
-obtain[12] = 0
-obtain[13] = 0
-obtain[14] = 0
-obtain[15] = 16
-obtain[16] = 0
-obtain[17] = 0
-obtain[18] = 0
-obtain[19] = 20
-obtain[20] = 0
-obtain[21] = 44
-obtain[22] = 0
-obtain[23] = 0
+timeout = 15
+obtain[0] = 26
+obtain[1] = 22
+obtain[2] = 45
+obtain[3] = 32
+obtain[4] = 45
+obtain[5] = 76
+obtain[6] = 49
+obtain[7] = 64
+obtain[8] = 99
+obtain[9] = 70
+obtain[10] = 55
+obtain[11] = 48
+obtain[12] = 39
+obtain[13] = 28
+obtain[14] = 60
+obtain[15] = 48
+obtain[16] = 17
+obtain[17] = 74
+obtain[18] = 38
+obtain[19] = 60
+obtain[20] = 63
+obtain[21] = 66
+obtain[22] = 23
+obtain[23] = 48
 obtain[24] = 0
-obtain[25] = 7
+obtain[25] = 78
 obtain[26] = 0
-obtain[27] = 0
+obtain[27] = 43
 obtain[28] = 0
 obtain[29] = 0
 obtain[30] = 0
-obtain[31] = 0
-cpu[0] = 10
-cpu[1] = 6
-cpu[2] = 168
-cpu[3] = 217
+obtain[31] = 32
+cpu[0] = 71
+cpu[1] = 39
+cpu[2] = 1333
+cpu[3] = 1254
 worker[7]
 sleep = 1
 timeout = 0
@@ -337,8 +337,8 @@ worker[7]
 cpu[1] = 0
 cpu[2] = 1
 cpu[3] = 0
-migrations[0] = 110919
-migrations[1] = 110920
-migrations[2] = 109762
-migrations[3] = 112076
+migrations[0] = 437361
+migrations[1] = 437363
+migrations[2] = 441234
+migrations[3] = 433487
 *** END OF TEST SMPMRSP 1 ***