Make the parameter to vPortDefineHeapRegions() const.

Add additional asserts to the Keil CM3 and CM4F ports (other CM3/4 ports already updated).
Add the additional yield necessitated by the mutex held count to the case when configUSE_QUEUE_SETS is 0.
pull/1/head
Richard Barry 11 years ago
parent 4fe2abc792
commit 8aa5fa3459

@@ -381,7 +381,7 @@ typedef struct HeapRegion
* terminated by a HeapRegions_t structure that has a size of 0. The region
* with the lowest start address must appear first in the array.
*/
void vPortDefineHeapRegions( HeapRegion_t *pxHeapRegions );
void vPortDefineHeapRegions( const HeapRegion_t * const pxHeapRegions );
/*

@@ -424,14 +424,14 @@ uint8_t *puc;
}
/*-----------------------------------------------------------*/
void vPortDefineHeapRegions( HeapRegion_t *pxHeapRegions )
void vPortDefineHeapRegions( const HeapRegion_t * const pxHeapRegions )
{
BlockLink_t *pxFirstFreeBlockInRegion = NULL, *pxPreviousFreeBlock;
uint8_t *pucAlignedHeap;
size_t xTotalRegionSize, xTotalHeapSize = 0;
BaseType_t xDefinedRegions = 0;
uint32_t ulAddress;
HeapRegion_t *pxHeapRegion;
const HeapRegion_t *pxHeapRegion;
/* Can only call once! */
configASSERT( pxEnd == NULL );
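
For context, a minimal heap_5 usage sketch (not part of the commit) showing how a const qualified region table works with the new prototype. The buffer names, sizes and the wrapper function are illustrative assumptions; only the zero terminated, ascending address ordering rules come from the comment above.

#include <stdint.h>
#include "FreeRTOS.h"

/* Two application supplied RAM blocks; the sizes are assumptions. */
static uint8_t ucHeapBlock1[ 0x4000 ];
static uint8_t ucHeapBlock2[ 0x2000 ];

/* The table can now be const.  Entries must be ordered by ascending start
address and the table must be terminated by a zero sized region. */
static const HeapRegion_t xHeapRegions[] =
{
	{ ucHeapBlock1, sizeof( ucHeapBlock1 ) },
	{ ucHeapBlock2, sizeof( ucHeapBlock2 ) },
	{ NULL, 0 } /* Terminates the array. */
};

void vApplicationDefineHeap( void ) /* Hypothetical helper name. */
{
	/* heap_5 requires this to be called exactly once, before any kernel
	object is created or memory is allocated. */
	vPortDefineHeapRegions( xHeapRegions );
}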

@@ -110,6 +110,9 @@ is defined. */
#define portNVIC_PENDSVCLEAR_BIT ( 1UL << 27UL )
#define portNVIC_PEND_SYSTICK_CLEAR_BIT ( 1UL << 25UL )
/* Masks off all bits but the VECTACTIVE bits in the ICSR register. */
#define portVECTACTIVE_MASK ( 0x1FUL )
#define portNVIC_PENDSV_PRI ( ( ( uint32_t ) configKERNEL_INTERRUPT_PRIORITY ) << 16UL )
#define portNVIC_SYSTICK_PRI ( ( ( uint32_t ) configKERNEL_INTERRUPT_PRIORITY ) << 24UL )
@@ -268,11 +271,13 @@ __asm void prvStartFirstTask( void )
msr msp, r0
/* Globally enable interrupts. */
cpsie i
cpsie f
dsb
isb
/* Call SVC to start the first task. */
svc 0
nop
nop
}
/*-----------------------------------------------------------*/
@@ -370,6 +375,16 @@ void vPortEnterCritical( void )
uxCriticalNesting++;
__dsb( portSY_FULL_READ_WRITE );
__isb( portSY_FULL_READ_WRITE );
/* This is not the interrupt safe version of the enter critical function so
assert() if it is being called from an interrupt context. Only API
functions that end in "FromISR" can be used in an interrupt. Only assert if
the critical nesting count is 1 to protect against recursive calls if the
assert function also uses a critical section. */
if( uxCriticalNesting == 1 )
{
configASSERT( ( portNVIC_INT_CTRL_REG & portVECTACTIVE_MASK ) == 0 );
}
}
/*-----------------------------------------------------------*/

@@ -123,6 +123,9 @@ is defined. */
#define portPRIORITY_GROUP_MASK ( 0x07UL << 8UL )
#define portPRIGROUP_SHIFT ( 8UL )
/* Masks off all bits but the VECTACTIVE bits in the ICSR register. */
#define portVECTACTIVE_MASK ( 0x1FUL )
/* Constants required to manipulate the VFP. */
#define portFPCCR ( ( volatile uint32_t * ) 0xe000ef34 ) /* Floating point context control register. */
#define portASPEN_AND_LSPEN_BITS ( 0x3UL << 30UL )
@@ -290,11 +293,13 @@ __asm void prvStartFirstTask( void )
msr msp, r0
/* Globally enable interrupts. */
cpsie i
cpsie f
dsb
isb
/* Call SVC to start the first task. */
svc 0
nop
nop
}
/*-----------------------------------------------------------*/
@@ -414,6 +419,16 @@ void vPortEnterCritical( void )
uxCriticalNesting++;
__dsb( portSY_FULL_READ_WRITE );
__isb( portSY_FULL_READ_WRITE );
/* This is not the interrupt safe version of the enter critical function so
assert() if it is being called from an interrupt context. Only API
functions that end in "FromISR" can be used in an interrupt. Only assert if
the critical nesting count is 1 to protect against recursive calls if the
assert function also uses a critical section. */
if( uxCriticalNesting == 1 )
{
configASSERT( ( portNVIC_INT_CTRL_REG & portVECTACTIVE_MASK ) == 0 );
}
}
/*-----------------------------------------------------------*/
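
A rough sketch (not part of the commit) of what the new configASSERT() in both Keil CM3 and CM4F port files catches: a critical section macro that is not interrupt safe being used from an interrupt handler. The handler name is an assumption; the interrupt safe port macros shown instead are the existing ones from the same ports.

#include <stdint.h>
#include "FreeRTOS.h"
#include "task.h"

/* Hypothetical interrupt handler that accesses data shared with tasks. */
void vAnExampleInterruptHandler( void )
{
uint32_t ulSavedInterruptMask;

	/* Wrong inside an interrupt: taskENTER_CRITICAL() ends up in
	vPortEnterCritical(), where the VECTACTIVE field of the ICSR register is
	non-zero, so the new configASSERT() fires. */
	/* taskENTER_CRITICAL(); */

	/* Interrupt safe alternative. */
	ulSavedInterruptMask = portSET_INTERRUPT_MASK_FROM_ISR();
	{
		/* Access the shared data here. */
	}
	portCLEAR_INTERRUPT_MASK_FROM_ISR( ulSavedInterruptMask );
}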

@@ -695,6 +695,14 @@ Queue_t * const pxQueue = ( Queue_t * ) xQueue;
mtCOVERAGE_TEST_MARKER();
}
}
else if( xYieldRequired != pdFALSE )
{
/* This path is a special case that will only get
executed if the task was holding multiple mutexes and
the mutexes were given back in an order that is
different to that in which they were taken. */
queueYIELD_IF_USING_PREEMPTION();
}
else
{
mtCOVERAGE_TEST_MARKER();
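
A rough sketch (not part of the commit) of the scenario the new else-if branch handles when configUSE_QUEUE_SETS is 0: a task gives two mutexes back in the same order they were taken, rather than the reverse order. The task and mutex names are assumptions.

#include "FreeRTOS.h"
#include "task.h"
#include "semphr.h"

static SemaphoreHandle_t xMutexA = NULL, xMutexB = NULL;

static void prvHoldsTwoMutexesTask( void *pvParameters )
{
	( void ) pvParameters;

	for( ;; )
	{
		xSemaphoreTake( xMutexA, portMAX_DELAY );
		xSemaphoreTake( xMutexB, portMAX_DELAY );

		/* The mutexes are given back in the order they were taken, not the
		reverse order - the case the new queueYIELD_IF_USING_PREEMPTION() call
		covers when priority disinheritance makes a context switch necessary. */
		xSemaphoreGive( xMutexA );
		xSemaphoreGive( xMutexB );
	}
}

void vCreateMutexExample( void )
{
	xMutexA = xSemaphoreCreateMutex();
	xMutexB = xSemaphoreCreateMutex();
	xTaskCreate( prvHoldsTwoMutexesTask, "Mtx", configMINIMAL_STACK_SIZE, NULL, tskIDLE_PRIORITY + 1, NULL );
}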
