Merge pull request #7183 from theotherjimmy/refactor-resources

Tools: Extract resources object and implement incremental scan
Cruz Monrreal 2018-07-17 09:04:43 -05:00 committed by GitHub
commit c29fe896a1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
35 changed files with 943 additions and 2061 deletions
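The core of the refactor is a standalone Resources class that owns scanning, with files bucketed by FileType and returned either as bare paths or as FileRef name/path pairs. A minimal sketch of the new surface, pieced together from the calls visible in this diff (the source paths and the toolchain object are placeholders, not part of the commit):

    from tools.notifier.mock import MockNotifier
    from tools.resources import Resources, FileType

    # 'toolchain' would come from prepare_toolchain(); "source" is a placeholder
    res = Resources(MockNotifier()).scan_with_toolchain(
        ["source"], toolchain, inc_dirs=["extra/include"])
    headers = res.get_file_refs(FileType.HEADER)      # FileRef(name, path) pairs
    inc_dirs = res.get_file_paths(FileType.INC_DIR)   # bare path strings
    res.add_files_to_type(FileType.OBJECT, ["main.o"])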


@@ -1 +0,0 @@
*


@@ -1,993 +0,0 @@
/*
* Copyright (c) 2006-2016, ARM Limited, All Rights Reserved
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#if !DEVICE_STORAGE
#error [NOT_SUPPORTED] Storage not supported for this target
#endif
#ifndef AVOID_GREENTEA
#include "greentea-client/test_env.h"
#endif
#include "utest/utest.h"
#include "unity/unity.h"
#include "storage_abstraction/Driver_Storage.h"
#include <string.h>
#include <inttypes.h>
using namespace utest::v1;
extern ARM_DRIVER_STORAGE ARM_Driver_Storage_MTD_K64F;
ARM_DRIVER_STORAGE *drv = &ARM_Driver_Storage_MTD_K64F;
/* temporary buffer to hold data for testing. */
static const unsigned BUFFER_SIZE = 16384;
static uint8_t buffer[BUFFER_SIZE];
/* forward declaration */
void initializationCompleteCallback(int32_t status, ARM_STORAGE_OPERATION operation);
/*
* Most tests need some basic initialization of the driver before proceeding
* with their operations.
*/
static control_t preambleForBasicInitialization(void)
{
ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities();
int32_t rc = drv->Initialize(initializationCompleteCallback);
TEST_ASSERT(rc >= ARM_DRIVER_OK);
if (rc == ARM_DRIVER_OK) {
TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops);
return CaseTimeout(200) + CaseRepeatAll;
} else {
TEST_ASSERT(rc == 1);
return CaseRepeatAll;
}
}
template<typename T>
static void verifyBytePattern(uint64_t addr, size_t sizeofData, T bytePattern)
{
/* we're limited by BUFFER_SIZE in how much we can verify in a single iteration;
* the variable 'amountBeingVerified' captures the size being verified in each
* iteration. */
size_t amountBeingVerified = sizeofData;
if (amountBeingVerified > BUFFER_SIZE) {
amountBeingVerified = BUFFER_SIZE;
}
TEST_ASSERT((amountBeingVerified % sizeof(T)) == 0);
while (sizeofData) {
int32_t rc = drv->ReadData(addr, buffer, amountBeingVerified);
TEST_ASSERT_EQUAL(amountBeingVerified, rc);
for (size_t index = 0; index < amountBeingVerified / sizeof(T); index++) {
// if (bytePattern != ((const T *)buffer)[index]) {
// printf("%u: expected %x, found %x\n", index, bytePattern, ((const T *)buffer)[index]);
// }
TEST_ASSERT_EQUAL(bytePattern, ((const T *)buffer)[index]);
}
sizeofData -= amountBeingVerified;
addr += amountBeingVerified;
}
}
void test_getVersion()
{
ARM_DRIVER_VERSION version = drv->GetVersion();
TEST_ASSERT_EQUAL(version.api, ARM_STORAGE_API_VERSION);
TEST_ASSERT_EQUAL(version.drv, ARM_DRIVER_VERSION_MAJOR_MINOR(1,00));
}
void test_getCapabilities()
{
TEST_ASSERT(sizeof(ARM_STORAGE_CAPABILITIES) == sizeof(uint32_t));
ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities();
TEST_ASSERT_EQUAL(0, capabilities.reserved);
}
void test_getInfo()
{
ARM_STORAGE_INFO info = {};
int32_t rc = drv->GetInfo(&info);
TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc);
TEST_ASSERT_EQUAL(0, info.security.reserved1);
TEST_ASSERT_EQUAL(0, info.security.reserved2);
TEST_ASSERT((info.program_cycles == ARM_STORAGE_PROGRAM_CYCLES_INFINITE) || (info.program_cycles > 0));
TEST_ASSERT(info.total_storage > 0);
}
void initializationCompleteCallback(int32_t status, ARM_STORAGE_OPERATION operation)
{
printf("init complete callback\n");
TEST_ASSERT_EQUAL(1, status);
TEST_ASSERT_EQUAL(operation, ARM_STORAGE_OPERATION_INITIALIZE);
Harness::validate_callback();
}
control_t test_initialize(const size_t call_count)
{
static const unsigned REPEAT_INSTANCES = 3;
printf("in test_initialize with call_count %u\n", call_count);
ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities();
int32_t rc = drv->Initialize(initializationCompleteCallback);
TEST_ASSERT(rc >= ARM_DRIVER_OK);
if (rc == ARM_DRIVER_OK) {
TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops);
return (call_count < REPEAT_INSTANCES) ? (CaseTimeout(200) + CaseRepeatAll) : (control_t) CaseNext;
}
TEST_ASSERT(rc == 1);
return (call_count < REPEAT_INSTANCES) ? CaseRepeatAll : CaseNext;
}
void uninitializationCompleteCallback(int32_t status, ARM_STORAGE_OPERATION operation)
{
printf("uninit complete callback\n");
TEST_ASSERT_EQUAL(status, ARM_DRIVER_OK);
TEST_ASSERT_EQUAL(operation, ARM_STORAGE_OPERATION_UNINITIALIZE);
Harness::validate_callback();
}
control_t test_uninitialize(const size_t call_count)
{
static const unsigned REPEAT_INSTANCES = 3;
printf("in test_uninitialize with call_count %u\n", call_count);
/* update the completion callback. */
if (call_count == 1) {
/* Achieve basic initialization for the driver before anything else. */
return preambleForBasicInitialization();
}
ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities();
int32_t rc = drv->Uninitialize();
if (call_count > 2) {
/* the driver should return some error for repeated un-initialization. */
TEST_ASSERT(rc < ARM_DRIVER_OK);
return (call_count < REPEAT_INSTANCES) ? CaseRepeatAll : CaseNext;
}
TEST_ASSERT(rc >= ARM_DRIVER_OK);
if (rc == ARM_DRIVER_OK) {
/* asynchronous operation */
TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops);
return CaseTimeout(200) + CaseRepeatAll;
}
/* synchronous operation */
TEST_ASSERT(rc == 1);
return (call_count < REPEAT_INSTANCES) ? CaseRepeatAll : CaseNext;
}
void powerControlCompleteCallback(int32_t status, ARM_STORAGE_OPERATION operation)
{
printf("power control complete callback\n");
TEST_ASSERT_EQUAL(status, ARM_DRIVER_OK);
TEST_ASSERT_EQUAL(operation, ARM_STORAGE_OPERATION_POWER_CONTROL);
Harness::validate_callback();
}
control_t test_powerControl(const size_t call_count)
{
static const unsigned REPEAT_INSTANCES = 2;
printf("in test_powerControl with call_count %u\n", call_count);
ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities();
if (call_count == 1) {
/* Achieve basic initialization for the driver before anything else. */
return preambleForBasicInitialization();
}
/* Update the completion callback to 'powerControlCompleteCallback'. */
if (call_count == 2) {
int32_t rc = drv->Initialize(powerControlCompleteCallback);
TEST_ASSERT(rc == 1); /* Expect synchronous completion of initialization; the system must have been
* initialized by the previous iteration. */
}
int32_t rc = drv->PowerControl(ARM_POWER_FULL);
if (rc == ARM_DRIVER_OK) {
TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops);
return (call_count < REPEAT_INSTANCES) ? CaseTimeout(200) + CaseRepeatAll: CaseTimeout(200);
} else {
TEST_ASSERT(rc == 1);
return (call_count < REPEAT_INSTANCES) ? CaseRepeatAll : CaseNext;
}
}
void readDataCompleteCallback(int32_t status, ARM_STORAGE_OPERATION operation)
{
printf("ReadData complete callback\n");
TEST_ASSERT_EQUAL(status, ARM_DRIVER_OK);
TEST_ASSERT_EQUAL(operation, ARM_STORAGE_OPERATION_READ_DATA);
Harness::validate_callback();
}
control_t test_readData(const size_t call_count)
{
static const unsigned REPEAT_INSTANCES = 5;
printf("in test_readData with call_count %u\n", call_count);
ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities();
if (call_count == 1) {
/* Achieve basic initialization for the driver before anything else. */
return preambleForBasicInitialization();
}
/* Update the completion callback to 'readDataCompleteCallback'. */
int32_t rc;
if (call_count == 2) {
rc = drv->Initialize(readDataCompleteCallback);
TEST_ASSERT(rc == 1); /* Expect synchronous completion of initialization; the system must have been
* initialized by the previous iteration. */
}
/* Get the first block. */
ARM_STORAGE_BLOCK firstBlock;
drv->GetNextBlock(NULL, &firstBlock); /* get first block */
TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock));
TEST_ASSERT(firstBlock.size > 0);
ARM_STORAGE_INFO info;
rc = drv->GetInfo(&info);
TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc);
TEST_ASSERT(info.program_unit <= BUFFER_SIZE);
TEST_ASSERT(firstBlock.size >= (REPEAT_INSTANCES - 1) * info.program_unit);
/* choose an increasing address for each iteration. */
uint64_t addr = firstBlock.addr + (call_count - 1) * info.program_unit;
size_t sizeofData = info.program_unit;
rc = drv->ReadData(addr, buffer, sizeofData);
if (rc == ARM_DRIVER_OK) {
TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops);
return (call_count < REPEAT_INSTANCES) ? CaseTimeout(200) + CaseRepeatAll: CaseTimeout(200);
} else {
TEST_ASSERT(rc > 0);
return (call_count < REPEAT_INSTANCES) ? CaseRepeatAll : CaseNext;
}
}
void programDataCompleteCallback(int32_t status, ARM_STORAGE_OPERATION operation)
{
TEST_ASSERT(status >= 0);
static unsigned programIteration = 0;
static const uint32_t BYTE_PATTERN = 0xAA551122;
ARM_STORAGE_BLOCK firstBlock;
drv->GetNextBlock(NULL, &firstBlock); /* get first block */
TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock));
ARM_STORAGE_INFO info;
int32_t rc = drv->GetInfo(&info);
TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc);
const uint64_t addr = firstBlock.addr + programIteration * firstBlock.attributes.erase_unit;
size_t sizeofData = info.program_unit;
ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities();
TEST_ASSERT((operation == ARM_STORAGE_OPERATION_ERASE) || (operation == ARM_STORAGE_OPERATION_PROGRAM_DATA));
if (operation == ARM_STORAGE_OPERATION_ERASE) {
// printf("programming %u bytes at address %lu with pattern 0x%" PRIx32 "\n", sizeofData, (uint32_t)addr, BYTE_PATTERN);
size_t sizeofData = info.program_unit;
TEST_ASSERT(BUFFER_SIZE >= sizeofData);
TEST_ASSERT((sizeofData % sizeof(uint32_t)) == 0);
for (size_t index = 0; index < sizeofData / sizeof(uint32_t); index++) {
((uint32_t *)buffer)[index] = BYTE_PATTERN;
}
status = drv->ProgramData(addr, buffer, sizeofData);
if (status < ARM_DRIVER_OK) {
return; /* failure. this will trigger a timeout and cause test failure. */
}
if (status == ARM_DRIVER_OK) {
TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops);
return; /* We've successfully pended a programData operation; we'll have another
* invocation of this callback when programming completes. */
}
}
/* We come here either because of completion for program-data or as a very
* unlikely fall through from synchronous completion of program-data (above). */
#ifndef __CC_ARM
printf("verifying programmed sector at addr %lu\n", (uint32_t)addr);
#endif
verifyBytePattern(addr, sizeofData, BYTE_PATTERN);
++programIteration;
Harness::validate_callback();
}
control_t test_programDataUsingProgramUnit(const size_t call_count)
{
static const unsigned REPEAT_INSTANCES = 5;
printf("in test_programDataUsingProgramUnit with call_count %u\n", call_count);
if (call_count == 1) {
/* Achieve basic initialization for the driver before anything else. */
return preambleForBasicInitialization();
}
/* Get the first block. */
ARM_STORAGE_BLOCK firstBlock;
drv->GetNextBlock(NULL, &firstBlock); /* get first block */
TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock));
TEST_ASSERT(firstBlock.size > 0);
ARM_STORAGE_INFO info;
int32_t rc = drv->GetInfo(&info);
TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc);
TEST_ASSERT(info.program_unit <= firstBlock.attributes.erase_unit);
TEST_ASSERT(firstBlock.size >= (REPEAT_INSTANCES - 1) * firstBlock.attributes.erase_unit);
/* initialize the buffer to hold the pattern. */
ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities();
/* Update the completion callback to 'programDataCompleteCallback'. */
if (call_count == 2) {
int32_t rc = drv->Initialize(programDataCompleteCallback);
TEST_ASSERT(rc == 1); /* Expect synchronous completion of initialization; the system must have been
* initialized by the previous iteration. */
}
/* choose an increasing address for each iteration. */
uint64_t addr = firstBlock.addr + (call_count - 2) * firstBlock.attributes.erase_unit;
/* erase the sector at 'addr' */
printf("erasing sector at addr %lu\n", (uint32_t)addr);
rc = drv->Erase(addr, firstBlock.attributes.erase_unit);
TEST_ASSERT(rc >= 0);
if (rc == ARM_DRIVER_OK) {
TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops);
return (call_count < REPEAT_INSTANCES) ? CaseTimeout(200) + CaseRepeatAll: CaseTimeout(200);
} else {
TEST_ASSERT_EQUAL(firstBlock.attributes.erase_unit, rc);
verifyBytePattern(addr, firstBlock.attributes.erase_unit, info.erased_value ? (uint8_t)0xFF : (uint8_t)0);
static const uint32_t BYTE_PATTERN = 0xAA551122;
size_t sizeofData = info.program_unit;
TEST_ASSERT(BUFFER_SIZE >= sizeofData);
TEST_ASSERT((sizeofData % sizeof(uint32_t)) == 0);
for (size_t index = 0; index < sizeofData / sizeof(uint32_t); index++) {
((uint32_t *)buffer)[index] = BYTE_PATTERN;
}
/* program the sector at addr */
// printf("programming %u bytes at address %lu with pattern 0x%" PRIx32 "\n", sizeofData, (uint32_t)addr, BYTE_PATTERN);
rc = drv->ProgramData((uint32_t)addr, buffer, sizeofData);
if (rc == ARM_DRIVER_OK) {
TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops);
return (call_count < REPEAT_INSTANCES) ? CaseTimeout(200) + CaseRepeatAll: CaseTimeout(200);
} else {
TEST_ASSERT(rc > 0);
printf("verifying programmed sector at addr %lu\n", (uint32_t)addr);
verifyBytePattern(addr, sizeofData, BYTE_PATTERN);
return (call_count < REPEAT_INSTANCES) ? CaseRepeatAll : CaseNext;
}
}
}
void programDataOptimalCompleteCallback(int32_t status, ARM_STORAGE_OPERATION operation)
{
TEST_ASSERT(status >= 0);
static unsigned programIteration = 0;
static const uint8_t BYTE_PATTERN = 0xAA;
ARM_STORAGE_BLOCK firstBlock;
drv->GetNextBlock(NULL, &firstBlock); /* get first block */
TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock));
const uint64_t addr = firstBlock.addr + programIteration * firstBlock.attributes.erase_unit;
ARM_STORAGE_INFO info;
int32_t rc = drv->GetInfo(&info);
TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc);
size_t sizeofData = info.optimal_program_unit;
ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities();
TEST_ASSERT((operation == ARM_STORAGE_OPERATION_ERASE) || (operation == ARM_STORAGE_OPERATION_PROGRAM_DATA));
if (operation == ARM_STORAGE_OPERATION_ERASE) {
#ifndef __CC_ARM
printf("programming %u bytes at address %lu with pattern 0x%x\n", sizeofData, (uint32_t)addr, BYTE_PATTERN);
#endif
size_t sizeofData = info.optimal_program_unit;
TEST_ASSERT(BUFFER_SIZE >= sizeofData);
memset(buffer, BYTE_PATTERN, sizeofData);
status = drv->ProgramData(addr, buffer, sizeofData);
if (status < ARM_DRIVER_OK) {
return; /* failure. this will trigger a timeout and cause test failure. */
}
if (status == ARM_DRIVER_OK) {
TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops);
return; /* We've successfully pended a programData operation; we'll have another
* invocation of this callback when programming completes. */
}
}
/* We come here either because of completion for program-data or as a very
* unlikely fall through from synchronous completion of program-data (above). */
#ifndef __CC_ARM
printf("verifying programmed sector at addr %lu\n", (uint32_t)addr);
#endif
verifyBytePattern(addr, sizeofData, BYTE_PATTERN);
++programIteration;
Harness::validate_callback();
}
control_t test_programDataUsingOptimalProgramUnit(const size_t call_count)
{
static const unsigned REPEAT_INSTANCES = 5;
printf("in test_programDataUsingOptimalProgramUnit with call_count %u\n", call_count);
if (call_count == 1) {
/* Achieve basic initialization for the driver before anything else. */
return preambleForBasicInitialization();
}
/* Get the first block. */
ARM_STORAGE_BLOCK firstBlock;
drv->GetNextBlock(NULL, &firstBlock); /* get first block */
TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock));
TEST_ASSERT(firstBlock.size > 0);
ARM_STORAGE_INFO info;
int32_t rc = drv->GetInfo(&info);
TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc);
TEST_ASSERT(info.optimal_program_unit <= firstBlock.attributes.erase_unit);
TEST_ASSERT(firstBlock.size >= (REPEAT_INSTANCES - 1) * firstBlock.attributes.erase_unit);
/* initialize the buffer to hold the pattern. */
ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities();
/* Update the completion callback to 'programDataCompleteCallback'. */
if (call_count == 2) {
int32_t rc = drv->Initialize(programDataOptimalCompleteCallback);
TEST_ASSERT(rc == 1); /* Expect synchronous completion of initialization; the system must have been
* initialized by the previous iteration. */
}
/* choose an increasing address for each iteration. */
uint64_t addr = firstBlock.addr + (call_count - 2) * firstBlock.attributes.erase_unit;
/* erase the sector at 'addr' */
printf("erasing sector at addr %lu\n", (uint32_t)addr);
rc = drv->Erase(addr, firstBlock.attributes.erase_unit);
TEST_ASSERT(rc >= 0);
if (rc == ARM_DRIVER_OK) {
TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops);
return (call_count < REPEAT_INSTANCES) ? CaseTimeout(200) + CaseRepeatAll: CaseTimeout(200);
} else {
TEST_ASSERT_EQUAL(firstBlock.attributes.erase_unit, rc);
verifyBytePattern(addr, firstBlock.attributes.erase_unit, info.erased_value ? (uint8_t)0xFF : (uint8_t)0);
static const uint8_t BYTE_PATTERN = 0xAA;
size_t sizeofData = info.optimal_program_unit;
TEST_ASSERT(BUFFER_SIZE >= sizeofData);
memset(buffer, BYTE_PATTERN, sizeofData);
/* program the sector at addr */
printf("programming %u bytes at address %lu with pattern 0x%x\n", sizeofData, (uint32_t)addr, BYTE_PATTERN);
rc = drv->ProgramData((uint32_t)addr, buffer, sizeofData);
if (rc == ARM_DRIVER_OK) {
TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops);
return (call_count < REPEAT_INSTANCES) ? CaseTimeout(200) + CaseRepeatAll: CaseTimeout(200);
} else {
TEST_ASSERT_EQUAL(sizeofData, rc);
printf("verifying programmed sector at addr %lu\n", (uint32_t)addr);
verifyBytePattern(addr, sizeofData, BYTE_PATTERN);
return (call_count < REPEAT_INSTANCES) ? CaseRepeatAll : CaseNext;
}
}
}
void test_eraseWithInvalidParameters(void)
{
int32_t rc;
rc = drv->Erase(0, 0);
TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc);
/* operate before the start of the first block. */
ARM_STORAGE_BLOCK block;
rc = drv->GetNextBlock(NULL, &block); /* get the first block */
TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc);
TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&block));
TEST_ASSERT(block.size > 0);
rc = drv->Erase(block.addr - 1, BUFFER_SIZE);
TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc);
/* operate at an address past the end of the last block */
uint64_t endAddr = block.addr + block.size;
for (; ARM_STORAGE_VALID_BLOCK(&block); drv->GetNextBlock(&block, &block)) {
endAddr = block.addr + block.size;
}
rc = drv->Erase(endAddr + 1, BUFFER_SIZE);
TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc);
ARM_STORAGE_INFO info;
rc = drv->GetInfo(&info);
TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc);
drv->GetNextBlock(NULL, &block); /* get the first block */
TEST_ASSERT(block.size >= block.attributes.erase_unit);
TEST_ASSERT((block.size % block.attributes.erase_unit) == 0);
rc = drv->Erase(block.addr + 1, block.attributes.erase_unit);
TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc);
rc = drv->Erase(block.addr, block.attributes.erase_unit - 1);
TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc);
rc = drv->Erase(block.addr, block.attributes.erase_unit + 1);
TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc);
rc = drv->Erase(block.addr, block.attributes.erase_unit / 2);
TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc);
}
template<size_t ERASE_UNITS_PER_ITERATION>
void eraseCompleteCallback(int32_t status, ARM_STORAGE_OPERATION operation)
{
static unsigned eraseIteration = 0;
#ifndef __CC_ARM
printf("erase<%u> complete callback: iteration %u\n", ERASE_UNITS_PER_ITERATION, eraseIteration);
#endif
TEST_ASSERT_EQUAL(operation, ARM_STORAGE_OPERATION_ERASE);
/* test that the actual sector has been erased */
ARM_STORAGE_BLOCK firstBlock;
drv->GetNextBlock(NULL, &firstBlock); /* get first block */
TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock));
TEST_ASSERT_EQUAL(ERASE_UNITS_PER_ITERATION * firstBlock.attributes.erase_unit, status);
const uint64_t addr = firstBlock.addr + eraseIteration * ERASE_UNITS_PER_ITERATION * firstBlock.attributes.erase_unit;
++eraseIteration;
ARM_STORAGE_INFO info;
int32_t rc = drv->GetInfo(&info);
TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc);
//printf("testing erased sector at addr %lu", (uint32_t)addr);
verifyBytePattern(addr, ERASE_UNITS_PER_ITERATION * firstBlock.attributes.erase_unit, info.erased_value ? (uint8_t)0xFF : (uint8_t)0);
Harness::validate_callback();
}
template <size_t ERASE_UNITS_PER_ITERATION>
control_t test_erase(const size_t call_count)
{
static const unsigned REPEAT_INSTANCES = 5;
printf("in test_erase<%u> with call_count %u\n", ERASE_UNITS_PER_ITERATION, call_count);
if (call_count == 1) {
/* Achieve basic initialization for the driver before anything else. */
return preambleForBasicInitialization();
}
/* Get the first block. */
ARM_STORAGE_BLOCK firstBlock;
drv->GetNextBlock(NULL, &firstBlock); /* get first block */
TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock));
TEST_ASSERT(firstBlock.size > 0);
if (firstBlock.size < ((call_count - 1) * ERASE_UNITS_PER_ITERATION * firstBlock.attributes.erase_unit)) {
printf("firstBlock isn't large enough to support instance %u of test_erase<%u>\n", call_count, ERASE_UNITS_PER_ITERATION);
return CaseNext;
}
/* Update the completion callback to 'eraseCompleteCallback'. */
if (call_count == 2) {
int32_t rc = drv->Initialize(eraseCompleteCallback<ERASE_UNITS_PER_ITERATION>);
TEST_ASSERT(rc == 1); /* Expect synchronous completion of initialization; the system must have been
* initialized by the previous iteration. */
}
ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities();
/* choose an increasing address for each iteration. */
uint64_t addr = firstBlock.addr + (call_count - 2) * ERASE_UNITS_PER_ITERATION * firstBlock.attributes.erase_unit;
printf("erasing %lu bytes at addr %lu\n", (ERASE_UNITS_PER_ITERATION * firstBlock.attributes.erase_unit), (uint32_t)addr);
int32_t rc = drv->Erase(addr, ERASE_UNITS_PER_ITERATION * firstBlock.attributes.erase_unit);
if (rc == ARM_DRIVER_OK) {
TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops);
return (call_count < REPEAT_INSTANCES) ? CaseTimeout(200) + CaseRepeatAll: CaseTimeout(200);
} else {
TEST_ASSERT_EQUAL(ERASE_UNITS_PER_ITERATION * firstBlock.attributes.erase_unit, rc);
ARM_STORAGE_INFO info;
rc = drv->GetInfo(&info);
TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc);
/* test that the actual sector has been erased */
printf("testing erased sector at addr %lu\n", (uint32_t)addr);
verifyBytePattern(addr, ERASE_UNITS_PER_ITERATION * firstBlock.attributes.erase_unit, (uint8_t)0xFF);
return (call_count < REPEAT_INSTANCES) ? CaseRepeatAll : CaseNext;
}
}
void eraseChipCompleteCallback(int32_t status, ARM_STORAGE_OPERATION operation)
{
#ifndef __CC_ARM
printf("eraseChip complete callback\n");
#endif
TEST_ASSERT_EQUAL(status, ARM_DRIVER_OK);
TEST_ASSERT_EQUAL(operation, ARM_STORAGE_OPERATION_ERASE_ALL);
ARM_STORAGE_BLOCK firstBlock;
drv->GetNextBlock(NULL, &firstBlock); /* get first block */
TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock));
uint64_t addr = firstBlock.addr;
/* test that the flash has been erased */
#ifndef __CC_ARM
printf("testing erased chip\n");
#endif
unsigned index = 0;
static const unsigned MAX_VERIFY_ITERATIONS = 5;
while ((index < MAX_VERIFY_ITERATIONS) && (addr < (firstBlock.addr + firstBlock.size))) {
// printf("testing erased chip at addr %lu\n", (uint32_t)addr);
verifyBytePattern(addr, firstBlock.attributes.erase_unit, (uint8_t)0xFF);
index++;
addr += firstBlock.attributes.erase_unit;
}
Harness::validate_callback();
}
control_t test_eraseAll(const size_t call_count)
{
static const unsigned REPEAT_INSTANCES = 5;
printf("in test_eraseAll with call_count %u\n", call_count);
ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities();
if (!capabilities.erase_all) {
printf("chip erase not supported on this flash\n");
return CaseNext;
}
if (call_count == 1) {
/* Achieve basic initialization for the driver before anything else. */
return preambleForBasicInitialization();
}
/* Update the completion callback to 'eraseChipCompleteCallback'. */
if (call_count == 2) {
int32_t rc = drv->Initialize(eraseChipCompleteCallback);
TEST_ASSERT(rc == 1); /* Expect synchronous completion of initialization; the system must have been
* initialized by the previous iteration. */
}
/* Get the first block. */
ARM_STORAGE_BLOCK firstBlock;
drv->GetNextBlock(NULL, &firstBlock); /* get first block */
TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock));
TEST_ASSERT(firstBlock.size > 0);
uint64_t addr = firstBlock.addr;
printf("erasing chip\n");
int32_t rc = drv->EraseAll();
if (rc == ARM_DRIVER_OK) {
TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops);
return (call_count < REPEAT_INSTANCES) ? CaseTimeout(200) + CaseRepeatAll: CaseTimeout(200);
} else {
TEST_ASSERT(rc == 1);
/* test that the flash has been erased */
unsigned index = 0;
static const unsigned MAX_VERIFY_ITERATIONS = 5;
while ((index < MAX_VERIFY_ITERATIONS) && (addr < (firstBlock.addr + firstBlock.size))) {
//printf("testing erased chip at addr %lu", (uint32_t)addr);
ARM_STORAGE_INFO info;
rc = drv->GetInfo(&info);
TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc);
verifyBytePattern(addr, firstBlock.attributes.erase_unit, info.erased_value ? (uint8_t)0xFF : (uint8_t)0);
index++;
addr += firstBlock.attributes.erase_unit;
}
return (call_count < REPEAT_INSTANCES) ? CaseRepeatAll : CaseNext;
}
}
void test_programDataWithInvalidParameters(void)
{
int32_t rc;
rc = drv->ProgramData(0, NULL, 0);
TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc);
rc = drv->ProgramData(0, buffer, 0);
TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc);
rc = drv->ProgramData(0, NULL, BUFFER_SIZE);
TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc);
/* operate before the start of the first block. */
ARM_STORAGE_BLOCK block;
rc = drv->GetNextBlock(NULL, &block); /* get the first block */
TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc);
TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&block));
TEST_ASSERT(block.size > 0);
rc = drv->ProgramData(block.addr - 1, buffer, BUFFER_SIZE);
TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc);
/* operate at an address past the end of the last block */
uint64_t endAddr = block.addr + block.size;
for (; ARM_STORAGE_VALID_BLOCK(&block); drv->GetNextBlock(&block, &block)) {
endAddr = block.addr + block.size;
}
rc = drv->ProgramData(endAddr + 1, buffer, BUFFER_SIZE);
TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc);
ARM_STORAGE_INFO info;
rc = drv->GetInfo(&info);
TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc);
if (info.program_unit <= 1) {
return; /* if program_unit is 1 (or 0), we can't proceed with any alignment tests */
}
drv->GetNextBlock(NULL, &block); /* get the first block */
TEST_ASSERT(block.size >= info.program_unit);
rc = drv->ProgramData(block.addr + 1, buffer, info.program_unit);
TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc);
rc = drv->ProgramData(block.addr, buffer, info.program_unit - 1);
TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc);
rc = drv->ProgramData(block.addr, buffer, info.program_unit + 1);
TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc);
rc = drv->ProgramData(block.addr, buffer, info.program_unit / 2);
TEST_ASSERT_EQUAL(ARM_DRIVER_ERROR_PARAMETER, rc);
}
template <size_t N_UNITS>
void programDataWithMultipleProgramUnitsCallback(int32_t status, ARM_STORAGE_OPERATION operation)
{
TEST_ASSERT(status >= ARM_DRIVER_OK);
ARM_STORAGE_BLOCK firstBlock;
drv->GetNextBlock(NULL, &firstBlock); /* get first block */
TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock));
TEST_ASSERT(firstBlock.size > 0);
ARM_STORAGE_INFO info;
int32_t rc = drv->GetInfo(&info);
TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc);
ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities();
size_t rangeNeededForTest = (N_UNITS * info.program_unit);
/* round-up range to the nearest erase_unit */
rangeNeededForTest = ((rangeNeededForTest + firstBlock.attributes.erase_unit - 1) / firstBlock.attributes.erase_unit) * firstBlock.attributes.erase_unit;
static const uint32_t BYTE_PATTERN = 0xABCDEF00;
if (operation == ARM_STORAGE_OPERATION_ERASE) {
TEST_ASSERT_EQUAL(rangeNeededForTest, status);
TEST_ASSERT((N_UNITS * info.program_unit) <= BUFFER_SIZE);
/* setup byte pattern in buffer */
if (info.program_unit >= sizeof(BYTE_PATTERN)) {
for (size_t index = 0; index < ((N_UNITS * info.program_unit) / sizeof(BYTE_PATTERN)); index++) {
((uint32_t *)buffer)[index] = BYTE_PATTERN;
}
} else {
for (size_t index = 0; index < ((N_UNITS * info.program_unit)); index++) {
buffer[index] = ((const uint8_t *)&BYTE_PATTERN)[0];
}
}
#ifndef __CC_ARM
printf("Callback: programming %lu bytes at address %lu with pattern 0x%lx\n", (N_UNITS * info.program_unit), (uint32_t)firstBlock.addr, BYTE_PATTERN);
#endif
rc = drv->ProgramData(firstBlock.addr, buffer, (N_UNITS * info.program_unit));
TEST_ASSERT(rc >= ARM_DRIVER_OK);
if (rc == ARM_DRIVER_OK) {
TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops);
return; /* We've successfully pended a programData operation; we'll have another
* invocation of this callback when programming completes. */
}
status = rc;
}
TEST_ASSERT_EQUAL((N_UNITS * info.program_unit), status);
#ifndef __CC_ARM
printf("Callback: verifying programmed sector at addr %lu\n", (uint32_t)firstBlock.addr);
#endif
if (info.program_unit >= sizeof(BYTE_PATTERN)) {
verifyBytePattern(firstBlock.addr, (N_UNITS * info.program_unit), BYTE_PATTERN);
} else {
verifyBytePattern(firstBlock.addr, (N_UNITS * info.program_unit), ((const uint8_t *)&BYTE_PATTERN)[0]);
}
Harness::validate_callback();
}
template<size_t N_UNITS>
control_t test_programDataWithMultipleProgramUnits(const size_t call_count)
{
int32_t rc;
printf("in test_programDataWithMultipleProgramUnits<%u> with call_count %u\n", N_UNITS, call_count);
if (call_count == 1) {
/* Achieve basic initialization for the driver before anything else. */
return preambleForBasicInitialization();
}
/* Update the completion callback to 'programDataWithMultipleProgramUnitsCallback'. */
if (call_count == 2) {
rc = drv->Initialize(programDataWithMultipleProgramUnitsCallback<N_UNITS>);
TEST_ASSERT(rc == 1); /* Expect synchronous completion of initialization; the system must have been
* initialized by the previous iteration. */
ARM_STORAGE_BLOCK firstBlock;
drv->GetNextBlock(NULL, &firstBlock); /* get first block */
TEST_ASSERT(ARM_STORAGE_VALID_BLOCK(&firstBlock));
TEST_ASSERT(firstBlock.size > 0);
ARM_STORAGE_INFO info;
int32_t rc = drv->GetInfo(&info);
TEST_ASSERT_EQUAL(ARM_DRIVER_OK, rc);
ARM_STORAGE_CAPABILITIES capabilities = drv->GetCapabilities();
size_t rangeNeededForTest = (N_UNITS * info.program_unit);
/* round-up range to the nearest erase_unit */
rangeNeededForTest = ((rangeNeededForTest + firstBlock.attributes.erase_unit - 1) / firstBlock.attributes.erase_unit) * firstBlock.attributes.erase_unit;
if (firstBlock.size < rangeNeededForTest) {
printf("first block not large enough; rangeNeededForTest: %u\n", rangeNeededForTest);
return CaseNext; /* first block isn't large enough for the intended operation */
}
if (rangeNeededForTest > BUFFER_SIZE) {
printf("buffer (%u) not large enough; rangeNeededForTest: %u\n", BUFFER_SIZE, rangeNeededForTest);
return CaseNext;
}
// printf("erasing %u bytes at addr %lu\n", rangeNeededForTest, (uint32_t)firstBlock.addr);
rc = drv->Erase(firstBlock.addr, rangeNeededForTest);
TEST_ASSERT(rc >= 0);
if (rc == ARM_DRIVER_OK) {
TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops);
return CaseTimeout(500);
} else {
TEST_ASSERT_EQUAL(rangeNeededForTest, rc);
/* setup byte pattern in buffer */
static const uint32_t BYTE_PATTERN = 0xABCDEF00;
if (info.program_unit >= sizeof(BYTE_PATTERN)) {
for (size_t index = 0; index < ((N_UNITS * info.program_unit) / sizeof(BYTE_PATTERN)); index++) {
((uint32_t *)buffer)[index] = BYTE_PATTERN;
}
} else {
for (size_t index = 0; index < ((N_UNITS * info.program_unit)); index++) {
buffer[index] = ((const uint8_t *)&BYTE_PATTERN)[0];
}
}
printf("programming %lu bytes at address %lu with pattern 0x%lx\n", (N_UNITS * info.program_unit), (uint32_t)firstBlock.addr, BYTE_PATTERN);
rc = drv->ProgramData(firstBlock.addr, buffer, (N_UNITS * info.program_unit));
TEST_ASSERT(rc >= 0);
if (rc == ARM_DRIVER_OK) {
TEST_ASSERT_EQUAL(1, capabilities.asynchronous_ops);
return CaseTimeout(500);
} else {
TEST_ASSERT_EQUAL((N_UNITS * info.program_unit), rc);
printf("verifying programmed sector at addr %lu\n", (uint32_t)firstBlock.addr);
if (info.program_unit >= sizeof(BYTE_PATTERN)) {
verifyBytePattern(firstBlock.addr, (N_UNITS * info.program_unit), BYTE_PATTERN);
} else {
verifyBytePattern(firstBlock.addr, (N_UNITS * info.program_unit), ((const uint8_t *)&BYTE_PATTERN)[0]);
}
return CaseNext;
}
}
}
return CaseNext;
}
#ifndef AVOID_GREENTEA
// Custom setup handler required for proper Greentea support
utest::v1::status_t greentea_setup(const size_t number_of_cases)
{
GREENTEA_SETUP(60, "default_auto");
// Call the default reporting function
return greentea_test_setup_handler(number_of_cases);
}
#else
status_t default_setup(const size_t)
{
return STATUS_CONTINUE;
}
#endif
// Specify all your test cases here
Case cases[] = {
Case("get version", test_getVersion),
Case("get capabilities", test_getCapabilities),
Case("get info", test_getInfo),
Case("initialize", test_initialize),
Case("uninitialize", test_uninitialize),
Case("power control", test_powerControl),
Case("erase all", test_eraseAll),
Case("read data", test_readData),
Case("erase with invalid parameters", test_eraseWithInvalidParameters),
Case("erase single unit", test_erase<1>),
Case("erase two units", test_erase<2>),
Case("erase four units", test_erase<4>),
Case("erase eight units", test_erase<8>),
Case("program data with invalid parameters", test_programDataWithInvalidParameters),
Case("program data using program_unit", test_programDataUsingProgramUnit),
Case("program data using optimal_program_unit", test_programDataUsingOptimalProgramUnit),
Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<1>),
Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<2>),
Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<7>),
Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<8>),
Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<9>),
Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<31>),
Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<32>),
Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<33>),
Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<127>),
Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<128>),
Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<129>),
Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<1023>),
Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<1024>),
Case("program data with multiple program units", test_programDataWithMultipleProgramUnits<1025>),
};
// Declare your test specification with a custom setup handler
#ifndef AVOID_GREENTEA
Specification specification(greentea_setup, cases);
#else
Specification specification(default_setup, cases);
#endif
int main(int argc, char** argv)
{
// Run the test specification
Harness::run(specification);
}


@@ -42,6 +42,8 @@ from .paths import (MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES,
                     MBED_CONFIG_FILE, MBED_LIBRARIES_DRIVERS,
                     MBED_LIBRARIES_PLATFORM, MBED_LIBRARIES_HAL,
                     BUILD_DIR)
+from .resources import Resources, FileType, FileRef
+from .notifier.mock import MockNotifier
 from .targets import TARGET_NAMES, TARGET_MAP
 from .libraries import Library
 from .toolchains import TOOLCHAIN_CLASSES
@@ -120,7 +122,7 @@ def add_result_to_report(report, result):
     result_wrap = {0: result}
     report[target][toolchain][id_name].append(result_wrap)

-def get_config(src_paths, target, toolchain_name, app_config=None):
+def get_config(src_paths, target, toolchain_name=None, app_config=None):
     """Get the configuration object for a target-toolchain combination

     Positional arguments:
@@ -132,17 +134,20 @@ def get_config(src_paths, target, toolchain_name=None, app_config=None):
     if not isinstance(src_paths, list):
         src_paths = [src_paths]

-    # Pass all params to the unified prepare_resources()
-    toolchain = prepare_toolchain(src_paths, None, target, toolchain_name,
-                                  app_config=app_config)
-
-    # Scan src_path for config files
-    scan_resources(src_paths, toolchain)
-    if toolchain.config.has_regions:
-        _ = list(toolchain.config.regions)
-
-    cfg, macros = toolchain.config.get_config_data()
-    features = toolchain.config.get_features()
+    res = Resources(MockNotifier())
+    if toolchain_name:
+        toolchain = prepare_toolchain(src_paths, None, target, toolchain_name,
+                                      app_config=app_config)
+        config = toolchain.config
+        res.scan_with_toolchain(src_paths, toolchain, exclude=False)
+    else:
+        config = Config(target, src_paths, app_config=app_config)
+        res.scan_with_config(src_paths, config)
+    if config.has_regions:
+        _ = list(config.regions)
+    cfg, macros = config.get_config_data()
+    features = config.get_features()
     return cfg, macros, features

 def is_official_target(target_name, version):
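Note on the hunk above: with toolchain_name now defaulting to None, get_config can be driven from config data alone; it builds a bare Config and scans with scan_with_config instead of requiring a prepared toolchain. A usage sketch (the target name and source path are placeholders, not part of the commit):

    from tools.build_api import get_config

    # Toolchain-aware scan
    cfg, macros, features = get_config(["source"], "K64F", "GCC_ARM")

    # Config-only scan; no toolchain required
    cfg, macros, features = get_config(["source"], "K64F")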
@@ -440,46 +445,6 @@ def merge_region_list(region_list, destination, notify, padding=b'\xFF'):
             (merged.maxaddr() - merged.minaddr() + 1))
     merged.tofile(destination, format=format.strip("."))

-def scan_resources(src_paths, toolchain, dependencies_paths=None,
-                   inc_dirs=None, base_path=None, collect_ignores=False):
-    """ Scan resources using initialized toolcain
-
-    Positional arguments
-    src_paths - the paths to source directories
-    toolchain - valid toolchain object
-    dependencies_paths - dependency paths that we should scan for include dirs
-    inc_dirs - additional include directories which should be added to
-               the scanner resources
-    """
-    # Scan src_path
-    resources = toolchain.scan_resources(src_paths[0], base_path=base_path,
-                                         collect_ignores=collect_ignores)
-    for path in src_paths[1:]:
-        resources.add(toolchain.scan_resources(path, base_path=base_path,
-                                               collect_ignores=collect_ignores))
-
-    # Scan dependency paths for include dirs
-    if dependencies_paths is not None:
-        for path in dependencies_paths:
-            lib_resources = toolchain.scan_resources(path)
-            resources.inc_dirs.extend(lib_resources.inc_dirs)
-
-    # Add additional include directories if passed
-    if inc_dirs:
-        if isinstance(inc_dirs, list):
-            resources.inc_dirs.extend(inc_dirs)
-        else:
-            resources.inc_dirs.append(inc_dirs)
-
-    # Load resources into the config system which might expand/modify resources
-    # based on config data
-    resources = toolchain.config.load_resources(resources)
-
-    # Set the toolchain's configuration data
-    toolchain.set_config_data(toolchain.config.get_config_data())
-
-    return resources

 def build_project(src_paths, build_path, target, toolchain_name,
                   libraries_paths=None, linker_script=None, clean=False,
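Callers of the removed scan_resources helper now construct a Resources object themselves; the dependency and include-dir handling moved behind scan_with_toolchain. A sketch of the equivalent call, using only names that appear in this commit (the notifier module path is an assumption):

    from tools.notifier.term import TerminalNotifier
    from tools.resources import Resources

    resources = Resources(TerminalNotifier()).scan_with_toolchain(
        src_paths, toolchain, dependencies_paths, inc_dirs=inc_dirs)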
@@ -515,7 +480,6 @@ def build_project(src_paths, build_path, target, toolchain_name,
     stats_depth - depth level for memap to display file/dirs
     ignore - list of paths to add to mbedignore
     """
-
     # Convert src_path to a list if needed
     if not isinstance(src_paths, list):
         src_paths = [src_paths]
@@ -555,16 +519,16 @@
                             vendor_label)

     try:
-        # Call unified scan_resources
-        resources = scan_resources(src_paths, toolchain, inc_dirs=inc_dirs)
+        resources = Resources(notify).scan_with_toolchain(
+            src_paths, toolchain, inc_dirs=inc_dirs)

         # Change linker script if specified
         if linker_script is not None:
-            resources.linker_script = linker_script
+            resources.add_file_ref(linker_script, linker_script)

         # Compile Sources
-        objects = toolchain.compile_sources(resources, resources.inc_dirs)
-        resources.objects.extend(objects)
+        objects = toolchain.compile_sources(resources, sorted(resources.get_file_paths(FileType.INC_DIR)))
+        resources.add_files_to_type(FileType.OBJECT, objects)

         # Link Program
         if toolchain.config.has_regions:
@@ -596,7 +560,7 @@
             map_html = join(build_path, name + "_map.html")
             memap_instance.generate_output('html', stats_depth, map_html)

-        resources.detect_duplicates(toolchain)
+        resources.detect_duplicates()

         if report != None:
             end = time()
@@ -663,6 +627,7 @@ def build_library(src_paths, build_path, target, toolchain_name,
     # Convert src_path to a list if needed
     if not isinstance(src_paths, list):
         src_paths = [src_paths]
+    src_paths = [relpath(s) for s in src_paths]

     # Build path
     if archive:
@@ -714,31 +679,25 @@
         raise Exception(error_msg)

     try:
-        # Call unified scan_resources
-        resources = scan_resources(src_paths, toolchain,
-                                   dependencies_paths=dependencies_paths,
-                                   inc_dirs=inc_dirs)
+        res = Resources(notify).scan_with_toolchain(
+            src_paths, toolchain, dependencies_paths, inc_dirs=inc_dirs)

         # Copy headers, objects and static libraries - all files needed for
         # static lib
-        toolchain.copy_files(resources.headers, build_path, resources=resources)
-        toolchain.copy_files(resources.objects, build_path, resources=resources)
-        toolchain.copy_files(resources.libraries, build_path,
-                             resources=resources)
-        toolchain.copy_files(resources.json_files, build_path,
-                             resources=resources)
-        if resources.linker_script:
-            toolchain.copy_files(resources.linker_script, build_path,
-                                 resources=resources)
-
-        if resources.hex_files:
-            toolchain.copy_files(resources.hex_files, build_path,
-                                 resources=resources)
+        to_copy = (
+            res.get_file_refs(FileType.HEADER) +
+            res.get_file_refs(FileType.OBJECT) +
+            res.get_file_refs(FileType.LIB) +
+            res.get_file_refs(FileType.JSON) +
+            res.get_file_refs(FileType.LD_SCRIPT) +
+            res.get_file_refs(FileType.HEX) +
+            res.get_file_refs(FileType.BIN)
+        )
+        toolchain.copy_files(to_copy, build_path)

         # Compile Sources
-        objects = toolchain.compile_sources(resources, resources.inc_dirs)
-        resources.objects.extend(objects)
+        objects = toolchain.compile_sources(
+            res, res.get_file_paths(FileType.INC_DIR))
+        res.add_files_to_type(FileType.OBJECT, objects)

         if archive:
             toolchain.build_library(objects, build_path, name)
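copy_files now consumes FileRef entries rather than bare paths. Judging from the FileRef(basename(p), p) constructions elsewhere in this diff, a FileRef pairs a destination-relative name with a source path; a minimal sketch under that assumption:

    from os.path import basename
    from tools.resources import FileRef, FileType

    # Wrap bare paths as (name, path) references before copying
    refs = [FileRef(basename(p), p)
            for p in res.get_file_paths(FileType.HEADER)]
    toolchain.copy_files(refs, build_path)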
@@ -752,8 +711,6 @@
             end = time()
             cur_result["elapsed_time"] = end - start
             cur_result["result"] = "OK"
-
-
             add_result_to_report(report, cur_result)

         return True
@@ -819,7 +776,6 @@ def build_lib(lib_id, target, toolchain_name, clean=False, macros=None,
     build_path = lib.build_dir
     dependencies_paths = lib.dependencies
     inc_dirs = lib.inc_dirs
-    inc_dirs_ext = lib.inc_dirs_ext

     if not isinstance(src_paths, list):
         src_paths = [src_paths]
@@ -827,7 +783,7 @@
     # The first path will give the name to the library
     name = basename(src_paths[0])

-    if report != None:
+    if report is not None:
         start = time()
         id_name = name.upper()
         description = name
@@ -874,41 +830,15 @@
         config.add_config_files([MBED_CONFIG_FILE])

         # Scan Resources
-        resources = []
-        for src_path in src_paths:
-            resources.append(toolchain.scan_resources(src_path))
-
-        # Add extra include directories / files which are required by library
-        # This files usually are not in the same directory as source files so
-        # previous scan will not include them
-        if inc_dirs_ext is not None:
-            for inc_ext in inc_dirs_ext:
-                resources.append(toolchain.scan_resources(inc_ext))
-
-        # Dependencies Include Paths
-        dependencies_include_dir = []
-        if dependencies_paths is not None:
-            for path in dependencies_paths:
-                lib_resources = toolchain.scan_resources(path)
-                dependencies_include_dir.extend(lib_resources.inc_dirs)
-                dependencies_include_dir.extend(map(dirname, lib_resources.inc_dirs))
-
-        if inc_dirs:
-            dependencies_include_dir.extend(inc_dirs)
-
-        # Add other discovered configuration data to the configuration object
-        for res in resources:
-            config.load_resources(res)
-        toolchain.set_config_data(toolchain.config.get_config_data())
+        resources = Resources(notify).scan_with_toolchain(
+            src_paths + (lib.inc_dirs_ext or []), toolchain,
+            inc_dirs=inc_dirs, dependencies_paths=dependencies_paths)

         # Copy Headers
-        for resource in resources:
-            toolchain.copy_files(resource.headers, build_path,
-                                 resources=resource)
-
-        dependencies_include_dir.extend(
-            toolchain.scan_resources(build_path).inc_dirs)
+        toolchain.copy_files(
+            resources.get_file_refs(FileType.HEADER), build_path)
+        dependencies_include_dir = Resources(notify).scan_with_toolchain(
+            [build_path], toolchain).inc_dirs

         # Compile Sources
         objects = []
@@ -936,13 +866,31 @@
         # Let Exception propagate
         raise

-# We do have unique legacy conventions about how we build and package the mbed
-# library
+
+# A number of compiled files need to be copied as objects as the linker
+# will not search for weak symbol overrides in archives. These are:
+#   - mbed_retarget.o: to make sure that the C standard lib symbols get
+#     overridden
+#   - mbed_board.o: `mbed_die` is weak
+#   - mbed_overrides.o: this contains platform overrides of various
+#     weak SDK functions
+#   - mbed_main.o: this contains main redirection
+#   - mbed_sdk_boot.o: this contains the main boot code in
+#   - PeripheralPins.o: PinMap can be weak
+SEPARATE_NAMES = [
+    'PeripheralPins.o',
+    'mbed_retarget.o',
+    'mbed_board.o',
+    'mbed_overrides.o',
+    'mbed_main.o',
+    'mbed_sdk_boot.o',
+]
+

 def build_mbed_libs(target, toolchain_name, clean=False, macros=None,
                     notify=None, jobs=1, report=None, properties=None,
                     build_profile=None, ignore=None):
-    """ Function returns True is library was built and false if building was
-    skipped
+    """ Build legacy libraries for a target and toolchain pair

     Positional arguments:
     target - the MCU or board that the project will compile for
@@ -957,32 +905,36 @@
     properties - UUUUHHHHH beats me
     build_profile - a dict of flags that will be passed to the compiler
     ignore - list of paths to add to mbedignore
+    Return - True if target + toolchain built correctly, False if not supported
     """

-    if report != None:
+    if report is not None:
         start = time()
         id_name = "MBED"
         description = "mbed SDK"
         vendor_label = target.extra_labels[0]
         cur_result = None
         prep_report(report, target.name, toolchain_name, id_name)
-        cur_result = create_result(target.name, toolchain_name, id_name,
-                                   description)
-
-        if properties != None:
-            prep_properties(properties, target.name, toolchain_name,
-                            vendor_label)
+        cur_result = create_result(
+            target.name, toolchain_name, id_name, description)
+        if properties is not None:
+            prep_properties(
+                properties, target.name, toolchain_name, vendor_label)

-    # Check toolchain support
     if toolchain_name not in target.supported_toolchains:
         supported_toolchains_text = ", ".join(target.supported_toolchains)
-        print('%s target is not yet supported by toolchain %s' %
-              (target.name, toolchain_name))
-        print('%s target supports %s toolchain%s' %
-              (target.name, supported_toolchains_text, 's'
-               if len(target.supported_toolchains) > 1 else ''))
+        notify.info('The target {} does not support the toolchain {}'.format(
+            target.name,
+            toolchain_name
+        ))
+        notify.info('{} supports {} toolchain{}'.format(
+            target.name,
+            supported_toolchains_text,
+            's' if len(target.supported_toolchains) > 1 else ''
+        ))

-        if report != None:
+        if report is not None:
             cur_result["result"] = "SKIP"
             add_result_to_report(report, cur_result)
@@ -990,78 +942,59 @@
     try:
         # Source and Build Paths
-        build_target = join(MBED_LIBRARIES, "TARGET_" + target.name)
-        build_toolchain = join(MBED_LIBRARIES, mbed2_obj_path(target.name, toolchain_name))
+        build_toolchain = join(
+            MBED_LIBRARIES, mbed2_obj_path(target.name, toolchain_name))
         mkdir(build_toolchain)

-        # Toolchain
-        tmp_path = join(MBED_LIBRARIES, '.temp', mbed2_obj_path(target.name, toolchain_name))
+        tmp_path = join(
+            MBED_LIBRARIES,
+            '.temp',
+            mbed2_obj_path(target.name, toolchain_name)
+        )
         mkdir(tmp_path)

+        # Toolchain and config
         toolchain = prepare_toolchain(
             [""], tmp_path, target, toolchain_name, macros=macros, notify=notify,
             build_profile=build_profile, jobs=jobs, clean=clean, ignore=ignore)
-
-        # Take into account the library configuration (MBED_CONFIG_FILE)
         config = toolchain.config
         config.add_config_files([MBED_CONFIG_FILE])
         toolchain.set_config_data(toolchain.config.get_config_data())

-        # mbed
-        notify.info("Building library %s (%s, %s)" %
-                    ('MBED', target.name, toolchain_name))
-
-        # Common Headers
-        toolchain.copy_files([MBED_HEADER], MBED_LIBRARIES)
+        # distribute header files
+        toolchain.copy_files(
+            [FileRef(basename(MBED_HEADER), MBED_HEADER)], MBED_LIBRARIES)
         library_incdirs = [dirname(MBED_LIBRARIES), MBED_LIBRARIES]

         for dir, dest in [(MBED_DRIVERS, MBED_LIBRARIES_DRIVERS),
                           (MBED_PLATFORM, MBED_LIBRARIES_PLATFORM),
                           (MBED_HAL, MBED_LIBRARIES_HAL)]:
-            resources = toolchain.scan_resources(dir)
-            toolchain.copy_files(resources.headers, dest)
+            resources = Resources(notify).scan_with_toolchain([dir], toolchain)
+            toolchain.copy_files(
+                [FileRef(basename(p), p) for p
+                 in resources.get_file_paths(FileType.HEADER)],
+                dest)
             library_incdirs.append(dest)

-        cmsis_implementation = toolchain.scan_resources(MBED_CMSIS_PATH)
-        toolchain.copy_files(cmsis_implementation.headers, build_target)
-        toolchain.copy_files(cmsis_implementation.linker_script, build_toolchain)
-        toolchain.copy_files(cmsis_implementation.bin_files, build_toolchain)
-
-        hal_implementation = toolchain.scan_resources(MBED_TARGETS_PATH)
-        toolchain.copy_files(hal_implementation.headers +
-                             hal_implementation.hex_files +
-                             hal_implementation.libraries +
-                             [MBED_CONFIG_FILE],
-                             build_target, resources=hal_implementation)
-        toolchain.copy_files(hal_implementation.linker_script, build_toolchain)
-        toolchain.copy_files(hal_implementation.bin_files, build_toolchain)
-        incdirs = toolchain.scan_resources(build_target).inc_dirs
-        objects = toolchain.compile_sources(cmsis_implementation + hal_implementation,
-                                            library_incdirs + incdirs + [tmp_path])
-        toolchain.copy_files(objects, build_toolchain)
+        # collect resources of the libs to compile
+        cmsis_res = Resources(notify).scan_with_toolchain(
+            [MBED_CMSIS_PATH], toolchain)
+        hal_res = Resources(notify).scan_with_toolchain(
+            [MBED_TARGETS_PATH], toolchain)
+        mbed_resources = Resources(notify).scan_with_toolchain(
+            [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL], toolchain)

-        # Common Sources
-        mbed_resources = None
-        for dir in [MBED_DRIVERS, MBED_PLATFORM, MBED_HAL]:
-            mbed_resources += toolchain.scan_resources(dir)
-
-        objects = toolchain.compile_sources(mbed_resources,
-                                            library_incdirs + incdirs)
+        incdirs = cmsis_res.inc_dirs + hal_res.inc_dirs + library_incdirs

-        # A number of compiled files need to be copied as objects as opposed to
-        # way the linker search for symbols in archives. These are:
-        # - mbed_retarget.o: to make sure that the C standard lib symbols get
-        #   overridden
-        # - mbed_board.o: mbed_die is weak
-        # - mbed_overrides.o: this contains platform overrides of various
-        #   weak SDK functions
-        # - mbed_main.o: this contains main redirection
-        # - PeripheralPins.o: PinMap can be weak
-        separate_names, separate_objects = ['PeripheralPins.o', 'mbed_retarget.o', 'mbed_board.o',
-                                            'mbed_overrides.o', 'mbed_main.o', 'mbed_sdk_boot.o'], []
+        # Build Things
+        notify.info("Building library %s (%s, %s)" %
+                    ('MBED', target.name, toolchain_name))
+        objects = toolchain.compile_sources(mbed_resources, incdirs)
+        separate_objects = []

         for obj in objects:
-            for name in separate_names:
+            for name in SEPARATE_NAMES:
                 if obj.endswith(name):
                     separate_objects.append(obj)
@@ -1069,21 +1002,41 @@
                     objects.remove(obj)

         toolchain.build_library(objects, build_toolchain, "mbed")

+        notify.info("Building library %s (%s, %s)" %
+                    ('CMSIS', target.name, toolchain_name))
+        cmsis_objects = toolchain.compile_sources(cmsis_res, incdirs + [tmp_path])
+
+        notify.info("Building library %s (%s, %s)" %
+                    ('HAL', target.name, toolchain_name))
+        hal_objects = toolchain.compile_sources(hal_res, incdirs + [tmp_path])

-        for obj in separate_objects:
-            toolchain.copy_files(obj, build_toolchain)
+        # Copy everything into the build directory
+        to_copy_paths = [
+            hal_res.get_file_paths(FileType.HEADER),
+            hal_res.get_file_paths(FileType.HEX),
+            hal_res.get_file_paths(FileType.BIN),
+            hal_res.get_file_paths(FileType.LIB),
+            cmsis_res.get_file_paths(FileType.HEADER),
+            cmsis_res.get_file_paths(FileType.BIN),
+            cmsis_res.get_file_paths(FileType.LD_SCRIPT),
+            hal_res.get_file_paths(FileType.LD_SCRIPT),
+            [MBED_CONFIG_FILE],
+            cmsis_objects,
+            hal_objects,
+            separate_objects,
+        ]
+        to_copy = [FileRef(basename(p), p) for p in sum(to_copy_paths, [])]
+        toolchain.copy_files(to_copy, build_toolchain)

-        if report != None:
+        if report is not None:
             end = time()
             cur_result["elapsed_time"] = end - start
             cur_result["result"] = "OK"
             add_result_to_report(report, cur_result)

         return True

     except Exception as exc:
-        if report != None:
+        if report is not None:
             end = time()
             cur_result["result"] = "FAIL"
             cur_result["elapsed_time"] = end - start
@ -1091,8 +1044,6 @@ def build_mbed_libs(target, toolchain_name, clean=False, macros=None,
cur_result["output"] += str(exc) cur_result["output"] += str(exc)
add_result_to_report(report, cur_result) add_result_to_report(report, cur_result)
# Let Exception propagate
raise raise
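The hunks above keep a handful of objects out of the mbed archive so that their weak symbols always reach the linker, then gather every per-type path list into one flat copy list. A minimal sketch of the name-based partition, assuming the new SEPARATE_NAMES module constant carries the same entries the removed separate_names list shows:

SEPARATE_NAMES = ['PeripheralPins.o', 'mbed_retarget.o', 'mbed_board.o',
                  'mbed_overrides.o', 'mbed_main.o', 'mbed_sdk_boot.o']

def partition_objects(objects):
    # Objects matching SEPARATE_NAMES are copied loose instead of being
    # archived, so the linker cannot skip their weak-symbol overrides.
    separate = [o for o in objects
                if any(o.endswith(n) for n in SEPARATE_NAMES)]
    archived = [o for o in objects if o not in separate]
    return archived, separate

# partition_objects(["BUILD/main.o", "BUILD/mbed_retarget.o"])
# -> (["BUILD/main.o"], ["BUILD/mbed_retarget.o"])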
View File
@@ -168,23 +168,16 @@ if __name__ == '__main__':
     toolchains = toolchainSet.intersection(set((options.toolchains).split(',')))
     for toolchain in toolchains:
-        id = "%s::%s" % (target_name, toolchain)
-        profile = extract_profile(parser, options, toolchain)
-        notify = TerminalNotifier(options.verbose)
-        try:
-            built_mbed_lib = build_mbed_libs(TARGET_MAP[target_name],
-                                             toolchain,
-                                             notify=notify,
-                                             jobs=options.jobs,
-                                             report=build_report,
-                                             properties=build_properties,
-                                             build_profile=profile)
-        except Exception, e:
-            print str(e)
-            status = False
+        built_mbed_lib = build_mbed_libs(
+            TARGET_MAP[target_name],
+            toolchain,
+            notify=TerminalNotifier(options.verbose),
+            jobs=options.jobs,
+            report=build_report,
+            properties=build_properties,
+            build_profile=extract_profile(parser, options, toolchain),
+        )
     # copy targets.json file as part of the release
     copy(join(dirname(abspath(__file__)), '..', 'targets', 'targets.json'), MBED_LIBRARIES)
View File
@@ -30,6 +30,7 @@ from jinja2 import FileSystemLoader, StrictUndefined
 from jinja2.environment import Environment
 from jsonschema import Draft4Validator, RefResolver
+from ..resources import FileType
 from ..utils import (json_file_to_dict, intelhex_offset, integer,
                      NotSupportedException)
 from ..arm_pack_manager import Cache
@@ -61,6 +62,14 @@ RAM_OVERRIDES = set([
 BOOTLOADER_OVERRIDES = ROM_OVERRIDES | RAM_OVERRIDES
+ALLOWED_FEATURES = [
+    "UVISOR", "BLE", "CLIENT", "IPV4", "LWIP", "COMMON_PAL", "STORAGE",
+    "NANOSTACK","CRYPTOCELL310",
+    # Nanostack configurations
+    "LOWPAN_BORDER_ROUTER", "LOWPAN_HOST", "LOWPAN_ROUTER", "NANOSTACK_FULL",
+    "THREAD_BORDER_ROUTER", "THREAD_END_DEVICE", "THREAD_ROUTER",
+    "ETHERNET_HOST",
+]
 # Base class for all configuration exceptions
 class ConfigException(Exception):
@@ -396,13 +405,6 @@ class Config(object):
     __unused_overrides = set(["target.bootloader_img", "target.restrict_size",
                               "target.mbed_app_start", "target.mbed_app_size"])
-    # Allowed features in configurations
-    __allowed_features = [
-        "UVISOR", "BLE", "CLIENT", "IPV4", "LWIP", "COMMON_PAL", "STORAGE", "NANOSTACK","CRYPTOCELL310",
-        # Nanostack configurations
-        "LOWPAN_BORDER_ROUTER", "LOWPAN_HOST", "LOWPAN_ROUTER", "NANOSTACK_FULL", "THREAD_BORDER_ROUTER", "THREAD_END_DEVICE", "THREAD_ROUTER", "ETHERNET_HOST"
-    ]
     @classmethod
     def find_app_config(cls, top_level_dirs):
         app_config_location = None
@@ -1043,7 +1045,7 @@ class Config(object):
             .update_target(self.target)
         for feature in self.target.features:
-            if feature not in self.__allowed_features:
+            if feature not in ALLOWED_FEATURES:
                 raise ConfigException(
                     "Feature '%s' is not a supported features" % feature)
@@ -1084,16 +1086,16 @@ class Config(object):
         while True:
             # Add/update the configuration with any .json files found while
             # scanning
-            self.add_config_files(resources.json_files)
+            self.add_config_files(
+                f.path for f in resources.get_file_refs(FileType.JSON)
+            )
             # Add features while we find new ones
             features = set(self.get_features())
             if features == prev_features:
                 break
-            for feature in features:
-                if feature in resources.features:
-                    resources.add(resources.features[feature])
+            resources.add_features(features)
             prev_features = features
         self.validate_config()
@@ -1103,8 +1105,6 @@ class Config(object):
                 "rtos" in self.lib_config_data):
             raise NotSupportedException("Target does not support mbed OS 5")
-        return resources
     @staticmethod
     def config_to_header(config, fname=None):
         """ Convert the configuration data to the content of a C header file,
View File
@@ -18,15 +18,16 @@
 from __future__ import print_function, division, absolute_import
 import sys
-from os.path import join, abspath, dirname, exists
+from os.path import join, abspath, dirname, exists, isfile
 from os.path import basename, relpath, normpath, splitext
 from os import makedirs, walk
 import copy
 from shutil import rmtree, copyfile
 import zipfile
-from ..build_api import prepare_toolchain, scan_resources
-from ..toolchains import Resources
+from ..resources import Resources, FileType, FileRef
+from ..config import ALLOWED_FEATURES
+from ..build_api import prepare_toolchain
 from ..targets import TARGET_NAMES
 from . import (lpcxpresso, ds5_5, iar, makefile, embitz, coide, kds, simplicity,
                atmelstudio, mcuxpresso, sw4stm32, e2studio, zip, cmsis, uvision,
@@ -161,22 +162,23 @@ def generate_project_files(resources, export_path, target, name, toolchain, ide,
     return files, exporter
-def _inner_zip_export(resources, inc_repos):
-    for loc, res in resources.items():
-        to_zip = (
-            res.headers + res.s_sources + res.c_sources +
-            res.cpp_sources + res.libraries + res.hex_files +
-            [res.linker_script] + res.bin_files + res.objects +
-            res.json_files + res.lib_refs + res.lib_builds)
-        if inc_repos:
-            for directory in res.repo_dirs:
-                for root, _, files in walk(directory):
-                    for repo_file in files:
-                        source = join(root, repo_file)
-                        to_zip.append(source)
-                        res.file_basepath[source] = res.base_path
-            to_zip += res.repo_files
-        yield loc, to_zip
+def _inner_zip_export(resources, prj_files, inc_repos):
+    to_zip = sum((resources.get_file_refs(ftype) for ftype
+                  in Resources.ALL_FILE_TYPES),
+                 [])
+    to_zip.extend(FileRef(basename(pfile), pfile) for pfile in prj_files)
+    for dest, source in resources.get_file_refs(FileType.BLD_REF):
+        target_dir, _ = splitext(dest)
+        dest = join(target_dir, ".bld", "bldrc")
+        to_zip.append(FileRef(dest, source))
+    if inc_repos:
+        for dest, source in resources.get_file_refs(FileType.REPO_DIRS):
+            for root, _, files in walk(source):
+                for repo_file in files:
+                    file_source = join(root, repo_file)
+                    file_dest = join(dest, relpath(file_source, source))
+                    to_zip.append(FileRef(file_dest, file_source))
+    return to_zip
 def zip_export(file_name, prefix, resources, project_files, inc_repos, notify):
     """Create a zip file from an exported project.
@@ -188,32 +190,19 @@ def zip_export(file_name, prefix, resources, project_files, inc_repos, notify):
     project_files - a list of extra files to be added to the root of the prefix
     directory
     """
-    to_zip_list = list(_inner_zip_export(resources, inc_repos))
-    total_files = sum(len(to_zip) for _, to_zip in to_zip_list)
-    total_files += len(project_files)
+    to_zip_list = sorted(set(_inner_zip_export(
+        resources, project_files, inc_repos)))
+    total_files = len(to_zip_list)
     zipped = 0
     with zipfile.ZipFile(file_name, "w") as zip_file:
-        for prj_file in project_files:
-            zip_file.write(prj_file, join(prefix, basename(prj_file)))
-        for loc, to_zip in to_zip_list:
-            res = resources[loc]
-            for source in to_zip:
-                if source:
-                    zip_file.write(
-                        source,
-                        join(prefix, loc,
-                             relpath(source, res.file_basepath[source])))
-                    notify.progress("Zipping", source,
-                                    100 * (zipped / total_files))
-                    zipped += 1
-        for lib, res in resources.items():
-            for source in res.lib_builds:
-                target_dir, _ = splitext(source)
-                dest = join(prefix, loc,
-                            relpath(target_dir, res.file_basepath[source]),
-                            ".bld", "bldrc")
-                zip_file.write(source, dest)
+        for dest, source in to_zip_list:
+            if source and isfile(source):
+                zip_file.write(source, join(prefix, dest))
+                zipped += 1
+                notify.progress("Zipping", source,
+                                100 * (zipped / total_files))
+            else:
+                zipped += 1
 def export_project(src_paths, export_path, target, ide, libraries_paths=None,
@@ -275,23 +264,16 @@ def export_project(src_paths, export_path, target, ide, libraries_paths=None,
     if name is None:
         name = basename(normpath(abspath(src_paths[0])))
-    resource_dict = {loc: sum((toolchain.scan_resources(p, collect_ignores=True)
-                               for p in path),
-                              Resources())
-                     for loc, path in src_paths.items()}
-    resources = Resources()
-    for loc, res in resource_dict.items():
-        temp = copy.deepcopy(res)
-        temp.subtract_basepath(".", loc)
-        resources.add(temp)
+    resources = Resources(notify, collect_ignores=True)
+    resources.add_toolchain_labels(toolchain)
+    for loc, path in src_paths.items():
+        for p in path:
+            resources.add_directory(p, into_path=loc)
     toolchain.build_dir = export_path
     toolchain.config.load_resources(resources)
     toolchain.set_config_data(toolchain.config.get_config_data())
     config_header = toolchain.get_config_header()
-    resources.headers.append(config_header)
-    resources.file_basepath[config_header] = dirname(config_header)
+    resources.add_file_ref(FileType.HEADER, basename(config_header), config_header)
     # Change linker script if specified
     if linker_script is not None:
@@ -300,16 +282,13 @@ def export_project(src_paths, export_path, target, ide, libraries_paths=None,
     files, exporter = generate_project_files(resources, export_path,
                                              target, name, toolchain, ide,
                                              macros=macros)
-    files.append(config_header)
     if zip_proj:
-        for resource in resource_dict.values():
-            for label, res in resource.features.items():
-                resource.add(res)
+        resources.add_features(ALLOWED_FEATURES)
         if isinstance(zip_proj, basestring):
-            zip_export(join(export_path, zip_proj), name, resource_dict,
+            zip_export(join(export_path, zip_proj), name, resources,
                        files + list(exporter.static_files), inc_repos, notify)
         else:
-            zip_export(zip_proj, name, resource_dict,
+            zip_export(zip_proj, name, resources,
                        files + list(exporter.static_files), inc_repos, notify)
     else:
         for static_file in exporter.static_files:
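After this refactor a single Resources object carries every file as a FileRef(name, path) pair, where name is already the archive-relative destination; zip_export just walks that list. A short sketch of the write loop under that assumption:

import zipfile
from os.path import isfile, join

def write_refs(zip_path, prefix, refs):
    # refs: iterable of (dest, source) pairs, dest being archive-relative,
    # as produced by _inner_zip_export above.
    with zipfile.ZipFile(zip_path, "w") as zf:
        for dest, source in sorted(set(refs)):
            if source and isfile(source):
                zf.write(source, join(prefix, dest))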
View File
@@ -59,7 +59,7 @@ class AtmelStudio(Exporter):
             source_folders.append(e)
         libraries = []
-        for lib in self.resources.libraries:
+        for lib in self.libraries:
             l, _ = splitext(basename(lib))
             libraries.append(l[3:])
View File
@@ -69,13 +69,11 @@ class CCES(Exporter):
         Skip macros because headless tools handles them separately
         """
-        config_header = self.toolchain.get_config_header()
         flags = {key + "_flags": copy.deepcopy(value) for key, value \
                     in self.toolchain.flags.iteritems()}
+        config_header = self.config_header_ref
         if config_header:
-            config_header = os.path.relpath(config_header, \
-                                self.resources.file_basepath[config_header])
-            config_header = "\\\"" + self.format_inc_path(config_header) \
+            config_header = "\\\"" + self.format_inc_path(config_header.name) \
                                 + "\\\""
             header_options = self.toolchain.get_config_option(config_header)
             flags['c_flags'] += header_options
@@ -320,7 +318,7 @@ class CCES(Exporter):
         cxx_flags = self.flags['cxx_flags'] + self.flags['common_flags']
         libs = []
-        for libpath in self.resources.libraries:
+        for libpath in self.libraries:
             lib = os.path.splitext(os.path.basename(libpath))[0]
             libs.append(lib[3:]) # skip 'lib' prefix
View File
@@ -68,7 +68,7 @@ class CMake(Exporter):
         srcs = [re.sub(r'^[.]/', '', f) for f in srcs]
         # additional libraries
-        libraries = [self.prepare_lib(basename(lib)) for lib in self.resources.libraries]
+        libraries = [self.prepare_lib(basename(lib)) for lib in self.libraries]
         sys_libs = [self.prepare_sys_lib(lib) for lib in self.toolchain.sys_libs]
         # sort includes reverse, so the deepest dir comes first (ensures short includes)
View File
@@ -143,7 +143,7 @@ class CMSIS(Exporter):
     def generate(self):
         srcs = self.resources.headers + self.resources.s_sources + \
                self.resources.c_sources + self.resources.cpp_sources + \
-               self.resources.objects + self.resources.libraries + \
+               self.resources.objects + self.libraries + \
                [self.resources.linker_script]
         srcs = [fileCMSIS(src, src) for src in srcs if src]
         ctx = {
View File
@@ -90,8 +90,7 @@ class CodeBlocks(GccArm):
                 not x.startswith('obj'))];
         c_sources = sorted([self.filter_dot(s) for s in self.resources.c_sources])
-        libraries = [self.prepare_lib(basename(lib)) for lib
-                     in self.resources.libraries]
+        libraries = [self.prepare_lib(basename(lib)) for lib in self.libraries]
         sys_libs = [self.prepare_sys_lib(lib) for lib
                     in self.toolchain.sys_libs]
         ncs36510fib = (hasattr(self.toolchain.target, 'post_binary_hook') and
View File
@@ -88,7 +88,7 @@ class CoIDE(Exporter):
         })
         libraries = []
-        for lib in self.resources.libraries:
+        for lib in self.libraries:
             l, _ = splitext(basename(lib))
             libraries.append(l[3:])
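The recurring l[3:] idiom in these exporters strips both the extension and the conventional "lib" prefix from a static library name before handing it to the IDE. A tiny sketch of the same transformation:

from os.path import basename, splitext

def prepare_lib_name(lib_path):
    # "libmbed-os.a" -> "mbed-os": drop the extension, then the "lib"
    # prefix, exactly the l[3:] pattern used above.
    name, _ = splitext(basename(lib_path))
    return name[3:]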
View File
@@ -60,7 +60,7 @@ class DS5_5(Exporter):
             'name': self.project_name,
             'include_paths': self.resources.inc_dirs,
             'scatter_file': self.resources.linker_script,
-            'object_files': self.resources.objects + self.resources.libraries,
+            'object_files': self.resources.objects + self.libraries,
             'source_files': source_files,
             'symbols': self.toolchain.get_symbols()
         }
View File
@@ -60,7 +60,7 @@ class EmBitz(Exporter):
         })
         libraries = []
-        for lib in self.resources.libraries:
+        for lib in self.libraries:
             l, _ = splitext(basename(lib))
             libraries.append(l[3:])
View File
@@ -10,6 +10,7 @@ import copy
 from tools.targets import TARGET_MAP
 from tools.utils import mkdir
+from tools.resources import FileType
 class TargetNotSupportedException(Exception):
@@ -87,12 +88,8 @@
         return self.TOOLCHAIN
     def add_config(self):
-        """Add the containgin directory of mbed_config.h to include dirs"""
-        config = self.toolchain.get_config_header()
-        if config:
-            self.resources.inc_dirs.append(
-                dirname(relpath(config,
-                                self.resources.file_basepath[config])))
+        """Add the containing directory of mbed_config.h to include dirs"""
+        pass
     @property
     def flags(self):
@@ -104,9 +101,7 @@
         asm_flags - assembler flags
         common_flags - common options
         """
-        config_header = self.toolchain.get_config_header()
-        flags = {key + "_flags": copy.deepcopy(value) for key, value
-                 in self.toolchain.flags.items()}
+        flags = self.toolchain_flags(self.toolchain)
         asm_defines = self.toolchain.get_compile_options(
             self.toolchain.get_symbols(for_asm=True),
             filter(None, self.resources.inc_dirs),
@@ -115,14 +110,52 @@
         flags['asm_flags'] += asm_defines
         flags['c_flags'] += c_defines
         flags['cxx_flags'] += c_defines
+        config_header = self.config_header_ref
         if config_header:
-            config_header = relpath(config_header,
-                                    self.resources.file_basepath[config_header])
-            flags['c_flags'] += self.toolchain.get_config_option(config_header)
+            flags['c_flags'] += self.toolchain.get_config_option(
+                config_header.name)
             flags['cxx_flags'] += self.toolchain.get_config_option(
-                config_header)
+                config_header.name)
         return flags
+    @property
+    def libraries(self):
+        return [l for l in self.resources.get_file_names(FileType.LIB)
+                if l.endswith(self.toolchain.LIBRARY_EXT)]
+    def toolchain_flags(self, toolchain):
+        """Returns a dictionary of toolchain flags.
+        Keys of the dictionary are:
+        cxx_flags - c++ flags
+        c_flags - c flags
+        ld_flags - linker flags
+        asm_flags - assembler flags
+        common_flags - common options
+        The difference from the above is that it takes a parameter.
+        """
+        flags = {key + "_flags": copy.deepcopy(value) for key, value
+                 in toolchain.flags.items()}
+        config_header = self.config_header_ref
+        if config_header:
+            header_options = self.toolchain.get_config_option(
+                config_header.name)
+            flags['c_flags'] += header_options
+            flags['cxx_flags'] += header_options
+        return flags
+    @property
+    def config_header_ref(self):
+        config_header = self.toolchain.get_config_header()
+        if config_header:
+            def is_config_header(f):
+                return f.path == config_header
+            return filter(
+                is_config_header, self.resources.get_file_refs(FileType.HEADER)
+            )[0]
+        else:
+            return None
     def get_source_paths(self):
         """Returns a list of the directories where source files are contained"""
         source_keys = ['s_sources', 'c_sources', 'cpp_sources', 'hex_files',
@@ -181,8 +214,7 @@
         Positional Arguments:
         src - the src's location
         """
-        rel_path = relpath(src, self.resources.file_basepath[src])
-        path_list = os.path.normpath(rel_path).split(os.sep)
+        path_list = os.path.normpath(src).split(os.sep)
         assert len(path_list) >= 1
         if len(path_list) == 1:
             key = self.project_name
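config_header_ref gives every exporter a single place to resolve the generated mbed_config.h to its FileRef, replacing the per-exporter file_basepath arithmetic removed above. A sketch of the same lookup, with the FileRef shape assumed from this diff:

from collections import namedtuple

FileRef = namedtuple("FileRef", "name path")

def find_header_ref(header_refs, config_header_path):
    # Return the FileRef whose on-disk path matches the generated
    # mbed_config.h, or None, mirroring config_header_ref above.
    for ref in header_refs:
        if ref.path == config_header_path:
            return ref
    return None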
View File
@@ -77,57 +77,6 @@ class GNUARMEclipse(Exporter):
         return apply_supported_whitelist(
             cls.TOOLCHAIN, POST_BINARY_WHITELIST, target)
-    # override
-    @property
-    def flags(self):
-        """Returns a dictionary of toolchain flags.
-        Keys of the dictionary are:
-        cxx_flags - c++ flags
-        c_flags - c flags
-        ld_flags - linker flags
-        asm_flags - assembler flags
-        common_flags - common options
-        The difference from the parent function is that it does not
-        add macro definitions, since they are passed separately.
-        """
-        config_header = self.toolchain.get_config_header()
-        flags = {key + "_flags": copy.deepcopy(value) for key, value
-                 in self.toolchain.flags.items()}
-        if config_header:
-            config_header = relpath(config_header,
-                                    self.resources.file_basepath[config_header])
-            flags['c_flags'] += self.toolchain.get_config_option(config_header)
-            flags['cxx_flags'] += self.toolchain.get_config_option(
-                config_header)
-        return flags
-    def toolchain_flags(self, toolchain):
-        """Returns a dictionary of toolchain flags.
-        Keys of the dictionary are:
-        cxx_flags - c++ flags
-        c_flags - c flags
-        ld_flags - linker flags
-        asm_flags - assembler flags
-        common_flags - common options
-        The difference from the above is that it takes a parameter.
-        """
-        # Note: use the config options from the currently selected toolchain.
-        config_header = self.toolchain.get_config_header()
-        flags = {key + "_flags": copy.deepcopy(value) for key, value
-                 in toolchain.flags.items()}
-        if config_header:
-            config_header = relpath(config_header,
-                                    self.resources.file_basepath[config_header])
-            header_options = self.toolchain.get_config_option(config_header)
-            flags['c_flags'] += header_options
-            flags['cxx_flags'] += header_options
-        return flags
     def validate_resources(self):
         if not self.resources.linker_script:
             raise NotSupportedException("No linker script found.")
@@ -141,7 +90,7 @@ class GNUARMEclipse(Exporter):
         # TODO: use some logger to display additional info if verbose
         libraries = []
-        for lib in self.resources.libraries:
+        for lib in self.libraries:
             l, _ = splitext(basename(lib))
             libraries.append(l[3:])
View File
@@ -109,7 +109,7 @@ class IAR(Exporter):
             raise NotSupportedException("No linker script found.")
         srcs = self.resources.headers + self.resources.s_sources + \
                self.resources.c_sources + self.resources.cpp_sources + \
-               self.resources.objects + self.resources.libraries
+               self.resources.objects + self.libraries
         flags = self.flags
         c_flags = list(set(flags['common_flags']
                            + flags['c_flags']
View File
@@ -33,7 +33,7 @@ class KDS(Exporter):
     def generate(self):
         libraries = []
-        for lib in self.resources.libraries:
+        for lib in self.libraries:
             l, _ = splitext(basename(lib))
             libraries.append(l[3:])
View File
@@ -44,7 +44,7 @@ class LPCXpresso(Exporter):
     def generate(self):
         libraries = []
-        for lib in self.resources.libraries:
+        for lib in self.libraries:
             l, _ = splitext(basename(lib))
             libraries.append(l[3:])
View File
@@ -24,6 +24,7 @@ import sys
 from subprocess import check_output, CalledProcessError, Popen, PIPE
 import shutil
 from jinja2.exceptions import TemplateNotFound
+from tools.resources import FileType
 from tools.export.exporters import Exporter, apply_supported_whitelist
 from tools.utils import NotSupportedException
 from tools.targets import TARGET_MAP
@@ -69,7 +70,7 @@ class Makefile(Exporter):
                        self.resources.cpp_sources]
         libraries = [self.prepare_lib(basename(lib)) for lib
-                     in self.resources.libraries]
+                     in self.libraries]
         sys_libs = [self.prepare_sys_lib(lib) for lib
                     in self.toolchain.sys_libs]
@@ -237,11 +238,12 @@ class Arm(Makefile):
     def generate(self):
         if self.resources.linker_script:
-            sct_file = self.resources.linker_script
+            sct_file = self.resources.get_file_refs(FileType.LD_SCRIPT)[-1]
             new_script = self.toolchain.correct_scatter_shebang(
-                sct_file, join(self.resources.file_basepath[sct_file], "BUILD"))
+                sct_file.path, join("..", dirname(sct_file.name)))
             if new_script is not sct_file:
-                self.resources.linker_script = new_script
+                self.resources.add_files_to_type(
+                    FileType.LD_SCRIPT, [new_script])
                 self.generated_files.append(new_script)
         return super(Arm, self).generate()
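Scatter files now travel as FileRefs too, so the shebang correction receives the on-disk path to read plus a destination-relative directory to rewrite include paths against. A sketch of the unpacking, with FileRef(name, path) as assumed throughout this diff:

from os.path import dirname, join

def scatter_shebang_args(sct_ref):
    # Read from the real path; compute include paths relative to where
    # the corrected script will live, as Arm.generate does above.
    return sct_ref.path, join("..", dirname(sct_ref.name))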
View File
@@ -76,12 +76,13 @@ class MCUXpresso(GNUARMEclipse):
         # TODO: use some logger to display additional info if verbose
-        self.libraries = []
+        libraries = []
         # print 'libraries'
         # print self.resources.libraries
-        for lib in self.resources.libraries:
+        for lib in self.libraries:
             l, _ = splitext(basename(lib))
-            self.libraries.append(l[3:])
+            libraries.append(l[3:])
+        self.libraries = libraries
         self.system_libraries = [
             'stdc++', 'supc++', 'm', 'c', 'gcc', 'nosys'
View File
@@ -36,31 +36,6 @@ class GNUARMNetbeans(Exporter):
     def prepare_sys_lib(libname):
         return "-l" + libname
-    def toolchain_flags(self, toolchain):
-        """Returns a dictionary of toolchain flags.
-        Keys of the dictionary are:
-        cxx_flags - c++ flags
-        c_flags - c flags
-        ld_flags - linker flags
-        asm_flags - assembler flags
-        common_flags - common options
-        The difference from the above is that it takes a parameter.
-        """
-        # Note: use the config options from the currently selected toolchain.
-        config_header = self.toolchain.get_config_header()
-        flags = {key + "_flags": copy.deepcopy(value) for key, value
-                 in toolchain.flags.items()}
-        if config_header:
-            config_header = relpath(config_header,
-                                    self.resources.file_basepath[config_header])
-            header_options = self.toolchain.get_config_option(config_header)
-            flags['c_flags'] += header_options
-            flags['cxx_flags'] += header_options
-        return flags
     @staticmethod
     def get_defines_and_remove_from_flags(flags_in, str_key):
         defines = []
View File
@@ -144,7 +144,7 @@ class SimplicityV3(Exporter):
             main_files.append(source)
         libraries = []
-        for lib in self.resources.libraries:
+        for lib in self.libraries:
             l, _ = splitext(basename(lib))
             if l[3:] not in EXCLUDED_LIBS:
                 libraries.append(l[3:])
View File
@@ -433,12 +433,8 @@ class Sw4STM32(GNUARMEclipse):
         self.resources.win_to_unix()
-        config_header = self.toolchain.get_config_header()
-        if config_header:
-            config_header = relpath(config_header, self.resources.file_basepath[config_header])
         libraries = []
-        for lib in self.resources.libraries:
+        for lib in self.libraries:
             library, _ = splitext(basename(lib))
             libraries.append(library[3:])
@@ -531,7 +527,7 @@ class Sw4STM32(GNUARMEclipse):
             'name': self.project_name,
             'platform': platform,
             'include_paths': self.include_path,
-            'config_header': config_header,
+            'config_header': self.config_header_ref.name,
             'exclude_paths': '|'.join(self.excluded_folders),
             'ld_script': ld_script,
             'library_paths': lib_dirs,
View File
@@ -2,7 +2,7 @@ from __future__ import print_function, absolute_import
 from builtins import str
 import os
-from os.path import sep, normpath, join, exists
+from os.path import sep, normpath, join, exists, dirname
 import ntpath
 import copy
 from collections import namedtuple
@@ -10,6 +10,7 @@ import shutil
 from subprocess import Popen, PIPE
 import re
+from tools.resources import FileType
 from tools.arm_pack_manager import Cache
 from tools.targets import TARGET_MAP
 from tools.export.exporters import Exporter, apply_supported_whitelist
@@ -217,7 +218,7 @@ class Uvision(Exporter):
         srcs = self.resources.headers + self.resources.s_sources + \
                self.resources.c_sources + self.resources.cpp_sources + \
-               self.resources.objects + self.resources.libraries
+               self.resources.objects + self.libraries
         ctx = {
             'name': self.project_name,
             # project_files => dict of generators - file group to generator of
@@ -228,10 +229,10 @@ class Uvision(Exporter):
                 self.resources.inc_dirs).encode('utf-8'),
             'device': DeviceUvision(self.target),
         }
-        sct_file = self.resources.linker_script
+        sct_name, sct_path = self.resources.get_file_refs(FileType.LD_SCRIPT)[0]
         ctx['linker_script'] = self.toolchain.correct_scatter_shebang(
-            sct_file, self.resources.file_basepath[sct_file])
-        if ctx['linker_script'] != sct_file:
+            sct_path, dirname(sct_name))
+        if ctx['linker_script'] != sct_path:
             self.generated_files.append(ctx['linker_script'])
         core = ctx['device'].core
         ctx['cputype'] = core.rstrip("FD")
View File
@@ -53,16 +53,15 @@ if __name__ == '__main__':
         args_error(parser, "argument -m/--mcu is required")
     target = extract_mcus(parser, options)[0]
-    # Toolchain
-    if options.tool is None:
-        args_error(parser, "argument -t/--toolchain is required")
-    toolchain = options.tool[0]
     options.prefix = options.prefix or [""]
     try:
         params, macros, features = get_config(
-            options.source_dir, target, toolchain, app_config=options.app_config)
+            options.source_dir,
+            target,
+            options.tool[0] if options.tool else None,
+            app_config=options.app_config
+        )
         if not params and not macros:
             print("No configuration data available.")
             sys.exit(0)
View File
@@ -1,26 +0,0 @@
-from os import walk
-from os.path import join, abspath, dirname, basename, splitext
-import sys
-ROOT = abspath(join(dirname(__file__), "..", ".."))
-sys.path.insert(0, ROOT)
-from tools.toolchains.gcc import GCC_ARM
-from tools.targets import TARGET_MAP
-from argparse import ArgumentParser
-if __name__ == "__main__":
-    parser = ArgumentParser("Find duplicate file names within a directory structure")
-    parser.add_argument("dirs", help="Directories to search for duplicate file names"
-                        , nargs="*")
-    parser.add_argument("--silent", help="Supress printing of filenames, just return number of duplicates", action="store_true")
-    args = parser.parse_args()
-    toolchain = GCC_ARM(TARGET_MAP["K64F"])
-    resources = sum([toolchain.scan_resources(d) for d in args.dirs], None)
-    scanned_files = {}
-    exit(resources.detect_duplicates(toolchain))
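The deleted script's job now lives in Resources.detect_duplicates, which reports object-name and header-name collisions through the notifier. A minimal sketch of the underlying grouping (file names are made up):

from collections import defaultdict
from os.path import basename, splitext

def duplicate_objects(sources):
    # Two sources that compile to the same object name are ambiguous,
    # which is what detect_duplicates flags.
    groups = defaultdict(set)
    for filename in sources:
        objname, _ = splitext(basename(filename))
        groups[objname].add(filename)
    return {k: v for k, v in groups.items() if len(v) > 1}

# duplicate_objects(["a/dupe.c", "b/dupe.c"]) -> {'dupe': {'a/dupe.c', 'b/dupe.c'}}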
View File
@@ -55,10 +55,11 @@ def resolve_exporter_alias(ide):
 def setup_project(
     ide,
     target,
-    program=None,
-    source_dir=None,
-    build=None,
-    export_path=None
+    zip,
+    program,
+    source_dir,
+    build,
+    export_path,
 ):
     """Generate a name, if not provided, and find dependencies
@@ -82,6 +83,9 @@ def setup_project(
             project_name = TESTS[program]
         else:
             project_name = basename(normpath(realpath(source_dir[0])))
+        if zip:
+            src_paths = {path.strip(".\\/"): [path] for path in source_dir}
+        else:
-        src_paths = {relpath(path, project_dir): [path] for path in source_dir}
+            src_paths = {relpath(path, project_dir): [path] for path in source_dir}
         lib_paths = None
     else:
@@ -124,6 +128,7 @@ def export(target, ide, build=None, src=None, macros=None, project_id=None,
     project_dir, name, src, lib = setup_project(
         ide,
         target,
+        bool(zip_proj),
         program=project_id,
         source_dir=src,
         build=build,
@@ -289,6 +294,13 @@ def get_args(argv):
         default=None
     )
+    parser.add_argument(
+        "-z",
+        action="store_true",
+        default=None,
+        dest="zip",
+    )
     parser.add_argument(
         "--ignore",
         dest="ignore",
@@ -352,7 +364,7 @@ def main():
         src=options.source_dir,
         macros=options.macros,
         project_id=options.program,
-        zip_proj=not bool(options.source_dir),
+        zip_proj=not bool(options.source_dir) or options.zip,
         build_profile=profile,
         app_config=options.app_config,
         export_path=options.build_dir,
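The new -z switch forces a zip export even when explicit --source directories are given; without it, zipping only happened when no sources were passed. A small runnable sketch of that decision, assuming the argparse wiring shown above:

from argparse import ArgumentParser

parser = ArgumentParser()
parser.add_argument("--source", dest="source_dir", action="append", default=None)
parser.add_argument("-z", action="store_true", default=None, dest="zip")

options = parser.parse_args(["--source", "src", "-z"])
# Same expression main() uses above.
zip_proj = not bool(options.source_dir) or options.zip
assert zip_proj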
View File
tools/resources/__init__.py (new file, 542 lines)
@@ -0,0 +1,542 @@
# mbed SDK
# Copyright (c) 2011-2013 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
# The scanning rules and Resources object.
A project in Mbed OS contains metadata in the file system as directory names.
These directory names adhere to a set of rules referred to as scanning rules.
The following is an English version of the scanning rules:
Directory names starting with "TEST_", "TARGET_", "TOOLCHAIN_" and "FEATURE_"
are excluded from a build unless one of the following is true:
* The suffix after "TARGET_" is a target label (see target.labels).
* The suffix after "TOOLCHAIN_" is a toolchain label, defined by the
inheritance hierarchy of the toolchain class.
* The suffix after "FEATURE_" is a member of `target.features`.
"""
from __future__ import print_function, division, absolute_import
import fnmatch
import re
from collections import namedtuple, defaultdict
from copy import copy
from itertools import chain
from os import walk, sep
from os.path import (join, splitext, dirname, relpath, basename, split, normcase,
abspath, exists)
# Support legacy build conventions: the original mbed build system did not have
# standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
# had the knowledge of a list of these directories to be ignored.
LEGACY_IGNORE_DIRS = set([
# Legacy Targets
'LPC11U24',
'LPC1768',
'LPC2368',
'LPC4088',
'LPC812',
'KL25Z',
# Legacy Toolchains
'ARM',
'uARM',
'IAR',
'GCC_ARM',
'GCC_CS',
'GCC_CR',
'GCC_CW',
'GCC_CW_EWL',
'GCC_CW_NEWLIB',
'ARMC6',
# Tests, here for simplicity
'TESTS',
])
LEGACY_TOOLCHAIN_NAMES = {
'ARM_STD':'ARM',
'ARM_MICRO': 'uARM',
'GCC_ARM': 'GCC_ARM',
'GCC_CR': 'GCC_CR',
'IAR': 'IAR',
'ARMC6': 'ARMC6',
}
FileRef = namedtuple("FileRef", "name path")
class FileType(object):
C_SRC = "c"
CPP_SRC = "c++"
ASM_SRC = "s"
HEADER = "header"
INC_DIR = "inc"
LIB_DIR = "libdir"
LIB = "lib"
OBJECT = "o"
HEX = "hex"
BIN = "bin"
JSON = "json"
LD_SCRIPT = "ld"
LIB_REF = "libref"
BLD_REF = "bldref"
REPO_DIR = "repodir"
def __init__(self):
raise NotImplementedError
class Resources(object):
ALL_FILE_TYPES = [
FileType.C_SRC,
FileType.CPP_SRC,
FileType.ASM_SRC,
FileType.HEADER,
FileType.INC_DIR,
FileType.LIB_DIR,
FileType.LIB,
FileType.OBJECT,
FileType.HEX,
FileType.BIN,
FileType.JSON,
FileType.LD_SCRIPT,
FileType.LIB_REF,
FileType.BLD_REF,
FileType.REPO_DIR,
]
def __init__(self, notify, collect_ignores=False):
# publicly accessible things
self.ignored_dirs = []
# Pre-mbed 2.0 ignore dirs
self._legacy_ignore_dirs = set(LEGACY_IGNORE_DIRS)
# Private parameters
self._notify = notify
self._collect_ignores = collect_ignores
# Storage for file references, indexed by file type
self._file_refs = defaultdict(set)
# Incremental scan related
self._label_paths = []
self._labels = {"TARGET": [], "TOOLCHAIN": [], "FEATURE": []}
# Should we convert all paths to unix-style?
self._win_to_unix = False
# Ignore patterns from .mbedignore files and add_ignore_patterns
self._ignore_patterns = []
self._ignore_regex = re.compile("$^")
def ignore_dir(self, directory):
if self._collect_ignores:
self.ignored_dirs.append(directory)
def _collect_duplicates(self, dupe_dict, dupe_headers):
for filename in self.s_sources + self.c_sources + self.cpp_sources:
objname, _ = splitext(basename(filename))
dupe_dict.setdefault(objname, set())
dupe_dict[objname] |= set([filename])
for filename in self.headers:
headername = basename(filename)
dupe_headers.setdefault(headername, set())
dupe_headers[headername] |= set([filename])
return dupe_dict, dupe_headers
def detect_duplicates(self):
"""Detect all potential ambiguities in filenames and report them with
a toolchain notification
"""
count = 0
dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict())
for objname, filenames in dupe_dict.items():
if len(filenames) > 1:
count+=1
self._notify.tool_error(
"Object file %s.o is not unique! It could be made from: %s"\
% (objname, " ".join(filenames)))
for headername, locations in dupe_headers.items():
if len(locations) > 1:
count+=1
self._notify.tool_error(
"Header file %s is not unique! It could be: %s" %\
(headername, " ".join(locations)))
return count
def win_to_unix(self):
self._win_to_unix = True
for file_type in self.ALL_FILE_TYPES:
v = [f._replace(name=f.name.replace('\\', '/')) for
f in self.get_file_refs(file_type)]
self._file_refs[file_type] = v
def __str__(self):
s = []
for (label, file_type) in (
('Include Directories', FileType.INC_DIR),
('Headers', FileType.HEADER),
('Assembly sources', FileType.ASM_SRC),
('C sources', FileType.C_SRC),
('C++ sources', FileType.CPP_SRC),
('Library directories', FileType.LIB_DIR),
('Objects', FileType.OBJECT),
('Libraries', FileType.LIB),
('Hex files', FileType.HEX),
('Bin files', FileType.BIN),
('Linker script', FileType.LD_SCRIPT)
):
resources = self.get_file_refs(file_type)
if resources:
s.append('%s:\n ' % label + '\n '.join(
"%s -> %s" % (name, path) for name, path in resources))
return '\n'.join(s)
def _add_labels(self, prefix, labels):
self._labels[prefix].extend(labels)
prefixed_labels = set("%s_%s" % (prefix, label) for label in labels)
for path, base_path, into_path in self._label_paths:
if basename(path) in prefixed_labels:
self.add_directory(path, base_path, into_path)
self._label_paths = [(p, b, i) for p, b, i in self._label_paths
if basename(p) not in prefixed_labels]
def add_target_labels(self, target):
self._add_labels("TARGET", target.labels)
def add_features(self, features):
self._add_labels("FEATURE", features)
def add_toolchain_labels(self, toolchain):
for prefix, value in toolchain.get_labels().items():
self._add_labels(prefix, value)
self._legacy_ignore_dirs -= set(
[toolchain.target.name, LEGACY_TOOLCHAIN_NAMES[toolchain.name]])
def is_ignored(self, file_path):
"""Check if file path is ignored by any .mbedignore thus far"""
return self._ignore_regex.match(normcase(file_path))
def add_ignore_patterns(self, root, base_path, patterns):
"""Add a series of patterns to the ignored paths
Positional arguments:
root - the directory containing the ignore file
base_path - the location that the scan started from
patterns - the list of patterns we will ignore in the future
"""
real_base = relpath(root, base_path)
if real_base == ".":
self._ignore_patterns.extend(normcase(p) for p in patterns)
else:
self._ignore_patterns.extend(
normcase(join(real_base, pat)) for pat in patterns)
if self._ignore_patterns:
self._ignore_regex = re.compile("|".join(
fnmatch.translate(p) for p in self._ignore_patterns))
def _not_current_label(self, dirname, label_type):
return (dirname.startswith(label_type + "_") and
dirname[len(label_type) + 1:] not in self._labels[label_type])
def add_file_ref(self, file_type, file_name, file_path):
if self._win_to_unix:
ref = FileRef(file_name.replace("\\", "/"), file_path)
else:
ref = FileRef(file_name, file_path)
self._file_refs[file_type].add(ref)
def get_file_refs(self, file_type):
"""Return a list of FileRef for every file of the given type"""
return list(self._file_refs[file_type])
@staticmethod
def _all_parents(files):
for name in files:
components = name.split(sep)
for n in range(1, len(components)):
parent = join(*components[:n])
yield parent
def _get_from_refs(self, file_type, key):
if file_type is FileType.INC_DIR:
parents = set(self._all_parents(self._get_from_refs(
FileType.HEADER, key)))
parents.add(".")
else:
parents = set()
return sorted(
list(parents) + [key(f) for f in self.get_file_refs(file_type)]
)
def get_file_names(self, file_type):
return self._get_from_refs(file_type, lambda f: f.name)
def get_file_paths(self, file_type):
return self._get_from_refs(file_type, lambda f: f.path)
def add_files_to_type(self, file_type, files):
for f in files:
self.add_file_ref(file_type, f, f)
@property
def inc_dirs(self):
return self.get_file_names(FileType.INC_DIR)
@property
def headers(self):
return self.get_file_names(FileType.HEADER)
@property
def s_sources(self):
return self.get_file_names(FileType.ASM_SRC)
@property
def c_sources(self):
return self.get_file_names(FileType.C_SRC)
@property
def cpp_sources(self):
return self.get_file_names(FileType.CPP_SRC)
@property
def lib_dirs(self):
return self.get_file_names(FileType.LIB_DIR)
@property
def objects(self):
return self.get_file_names(FileType.OBJECT)
@property
def libraries(self):
return self.get_file_names(FileType.LIB)
@property
def lib_builds(self):
return self.get_file_names(FileType.BLD_REF)
@property
def lib_refs(self):
return self.get_file_names(FileType.LIB_REF)
@property
def linker_script(self):
options = self.get_file_names(FileType.LD_SCRIPT)
if options:
return options[-1]
else:
return None
@property
def hex_files(self):
return self.get_file_names(FileType.HEX)
@property
def bin_files(self):
return self.get_file_names(FileType.BIN)
@property
def json_files(self):
return self.get_file_names(FileType.JSON)
def add_directory(
self,
path,
base_path=None,
into_path=None,
exclude_paths=None,
):
""" Scan a directory and include its resources in this resources obejct
Positional arguments:
path - the path to search for resources
Keyword arguments
base_path - If this is part of an incremental scan, include the origin
directory root of the scan here
into_path - Pretend that scanned files are within the specified
directory within a project instead of using their actual path
exclude_paths - A list of paths that are to be excluded from a build
"""
self._notify.progress("scan", abspath(path))
if base_path is None:
base_path = path
if into_path is None:
into_path = path
if self._collect_ignores and path in self.ignored_dirs:
self.ignored_dirs.remove(path)
if exclude_paths:
self.add_ignore_patterns(
path, base_path, [join(e, "*") for e in exclude_paths])
for root, dirs, files in walk(path, followlinks=True):
# Check if folder contains .mbedignore
if ".mbedignore" in files:
with open(join(root, ".mbedignore"), "r") as f:
lines = f.readlines()
lines = [l.strip() for l in lines
if l.strip() != "" and not l.startswith("#")]
self.add_ignore_patterns(root, base_path, lines)
root_path = join(relpath(root, base_path))
if self.is_ignored(join(root_path, "")):
self.ignore_dir(root_path)
dirs[:] = []
continue
for d in copy(dirs):
dir_path = join(root, d)
if d == '.hg' or d == '.git':
fake_path = join(into_path, relpath(dir_path, base_path))
self.add_file_ref(FileType.REPO_DIR, fake_path, dir_path)
if (any(self._not_current_label(d, t) for t
in ['TARGET', 'TOOLCHAIN', 'FEATURE'])):
self._label_paths.append((dir_path, base_path, into_path))
self.ignore_dir(dir_path)
dirs.remove(d)
elif (d.startswith('.') or d in self._legacy_ignore_dirs or
self.is_ignored(join(root_path, d, ""))):
self.ignore_dir(dir_path)
dirs.remove(d)
# Add root to include paths
root = root.rstrip("/")
for file in files:
file_path = join(root, file)
self._add_file(file_path, base_path, into_path)
_EXT = {
".c": FileType.C_SRC,
".cc": FileType.CPP_SRC,
".cpp": FileType.CPP_SRC,
".s": FileType.ASM_SRC,
".h": FileType.HEADER,
".hh": FileType.HEADER,
".hpp": FileType.HEADER,
".o": FileType.OBJECT,
".hex": FileType.HEX,
".bin": FileType.BIN,
".json": FileType.JSON,
".a": FileType.LIB,
".ar": FileType.LIB,
".sct": FileType.LD_SCRIPT,
".ld": FileType.LD_SCRIPT,
".icf": FileType.LD_SCRIPT,
".lib": FileType.LIB_REF,
".bld": FileType.BLD_REF,
}
_DIR_EXT = {
".a": FileType.LIB_DIR,
".ar": FileType.LIB_DIR,
}
def _add_file(self, file_path, base_path, into_path):
""" Add a single file into the resources object that was found by
scanning starting as base_path
"""
if (self.is_ignored(relpath(file_path, base_path)) or
basename(file_path).startswith(".")):
self.ignore_dir(relpath(file_path, base_path))
return
fake_path = join(into_path, relpath(file_path, base_path))
_, ext = splitext(file_path)
try:
file_type = self._EXT[ext.lower()]
self.add_file_ref(file_type, fake_path, file_path)
except KeyError:
pass
try:
dir_type = self._DIR_EXT[ext.lower()]
self.add_file_ref(dir_type, dirname(fake_path), dirname(file_path))
except KeyError:
pass
def scan_with_toolchain(self, src_paths, toolchain, dependencies_paths=None,
inc_dirs=None, exclude=True):
""" Scan resources using initialized toolcain
Positional arguments
src_paths - the paths to source directories
toolchain - valid toolchain object
Keyword arguments
dependencies_paths - dependency paths that we should scan for include dirs
inc_dirs - additional include directories which should be added to
the scanner resources
exclude - Exclude the toolchain's build directory from the resources
"""
self.add_toolchain_labels(toolchain)
for path in src_paths:
if exists(path):
into_path = relpath(path).strip(".\\/")
if exclude:
self.add_directory(
path,
into_path=into_path,
exclude_paths=[toolchain.build_dir]
)
else:
self.add_directory(path, into_path=into_path)
# Scan dependency paths for include dirs
if dependencies_paths is not None:
toolchain.progress("dep", dependencies_paths)
for dep in dependencies_paths:
lib_self = self.__class__(self._notify, self._collect_ignores)\
.scan_with_toolchain([dep], toolchain)
self.inc_dirs.extend(lib_self.inc_dirs)
# Add additional include directories if passed
if inc_dirs:
if isinstance(inc_dirs, list):
self.inc_dirs.extend(inc_dirs)
else:
self.inc_dirs.append(inc_dirs)
# Load self into the config system which might expand/modify self
# based on config data
toolchain.config.load_resources(self)
# Set the toolchain's configuration data
toolchain.set_config_data(toolchain.config.get_config_data())
return self
def scan_with_config(self, src_paths, config):
if config.target:
self.add_target_labels(config.target)
for path in src_paths:
if exists(path):
self.add_directory(path)
config.load_resources(self)
return self
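A short usage sketch for the new module, assuming only what this diff shows (MockNotifier provides the progress/tool_error hooks Resources calls; the scanned path is illustrative):

from tools.resources import Resources, FileType
from tools.notifier.mock import MockNotifier

res = Resources(MockNotifier(), collect_ignores=True)
res.add_directory("mbed-os/drivers")   # any source tree; path is made up
for name, path in res.get_file_refs(FileType.CPP_SRC):
    print("%s -> %s" % (name, path))
print(res.inc_dirs)   # include dirs derived from header parent directories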
View File
@@ -18,8 +18,8 @@ limitations under the License.
 import unittest
 from collections import namedtuple
 from mock import patch, MagicMock
-from tools.build_api import prepare_toolchain, build_project, build_library,\
-    scan_resources
+from tools.build_api import prepare_toolchain, build_project, build_library
+from tools.resources import Resources
 from tools.toolchains import TOOLCHAINS
 from tools.notifier.mock import MockNotifier
@@ -67,7 +67,8 @@ class BuildApiTests(unittest.TestCase):
         toolchain = prepare_toolchain(self.src_paths, self.build_path, self.target,
                                       self.toolchain_name, notify=notify)
-        res = scan_resources(self.src_paths, toolchain)
+        res = Resources(MockNotifier()).scan_with_toolchain(
+            self.src_paths, toolchain)
         toolchain.RESPONSE_FILES=False
         toolchain.config_processed = True
@@ -116,7 +117,7 @@
         mock_config_init.assert_called_once_with(self.target, self.src_paths,
                                                  app_config=None)
-    @patch('tools.build_api.scan_resources')
+    @patch('tools.build_api.Resources')
     @patch('tools.build_api.mkdir')
     @patch('os.path.exists')
     @patch('tools.build_api.prepare_toolchain')
@@ -127,7 +128,7 @@
         :param mock_prepare_toolchain: mock of function prepare_toolchain
         :param mock_exists: mock of function os.path.exists
         :param _: mock of function mkdir (not tested)
-        :param __: mock of function scan_resources (not tested)
+        :param __: mock of class Resources (not tested)
         :return:
         """
         notify = MockNotifier()
@@ -146,7 +147,7 @@
         self.assertEqual(args[1]['app_config'], app_config,
                          "prepare_toolchain was called with an incorrect app_config")
-    @patch('tools.build_api.scan_resources')
+    @patch('tools.build_api.Resources')
     @patch('tools.build_api.mkdir')
     @patch('os.path.exists')
     @patch('tools.build_api.prepare_toolchain')
@@ -157,7 +158,7 @@
         :param mock_prepare_toolchain: mock of function prepare_toolchain
         :param mock_exists: mock of function os.path.exists
         :param _: mock of function mkdir (not tested)
-        :param __: mock of function scan_resources (not tested)
+        :param __: mock of class Resources (not tested)
         :return:
         """
         notify = MockNotifier()
@@ -176,7 +177,7 @@
         self.assertEqual(args[1]['app_config'], None,
                          "prepare_toolchain was called with an incorrect app_config")
-    @patch('tools.build_api.scan_resources')
+    @patch('tools.build_api.Resources')
     @patch('tools.build_api.mkdir')
     @patch('os.path.exists')
     @patch('tools.build_api.prepare_toolchain')
@@ -187,7 +188,7 @@
         :param mock_prepare_toolchain: mock of function prepare_toolchain
         :param mock_exists: mock of function os.path.exists
         :param _: mock of function mkdir (not tested)
-        :param __: mock of function scan_resources (not tested)
+        :param __: mock of class Resources (not tested)
         :return:
         """
         notify = MockNotifier()
@@ -203,7 +204,7 @@
         self.assertEqual(args[1]['app_config'], app_config,
                          "prepare_toolchain was called with an incorrect app_config")
-    @patch('tools.build_api.scan_resources')
+    @patch('tools.build_api.Resources')
     @patch('tools.build_api.mkdir')
     @patch('os.path.exists')
     @patch('tools.build_api.prepare_toolchain')
@@ -214,7 +215,7 @@
         :param mock_prepare_toolchain: mock of function prepare_toolchain
         :param mock_exists: mock of function os.path.exists
         :param _: mock of function mkdir (not tested)
-        :param __: mock of function scan_resources (not tested)
+        :param __: mock of class Resources (not tested)
         :return:
         """
         notify = MockNotifier()
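Because these tests now patch the Resources class rather than the removed scan_resources helper, the mock has to be installed where build_api looks the name up. A sketch of the pattern, assuming the same mock package used elsewhere in these tests:

from mock import patch, MagicMock

with patch('tools.build_api.Resources') as mock_resources:
    # The patched class hands back a mock whose scan_with_toolchain
    # yields whatever resource object the test needs.
    mock_resources.return_value.scan_with_toolchain.return_value = MagicMock()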
View File
@@ -33,29 +33,3 @@ def setUp(self):
         self.base_dir = 'base_dir'
         self.target = "K64F"
         self.toolchain_name = "ARM"
-@pytest.mark.parametrize("base_dir", ["base_dir"])
-@pytest.mark.parametrize("target", ["K64F"])
-@pytest.mark.parametrize("toolchain_name", ["ARM"])
-@pytest.mark.parametrize("app_config", ["app_config", None])
-def test_find_tests_app_config(base_dir, target, toolchain_name, app_config):
-    """
-    Test find_tests for correct use of app_config
-    :param base_dir: dummy value for the test base directory
-    :param target: the target to "test" for
-    :param toolchain_name: the toolchain to use for "testing"
-    :param app_config: Application configuration parameter to find tests
-    """
-    set_targets_json_location()
-    with patch('tools.test_api.scan_resources') as mock_scan_resources,\
-         patch('tools.test_api.prepare_toolchain') as mock_prepare_toolchain:
-        mock_scan_resources().inc_dirs.return_value = []
-        find_tests(base_dir, target, toolchain_name, app_config=app_config)
-        args = mock_prepare_toolchain.call_args
-        assert 'app_config' in args[1],\
-            "prepare_toolchain was not called with app_config"
-        assert args[1]['app_config'] == app_config,\
-            "prepare_toolchain was called with an incorrect app_config"
View File
@@ -11,9 +11,13 @@ ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..",
                                     ".."))
 sys.path.insert(0, ROOT)

-from tools.toolchains import TOOLCHAIN_CLASSES, LEGACY_TOOLCHAIN_NAMES,\
-    Resources, TOOLCHAIN_PATHS, mbedToolchain
-from tools.targets import TARGET_MAP
+from tools.toolchains import (
+    TOOLCHAIN_CLASSES,
+    TOOLCHAIN_PATHS,
+    mbedToolchain,
+)
+from tools.resources import LEGACY_TOOLCHAIN_NAMES, Resources, FileType
+from tools.targets import TARGET_MAP, set_targets_json_location
 from tools.notifier.mock import MockNotifier

 ALPHABET = [char for char in printable if char not in [u'.', u'/', u'\\']]
@@ -21,6 +25,7 @@ ALPHABET = [char for char in printable if char not in [u'.', u'/', u'\\']]

 @patch('tools.toolchains.arm.run_cmd')
 def test_arm_version_check(_run_cmd):
+    set_targets_json_location()
     _run_cmd.return_value = ("""
 Product: ARM Compiler 5.06
 Component: ARM Compiler 5.06 update 5 (build 528)
@@ -48,6 +53,7 @@ def test_arm_version_check(_run_cmd):

 @patch('tools.toolchains.iar.run_cmd')
 def test_iar_version_check(_run_cmd):
+    set_targets_json_location()
     _run_cmd.return_value = ("""
 IAR ANSI C/C++ Compiler V7.80.1.28/LNX for ARM
 """, "", 0)
@@ -69,6 +75,7 @@ def test_iar_version_check(_run_cmd):

 @patch('tools.toolchains.gcc.run_cmd')
 def test_gcc_version_check(_run_cmd):
+    set_targets_json_location()
     _run_cmd.return_value = ("""
 arm-none-eabi-gcc (Arch Repository) 6.4.4
 Copyright (C) 2018 Free Software Foundation, Inc.
@@ -111,6 +118,7 @@ def test_toolchain_profile_c(profile, source_file):
     filename = deepcopy(source_file)
     filename[-1] += ".c"
     to_compile = os.path.join(*filename)
+    set_targets_json_location()
     with patch('os.mkdir') as _mkdir:
         for _, tc_class in TOOLCHAIN_CLASSES.items():
             toolchain = tc_class(TARGET_MAP["K64F"], build_profile=profile,
@@ -241,12 +249,11 @@ def test_detect_duplicates(filenames):
     s_sources = [os.path.join(name, "dupe.s") for name in filenames]
     cpp_sources = [os.path.join(name, "dupe.cpp") for name in filenames]
     notify = MockNotifier()
-    toolchain = TOOLCHAIN_CLASSES["ARM"](TARGET_MAP["K64F"], notify=notify)
-    res = Resources()
-    res.c_sources = c_sources
-    res.s_sources = s_sources
-    res.cpp_sources = cpp_sources
-    assert res.detect_duplicates(toolchain) == 1,\
+    res = Resources(notify)
+    res.add_files_to_type(FileType.C_SRC, c_sources)
+    res.add_files_to_type(FileType.ASM_SRC, s_sources)
+    res.add_files_to_type(FileType.CPP_SRC, cpp_sources)
+    assert res.detect_duplicates() == 1,\
         "Not Enough duplicates found"
     notification = notify.messages[0]
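
The rewritten test doubles as a usage example for the relocated class: sources are now registered per `FileType`, and `detect_duplicates` reports through the notifier handed to the constructor instead of taking a toolchain argument. The same flow outside the test harness (paths invented):

    from tools.notifier.mock import MockNotifier
    from tools.resources import Resources, FileType

    notify = MockNotifier()
    res = Resources(notify)
    res.add_files_to_type(FileType.C_SRC, ["a/dupe.c", "b/dupe.c"])
    if res.detect_duplicates():        # both files would build dupe.o
        print(notify.messages[0])      # the recorded tool_error
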

View File

@@ -40,7 +40,7 @@ try:
     from Queue import Queue, Empty
 except ImportError:
     from queue import Queue, Empty
-from os.path import join, exists, basename, relpath
+from os.path import join, exists, basename, relpath, isdir
 from threading import Thread, Lock
 from multiprocessing import Pool, cpu_count
 from subprocess import Popen, PIPE
@@ -65,8 +65,8 @@ from tools.build_api import prep_properties
 from tools.build_api import create_result
 from tools.build_api import add_result_to_report
 from tools.build_api import prepare_toolchain
-from tools.build_api import scan_resources
 from tools.build_api import get_config
+from tools.resources import Resources
 from tools.libraries import LIBRARIES, LIBRARY_MAP
 from tools.options import extract_profile
 from tools.toolchains import TOOLCHAIN_PATHS
@@ -2082,52 +2082,34 @@ def find_tests(base_dir, target_name, toolchain_name, app_config=None):
     # List of common folders: (predicate function, path) tuple
     commons = []

-    # Prepare the toolchain
-    toolchain = prepare_toolchain([base_dir], None, target_name, toolchain_name,
-                                  app_config=app_config)
-
     # Scan the directory for paths to probe for 'TESTS' folders
-    base_resources = scan_resources([base_dir], toolchain)
+    base_resources = Resources(MockNotifier(), collect_ignores=True)
+    base_resources.add_directory(base_dir)

-    dirs = base_resources.inc_dirs
+    dirs = [d for d in base_resources.ignored_dirs if basename(d) == 'TESTS']
     for directory in dirs:
-        subdirs = os.listdir(directory)
-
-        # If the directory contains a subdirectory called 'TESTS', scan it for test cases
-        if 'TESTS' in subdirs:
-            walk_base_dir = join(directory, 'TESTS')
-            test_resources = toolchain.scan_resources(walk_base_dir, base_path=base_dir)
-
-            # Loop through all subdirectories
-            for d in test_resources.inc_dirs:
-
-                # If the test case folder is not called 'host_tests' or 'COMMON' and it is
-                # located two folders down from the main 'TESTS' folder (ex. TESTS/testgroup/testcase)
-                # then add it to the tests
-                relative_path = relpath(d, walk_base_dir)
-                relative_path_parts = os.path.normpath(relative_path).split(os.sep)
-                if len(relative_path_parts) == 2:
-                    test_group_directory_path, test_case_directory = os.path.split(d)
-                    test_group_directory = os.path.basename(test_group_directory_path)
-
-                    # Check to make sure discoverd folder is not in a host test directory or common directory
-                    special_dirs = ['host_tests', 'COMMON']
-                    if test_group_directory not in special_dirs and test_case_directory not in special_dirs:
-                        test_name = test_path_to_name(d, base_dir)
-                        tests[(test_name, walk_base_dir, test_group_directory, test_case_directory)] = [d]
-
-                # Also find any COMMON paths, we'll add these later once we find all the base tests
-                if 'COMMON' in relative_path_parts:
-                    if relative_path_parts[0] != 'COMMON':
-                        def predicate(base_pred, group_pred, name_base_group_case):
-                            (name, base, group, case) = name_base_group_case
-                            return base == base_pred and group == group_pred
-                        commons.append((functools.partial(predicate, walk_base_dir, relative_path_parts[0]), d))
-                    else:
-                        def predicate(base_pred, name_base_group_case):
-                            (name, base, group, case) = name_base_group_case
-                            return base == base_pred
-                        commons.append((functools.partial(predicate, walk_base_dir), d))
+        for test_group_directory in os.listdir(directory):
+            grp_dir = join(directory, test_group_directory)
+            if not isdir(grp_dir):
+                continue
+            for test_case_directory in os.listdir(grp_dir):
+                d = join(directory, test_group_directory, test_case_directory)
+                if not isdir(d):
+                    continue
+                special_dirs = ['host_tests', 'COMMON']
+                if test_group_directory not in special_dirs and test_case_directory not in special_dirs:
+                    test_name = test_path_to_name(d, base_dir)
+                    tests[(test_name, directory, test_group_directory, test_case_directory)] = [d]
+                if test_case_directory == 'COMMON':
+                    def predicate(base_pred, group_pred, name_base_group_case):
+                        (name, base, group, case) = name_base_group_case
+                        return base == base_pred and group == group_pred
+                    commons.append((functools.partial(predicate, directory, test_group_directory), d))
+                if test_group_directory == 'COMMON':
+                    def predicate(base_pred, name_base_group_case):
+                        (name, base, group, case) = name_base_group_case
+                        return base == base_pred
+                    commons.append((functools.partial(predicate, directory), grp_dir))

     # Apply common directories
     for pred, path in commons:
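
The rewritten discovery leans on the scanner itself: `Resources` already prunes `TESTS` directories during a scan, so constructing it with `collect_ignores=True` hands back exactly those paths in `ignored_dirs`, and only the fixed group/case layout beneath them needs listing. For a tree like the following (hypothetical layout; the derived name follows the `test_path_to_name` convention of dash-joined path parts):

    my-project/TESTS/network/tcp_echo/main.cpp
    my-project/TESTS/network/COMMON/helpers.cpp

    # -> tests[("tests-network-tcp_echo", ".../TESTS", "network", "tcp_echo")] = [case dir]
    # -> commons gets (predicate matching base+group "network", .../TESTS/network/COMMON)
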
@@ -2230,7 +2212,7 @@ def build_tests(tests, base_source_paths, build_path, target, toolchain_name,
     else:
         target_name = target
         target = TARGET_MAP[target_name]
-    cfg, _, _ = get_config(base_source_paths, target_name, toolchain_name, app_config=app_config)
+    cfg, _, _ = get_config(base_source_paths, target, app_config=app_config)
     baud_rate = 9600
     if 'platform.stdio-baud-rate' in cfg:

View File

@@ -25,7 +25,6 @@ from time import time, sleep
 from shutil import copyfile
 from os.path import (join, splitext, exists, relpath, dirname, basename, split,
                      abspath, isfile, isdir, normcase)
-from itertools import chain
 from inspect import getmro
 from copy import deepcopy
 from collections import namedtuple
@@ -33,13 +32,13 @@ from abc import ABCMeta, abstractmethod
 from distutils.spawn import find_executable
 from multiprocessing import Pool, cpu_count
 from hashlib import md5
-import fnmatch

 from ..utils import (run_cmd, mkdir, rel_path, ToolException,
                      NotSupportedException, split_path, compile_worker)
 from ..settings import MBED_ORG_USER, PRINT_COMPILER_OUTPUT_AS_LINK
 from .. import hooks
 from ..notifier.term import TerminalNotifier
+from ..resources import FileType
 from ..memap import MemapParser
 from ..config import ConfigException
@@ -48,314 +47,6 @@ from ..config import ConfigException
 CPU_COUNT_MIN = 1
 CPU_COEF = 1

-class LazyDict(object):
-    def __init__(self):
-        self.eager = {}
-        self.lazy = {}
-
-    def add_lazy(self, key, thunk):
-        if key in self.eager:
-            del self.eager[key]
-        self.lazy[key] = thunk
-
-    def __getitem__(self, key):
-        if (key not in self.eager
-            and key in self.lazy):
-            self.eager[key] = self.lazy[key]()
-            del self.lazy[key]
-        return self.eager[key]
-
-    def __setitem__(self, key, value):
-        self.eager[key] = value
-
-    def __delitem__(self, key):
-        if key in self.eager:
-            del self.eager[key]
-        else:
-            del self.lazy[key]
-
-    def __contains__(self, key):
-        return key in self.eager or key in self.lazy
-
-    def __iter__(self):
-        return chain(iter(self.eager), iter(self.lazy))
-
-    def __len__(self):
-        return len(self.eager) + len(self.lazy)
-
-    def __str__(self):
-        return "Lazy{%s}" % (
-            ", ".join("%r: %r" % (k, v) for k, v in
-                      chain(self.eager.items(), ((k, "not evaluated")
-                                                 for k in self.lazy))))
-
-    def update(self, other):
-        if isinstance(other, LazyDict):
-            self.eager.update(other.eager)
-            self.lazy.update(other.lazy)
-        else:
-            self.eager.update(other)
-
-    def items(self):
-        """Warning: This forces the evaluation all of the items in this LazyDict
-        that are iterated over."""
-        for k, v in self.eager.items():
-            yield k, v
-        for k in self.lazy.keys():
-            yield k, self[k]
-
-    def apply(self, fn):
-        """Delay the application of a computation to all items of the lazy dict.
-        Does no computation now. Instead the comuptation is performed when a
-        consumer attempts to access a value in this LazyDict"""
-        new_lazy = {}
-        for k, f in self.lazy.items():
-            def closure(f=f):
-                return fn(f())
-            new_lazy[k] = closure
-        for k, v in self.eager.items():
-            def closure(v=v):
-                return fn(v)
-            new_lazy[k] = closure
-        self.lazy = new_lazy
-        self.eager = {}
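
Before it disappears, the contract in brief: `LazyDict` held FEATURE_ scans as thunks so a feature directory was only walked if the config system actually enabled it, and `apply` deferred post-processing the same way. A toy illustration of the behaviour shown above (the helper names are made up):

    d = LazyDict()
    d.add_lazy("BLE", lambda: expensive_scan("FEATURE_BLE"))  # nothing runs yet
    "BLE" in d                 # True, still unevaluated
    res = d["BLE"]             # thunk fires once; result cached in d.eager
    d.apply(post_process)      # re-wraps every entry; runs on next access
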
-class Resources:
-    def __init__(self, base_path=None, collect_ignores=False):
-        self.base_path = base_path
-        self.collect_ignores = collect_ignores
-        self.file_basepath = {}
-
-        self.inc_dirs = []
-        self.headers = []
-
-        self.s_sources = []
-        self.c_sources = []
-        self.cpp_sources = []
-
-        self.lib_dirs = set([])
-        self.objects = []
-        self.libraries = []
-
-        # mbed special files
-        self.lib_builds = []
-        self.lib_refs = []
-
-        self.repo_dirs = []
-        self.repo_files = []
-
-        self.linker_script = None
-
-        # Other files
-        self.hex_files = []
-        self.bin_files = []
-        self.json_files = []
-
-        # Features
-        self.features = LazyDict()
-        self.ignored_dirs = []
-
-    def __add__(self, resources):
-        if resources is None:
-            return self
-        else:
-            return self.add(resources)
-
-    def __radd__(self, resources):
-        if resources is None:
-            return self
-        else:
-            return self.add(resources)
-
-    def ignore_dir(self, directory):
-        if self.collect_ignores:
-            self.ignored_dirs.append(directory)
-
-    def add(self, resources):
-        self.file_basepath.update(resources.file_basepath)
-        self.inc_dirs += resources.inc_dirs
-        self.headers += resources.headers
-
-        self.s_sources += resources.s_sources
-        self.c_sources += resources.c_sources
-        self.cpp_sources += resources.cpp_sources
-
-        self.lib_dirs |= resources.lib_dirs
-        self.objects += resources.objects
-        self.libraries += resources.libraries
-
-        self.lib_builds += resources.lib_builds
-        self.lib_refs += resources.lib_refs
-
-        self.repo_dirs += resources.repo_dirs
-        self.repo_files += resources.repo_files
-
-        if resources.linker_script is not None:
-            self.linker_script = resources.linker_script
-
-        self.hex_files += resources.hex_files
-        self.bin_files += resources.bin_files
-        self.json_files += resources.json_files
-
-        self.features.update(resources.features)
-        self.ignored_dirs += resources.ignored_dirs
-
-        return self
-
-    def rewrite_basepath(self, file_name, export_path, loc):
-        """ Replace the basepath of filename with export_path
-
-        Positional arguments:
-        file_name - the absolute path to a file
-        export_path - the final destination of the file after export
-        """
-        new_f = join(loc, relpath(file_name, self.file_basepath[file_name]))
-        self.file_basepath[new_f] = export_path
-        return new_f
-
-    def subtract_basepath(self, export_path, loc=""):
-        """ Rewrite all of the basepaths with the export_path
-
-        Positional arguments:
-        export_path - the final destination of the resources with respect to the
-        generated project files
-        """
-        keys = ['s_sources', 'c_sources', 'cpp_sources', 'hex_files',
-                'objects', 'libraries', 'inc_dirs', 'headers', 'linker_script',
-                'lib_dirs']
-        for key in keys:
-            vals = getattr(self, key)
-            if isinstance(vals, set):
-                vals = list(vals)
-            if isinstance(vals, list):
-                new_vals = []
-                for val in vals:
-                    new_vals.append(self.rewrite_basepath(
-                        val, export_path, loc))
-                if isinstance(getattr(self, key), set):
-                    setattr(self, key, set(new_vals))
-                else:
-                    setattr(self, key, new_vals)
-            elif vals:
-                setattr(self, key, self.rewrite_basepath(
-                    vals, export_path, loc))
-        def closure(res, export_path=export_path, loc=loc):
-            res.subtract_basepath(export_path, loc)
-            return res
-        self.features.apply(closure)
-
-    def _collect_duplicates(self, dupe_dict, dupe_headers):
-        for filename in self.s_sources + self.c_sources + self.cpp_sources:
-            objname, _ = splitext(basename(filename))
-            dupe_dict.setdefault(objname, set())
-            dupe_dict[objname] |= set([filename])
-        for filename in self.headers:
-            headername = basename(filename)
-            dupe_headers.setdefault(headername, set())
-            dupe_headers[headername] |= set([headername])
-        return dupe_dict, dupe_headers
-
-    def detect_duplicates(self, toolchain):
-        """Detect all potential ambiguities in filenames and report them with
-        a toolchain notification
-
-        Positional Arguments:
-        toolchain - used for notifications
-        """
-        count = 0
-        dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict())
-        for objname, filenames in dupe_dict.items():
-            if len(filenames) > 1:
-                count+=1
-                toolchain.notify.tool_error(
-                    "Object file %s.o is not unique! It could be made from: %s"\
-                    % (objname, " ".join(filenames)))
-        for headername, locations in dupe_headers.items():
-            if len(locations) > 1:
-                count+=1
-                toolchain.notify.tool_error(
-                    "Header file %s is not unique! It could be: %s" %\
-                    (headername, " ".join(locations)))
-        return count
-
-    def relative_to(self, base, dot=False):
-        for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
-                      'cpp_sources', 'lib_dirs', 'objects', 'libraries',
-                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
-                      'hex_files', 'bin_files', 'json_files']:
-            v = [rel_path(f, base, dot) for f in getattr(self, field)]
-            setattr(self, field, v)
-        def to_apply(feature, base=base, dot=dot):
-            feature.relative_to(base, dot)
-        self.features.apply(to_apply)
-        if self.linker_script is not None:
-            self.linker_script = rel_path(self.linker_script, base, dot)
-
-    def win_to_unix(self):
-        for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
-                      'cpp_sources', 'lib_dirs', 'objects', 'libraries',
-                      'lib_builds', 'lib_refs', 'repo_dirs', 'repo_files',
-                      'hex_files', 'bin_files', 'json_files']:
-            v = [f.replace('\\', '/') for f in getattr(self, field)]
-            setattr(self, field, v)
-        def to_apply(feature):
-            feature.win_to_unix()
-        self.features.apply(to_apply)
-        if self.linker_script is not None:
-            self.linker_script = self.linker_script.replace('\\', '/')
-
-    def __str__(self):
-        s = []
-
-        for (label, resources) in (
-                ('Include Directories', self.inc_dirs),
-                ('Headers', self.headers),
-                ('Assembly sources', self.s_sources),
-                ('C sources', self.c_sources),
-                ('C++ sources', self.cpp_sources),
-                ('Library directories', self.lib_dirs),
-                ('Objects', self.objects),
-                ('Libraries', self.libraries),
-                ('Hex files', self.hex_files),
-                ('Bin files', self.bin_files),
-                ('Features', self.features),
-            ):
-            if resources:
-                s.append('%s:\n  ' % label + '\n  '.join(resources))
-
-        if self.linker_script:
-            s.append('Linker Script: ' + self.linker_script)
-
-        return '\n'.join(s)
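
One subtlety worth recording from the deleted class: `__add__`/`__radd__` treat `None` as an identity element, which is what let callers fold scans together without first seeding an empty `Resources`:

    merged = scan_a + scan_b              # add() mutates and returns scan_a
    merged = None + scan_b                # __radd__ makes this legal
    merged = sum(all_scans, None)         # so sum() over a list of scans works
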
-
-# Support legacy build conventions: the original mbed build system did not have
-# standard labels for the "TARGET_" and "TOOLCHAIN_" specific directories, but
-# had the knowledge of a list of these directories to be ignored.
-LEGACY_IGNORE_DIRS = set([
-    'LPC11U24', 'LPC1768', 'LPC2368', 'LPC4088', 'LPC812', 'KL25Z',
-    'ARM', 'uARM', 'IAR',
-    'GCC_ARM', 'GCC_CS', 'GCC_CR', 'GCC_CW', 'GCC_CW_EWL', 'GCC_CW_NEWLIB',
-    'ARMC6'
-])
-LEGACY_TOOLCHAIN_NAMES = {
-    'ARM_STD':'ARM', 'ARM_MICRO': 'uARM',
-    'GCC_ARM': 'GCC_ARM', 'GCC_CR': 'GCC_CR',
-    'IAR': 'IAR',
-    'ARMC6': 'ARMC6',
-}
 class mbedToolchain:
     # Verbose logging
     VERBOSE = True
@@ -440,12 +131,6 @@ class mbedToolchain:
         # Number of concurrent build jobs. 0 means auto (based on host system cores)
         self.jobs = 0

-        # Ignore patterns from .mbedignore files
-        self.ignore_patterns = []
-        self._ignore_regex = re.compile("$^")
-
-        # Pre-mbed 2.0 ignore dirs
-        self.legacy_ignore_dirs = (LEGACY_IGNORE_DIRS | TOOLCHAINS) - set([target.name, LEGACY_TOOLCHAIN_NAMES[self.name]])

         # Output notify function
         # This function is passed all events, and expected to handle notification of the
@@ -584,185 +269,6 @@ class mbedToolchain:
         return False

-    def is_ignored(self, file_path):
-        """Check if file path is ignored by any .mbedignore thus far"""
-        return self._ignore_regex.match(normcase(file_path))
-
-    def add_ignore_patterns(self, root, base_path, patterns):
-        """Add a series of patterns to the ignored paths
-
-        Positional arguments:
-        root - the directory containing the ignore file
-        base_path - the location that the scan started from
-        patterns - the list of patterns we will ignore in the future
-        """
-        real_base = relpath(root, base_path)
-        if real_base == ".":
-            self.ignore_patterns.extend(normcase(p) for p in patterns)
-        else:
-            self.ignore_patterns.extend(normcase(join(real_base, pat)) for pat in patterns)
-        if self.ignore_patterns:
-            self._ignore_regex = re.compile("|".join(fnmatch.translate(p) for p in self.ignore_patterns))
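
These helpers folded every accumulated .mbedignore glob into a single alternation regex via `fnmatch.translate`, so `is_ignored` cost one `match` call per path (the initial pattern "$^" matches nothing). The same trick in isolation, with example globs:

    import fnmatch
    import re

    patterns = ["build/*", "docs/*.md"]
    ignore = re.compile("|".join(fnmatch.translate(p) for p in patterns))
    bool(ignore.match("build/obj/main.o"))       # True
    bool(ignore.match("src/main.c"))             # False
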
-    # Create a Resources object from the path pointed to by *path* by either traversing a
-    # a directory structure, when *path* is a directory, or adding *path* to the resources,
-    # when *path* is a file.
-    # The parameter *base_path* is used to set the base_path attribute of the Resources
-    # object and the parameter *exclude_paths* is used by the directory traversal to
-    # exclude certain paths from the traversal.
-    def scan_resources(self, path, exclude_paths=None, base_path=None,
-                       collect_ignores=False):
-        self.progress("scan", path)
-
-        resources = Resources(path, collect_ignores=collect_ignores)
-        if not base_path:
-            if isfile(path):
-                base_path = dirname(path)
-            else:
-                base_path = path
-        resources.base_path = base_path
-
-        if isfile(path):
-            self._add_file(path, resources, base_path, exclude_paths=exclude_paths)
-        else:
-            self._add_dir(path, resources, base_path, exclude_paths=exclude_paths)
-        return resources
-
-    # A helper function for scan_resources. _add_dir traverses *path* (assumed to be a
-    # directory) and heeds the ".mbedignore" files along the way. _add_dir calls _add_file
-    # on every file it considers adding to the resources object.
-    def _add_dir(self, path, resources, base_path, exclude_paths=None):
-        """ os.walk(top[, topdown=True[, onerror=None[, followlinks=False]]])
-        When topdown is True, the caller can modify the dirnames list in-place
-        (perhaps using del or slice assignment), and walk() will only recurse into
-        the subdirectories whose names remain in dirnames; this can be used to prune
-        the search, impose a specific order of visiting, or even to inform walk()
-        about directories the caller creates or renames before it resumes walk()
-        again. Modifying dirnames when topdown is False is ineffective, because in
-        bottom-up mode the directories in dirnames are generated before dirpath
-        itself is generated.
-        """
-        labels = self.get_labels()
-        for root, dirs, files in walk(path, followlinks=True):
-            # Check if folder contains .mbedignore
-            if ".mbedignore" in files:
-                with open (join(root,".mbedignore"), "r") as f:
-                    lines=f.readlines()
-                    lines = [l.strip() for l in lines] # Strip whitespaces
-                    lines = [l for l in lines if l != ""] # Strip empty lines
-                    lines = [l for l in lines if not re.match("^#",l)] # Strip comment lines
-                    # Append root path to glob patterns and append patterns to ignore_patterns
-                    self.add_ignore_patterns(root, base_path, lines)
-
-            # Skip the whole folder if ignored, e.g. .mbedignore containing '*'
-            root_path = join(relpath(root, base_path))
-            if (self.is_ignored(join(root_path, "")) or
-                self.build_dir == root_path):
-                resources.ignore_dir(root_path)
-                dirs[:] = []
-                continue
-
-            for d in copy(dirs):
-                dir_path = join(root, d)
-                # Add internal repo folders/files. This is needed for exporters
-                if d == '.hg' or d == '.git':
-                    resources.repo_dirs.append(dir_path)
-
-                if ((d.startswith('.') or d in self.legacy_ignore_dirs) or
-                    # Ignore targets that do not match the TARGET in extra_labels list
-                    (d.startswith('TARGET_') and d[7:] not in labels['TARGET']) or
-                    # Ignore toolchain that do not match the current TOOLCHAIN
-                    (d.startswith('TOOLCHAIN_') and d[10:] not in labels['TOOLCHAIN']) or
-                    # Ignore .mbedignore files
-                    self.is_ignored(join(relpath(root, base_path), d, "")) or
-                    # Ignore TESTS dir
-                    (d == 'TESTS')):
-                    resources.ignore_dir(dir_path)
-                    dirs.remove(d)
-                elif d.startswith('FEATURE_'):
-                    # Recursively scan features but ignore them in the current scan.
-                    # These are dynamically added by the config system if the conditions are matched
-                    def closure(dir_path=dir_path, base_path=base_path):
-                        return self.scan_resources(dir_path, base_path=base_path,
-                                                   collect_ignores=resources.collect_ignores)
-                    resources.features.add_lazy(d[8:], closure)
-                    resources.ignore_dir(dir_path)
-                    dirs.remove(d)
-                elif exclude_paths:
-                    for exclude_path in exclude_paths:
-                        rel_path = relpath(dir_path, exclude_path)
-                        if not (rel_path.startswith('..')):
-                            resources.ignore_dir(dir_path)
-                            dirs.remove(d)
-                            break
-
-            # Add root to include paths
-            root = root.rstrip("/")
-            resources.inc_dirs.append(root)
-            resources.file_basepath[root] = base_path
-
-            for file in files:
-                file_path = join(root, file)
-                self._add_file(file_path, resources, base_path)
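
The docstring above is lifted straight from the standard library, and it names the load-bearing detail: with the default `topdown=True`, mutating `dirs` in place is what stops `os.walk` from descending into pruned trees. Stripped to its essence:

    import os

    for root, dirs, files in os.walk(".", followlinks=True):
        # entries removed here are never visited by walk()
        dirs[:] = [d for d in dirs if not d.startswith(".") and d != "TESTS"]
        print(root, len(files))
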
-    # A helper function for both scan_resources and _add_dir. _add_file adds one file
-    # (*file_path*) to the resources object based on the file type.
-    def _add_file(self, file_path, resources, base_path, exclude_paths=None):
-        if (self.is_ignored(relpath(file_path, base_path)) or
-            basename(file_path).startswith(".")):
-            resources.ignore_dir(relpath(file_path, base_path))
-            return
-
-        resources.file_basepath[file_path] = base_path
-        _, ext = splitext(file_path)
-        ext = ext.lower()
-
-        if ext == '.s':
-            resources.s_sources.append(file_path)
-        elif ext == '.c':
-            resources.c_sources.append(file_path)
-        elif ext == '.cpp' or ext == '.cc':
-            resources.cpp_sources.append(file_path)
-        elif ext == '.h' or ext == '.hpp' or ext == '.hh':
-            resources.headers.append(file_path)
-        elif ext == '.o':
-            resources.objects.append(file_path)
-        elif ext == self.LIBRARY_EXT:
-            resources.libraries.append(file_path)
-            resources.lib_dirs.add(dirname(file_path))
-        elif ext == self.LINKER_EXT:
-            if resources.linker_script is not None:
-                self.notify.info("Warning: Multiple linker scripts detected: %s -> %s" % (resources.linker_script, file_path))
-            resources.linker_script = file_path
-        elif ext == '.lib':
-            resources.lib_refs.append(file_path)
-        elif ext == '.bld':
-            resources.lib_builds.append(file_path)
-        elif basename(file_path) == '.hgignore':
-            resources.repo_files.append(file_path)
-        elif basename(file_path) == '.gitignore':
-            resources.repo_files.append(file_path)
-        elif ext == '.hex':
-            resources.hex_files.append(file_path)
-        elif ext == '.bin':
-            resources.bin_files.append(file_path)
-        elif ext == '.json':
-            resources.json_files.append(file_path)
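
In the replacement module this extension dispatch naturally becomes a lookup table keyed by `FileType`; the sketch below is a guess at that shape for orientation, not the actual contents of tools/resources.py:

    # hypothetical mapping; member names beyond those visible in this diff are assumed
    EXT_TO_FILE_TYPE = {
        '.s': FileType.ASM_SRC,
        '.c': FileType.C_SRC,
        '.cpp': FileType.CPP_SRC,
        '.cc': FileType.CPP_SRC,
        '.h': FileType.HEADER,
        '.o': FileType.OBJECT,
        '.hex': FileType.HEX,
        '.bin': FileType.BIN,
        '.json': FileType.JSON,
    }
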
     def scan_repository(self, path):
         resources = []
@@ -779,36 +285,24 @@ class mbedToolchain:
         return resources

-    def copy_files(self, files_paths, trg_path, resources=None, rel_path=None):
+    def copy_files(self, files_paths, trg_path, resources=None):
         # Handle a single file
         if not isinstance(files_paths, list):
             files_paths = [files_paths]

-        for source in files_paths:
-            if source is None:
-                files_paths.remove(source)
-
-        for source in files_paths:
-            if resources is not None and source in resources.file_basepath:
-                relative_path = relpath(source, resources.file_basepath[source])
-            elif rel_path is not None:
-                relative_path = relpath(source, rel_path)
-            else:
-                _, relative_path = split(source)
-
-            target = join(trg_path, relative_path)
-
+        for dest, source in files_paths:
+            target = join(trg_path, dest)
             if (target != source) and (self.need_update(target, [source])):
-                self.progress("copy", relative_path)
+                self.progress("copy", dest)
                 mkdir(dirname(target))
                 copyfile(source, target)
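
`copy_files` now receives explicit (dest, source) pairs, so the destination no longer has to be reverse-engineered from `file_basepath` or a `rel_path` hint; that is what makes the deleted branches redundant. A hypothetical call:

    toolchain.copy_files(
        [("main.cpp", "/work/proj/main.cpp"),          # dest is relative to trg_path
         ("lib/util.h", "/work/proj/lib/util.h")],
        "/work/export",
    )
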
     # THIS METHOD IS BEING OVERRIDDEN BY THE MBED ONLINE BUILD SYSTEM
     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
-    def relative_object_path(self, build_path, base_dir, source):
-        source_dir, name, _ = split_path(source)
+    def relative_object_path(self, build_path, file_ref):
+        source_dir, name, _ = split_path(file_ref.name)

-        obj_dir = relpath(join(build_path, relpath(source_dir, base_dir)))
+        obj_dir = relpath(join(build_path, source_dir))
         if obj_dir is not self.prev_dir:
             self.prev_dir = obj_dir
             mkdir(obj_dir)
@@ -863,13 +357,17 @@ class mbedToolchain:
     # ANY CHANGE OF PARAMETERS OR RETURN VALUES WILL BREAK COMPATIBILITY
     def compile_sources(self, resources, inc_dirs=None):
         # Web IDE progress bar for project build
-        files_to_compile = resources.s_sources + resources.c_sources + resources.cpp_sources
+        files_to_compile = (
+            resources.get_file_refs(FileType.ASM_SRC) +
+            resources.get_file_refs(FileType.C_SRC) +
+            resources.get_file_refs(FileType.CPP_SRC)
+        )
         self.to_be_compiled = len(files_to_compile)
         self.compiled = 0

         self.notify.cc_verbose("Macros: "+' '.join(['-D%s' % s for s in self.get_symbols()]))

-        inc_paths = resources.inc_dirs
+        inc_paths = resources.get_file_paths(FileType.INC_DIR)
         if inc_dirs is not None:
             if isinstance(inc_dirs, list):
                 inc_paths.extend(inc_dirs)
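
Judging by the usage in this hunk and in the link step further down, `get_file_refs` yields references carrying both a build-relative name and an on-disk path (the later `source.name` / `source.path` accesses and the `for _, path in ...` unpacking imply a named pair), while `get_file_paths` flattens to paths only. Assumed shape:

    for ref in resources.get_file_refs(FileType.C_SRC):
        ref.name     # e.g. "drivers/serial.c" -- drives the object path
        ref.path     # actual location handed to the compiler
    inc_paths = resources.get_file_paths(FileType.INC_DIR)   # plain strings
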
@@ -894,11 +392,10 @@ class mbedToolchain:
         # Sort compile queue for consistency
         files_to_compile.sort()
         for source in files_to_compile:
-            object = self.relative_object_path(
-                self.build_dir, resources.file_basepath[source], source)
+            object = self.relative_object_path(self.build_dir, source)

             # Queue mode (multiprocessing)
-            commands = self.compile_command(source, object, inc_paths)
+            commands = self.compile_command(source.path, object, inc_paths)
             if commands is not None:
                 queue.append({
                     'source': source,
@@ -924,7 +421,7 @@ class mbedToolchain:
                 result = compile_worker(item)

                 self.compiled += 1
-                self.progress("compile", item['source'], build_update=True)
+                self.progress("compile", item['source'].name, build_update=True)
                 for res in result['results']:
                     self.notify.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
                     self.compile_output([
@@ -962,7 +459,7 @@ class mbedToolchain:
                     results.remove(r)

                     self.compiled += 1
-                    self.progress("compile", result['source'], build_update=True)
+                    self.progress("compile", result['source'].name, build_update=True)
                     for res in result['results']:
                         self.notify.cc_verbose("Compile: %s" % ' '.join(res['command']), result['source'])
                         self.compile_output([
@@ -1123,15 +620,20 @@ class mbedToolchain:
         bin = None if ext == 'elf' else full_path
         map = join(tmp_path, name + '.map')

-        r.objects = sorted(set(r.objects))
+        objects = sorted(set(r.get_file_paths(FileType.OBJECT)))
         config_file = ([self.config.app_config_location]
                        if self.config.app_config_location else [])
-        dependencies = r.objects + r.libraries + [r.linker_script] + config_file
+        linker_script = [path for _, path in r.get_file_refs(FileType.LD_SCRIPT)
+                         if path.endswith(self.LINKER_EXT)][-1]
+        lib_dirs = r.get_file_paths(FileType.LIB_DIR)
+        libraries = [l for l in r.get_file_paths(FileType.LIB)
+                     if l.endswith(self.LIBRARY_EXT)]
+        dependencies = objects + libraries + [linker_script] + config_file
         dependencies.append(join(self.build_dir, self.PROFILE_FILE_NAME + "-ld"))
         if self.need_update(elf, dependencies):
             needed_update = True
             self.progress("link", name)
-            self.link(elf, r.objects, r.libraries, r.lib_dirs, r.linker_script)
+            self.link(elf, objects, libraries, lib_dirs, linker_script)

         if bin and self.need_update(bin, [elf]):
             needed_update = True
@@ -1246,6 +748,8 @@ class mbedToolchain:
     # Set the configuration data
     def set_config_data(self, config_data):
         self.config_data = config_data
+        # new configuration data can change labels, so clear the cache
+        self.labels = None
         self.add_regions()

     # Creates the configuration header if needed:

View File

@@ -230,12 +230,16 @@ class ARM(mbedToolchain):
     def compile_cpp(self, source, object, includes):
         return self.compile(self.cppc, source, object, includes)

-    def correct_scatter_shebang(self, scatter_file, base_path=curdir):
+    def correct_scatter_shebang(self, scatter_file, cur_dir_name=None):
         """Correct the shebang at the top of a scatter file.

         Positional arguments:
         scatter_file -- the scatter file to correct

+        Keyword arguments:
+        cur_dir_name -- the name (not path) of the directory containing the
+                        scatter file
+
         Return:
         The location of the correct scatter file
@@ -249,8 +253,9 @@ class ARM(mbedToolchain):
             return scatter_file
         else:
             new_scatter = join(self.build_dir, ".link_script.sct")
-            self.SHEBANG += " -I %s" % relpath(dirname(scatter_file),
-                                               base_path)
+            if cur_dir_name is None:
+                cur_dir_name = dirname(scatter_file)
+            self.SHEBANG += " -I %s" % cur_dir_name

             if self.need_update(new_scatter, [scatter_file]):
                 with open(new_scatter, "w") as out:
                     out.write(self.SHEBANG)
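
To make the fix concrete: the scatter file's shebang line runs the toolchain's preprocessor over the copy written to .link_script.sct in the build directory, and the appended -I keeps any relative #include in the scatter file resolving from its original location. Since exporters may relocate scatter files, callers can now pass just the containing directory's name. Illustrative values (path invented):

    # scatter lives in .../device/TOOLCHAIN_ARM_STD/MK64F.sct
    new_sct = toolchain.correct_scatter_shebang(scatter, cur_dir_name="TOOLCHAIN_ARM_STD")
    # the rewritten copy's shebang now ends with: -I TOOLCHAIN_ARM_STD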