diff --git a/.gitignore b/.gitignore
index f5be8aaa..4d8e69af 100644
--- a/.gitignore
+++ b/.gitignore
@@ -45,6 +45,7 @@ pip-log.txt
.coverage
.tox
nosetests.xml
+htmlcov/
# Translations
*.mo
diff --git a/mbed_greentea/mbed_greentea_cli.py b/mbed_greentea/mbed_greentea_cli.py
index a8eaa400..f66dd9d0 100644
--- a/mbed_greentea/mbed_greentea_cli.py
+++ b/mbed_greentea/mbed_greentea_cli.py
@@ -18,8 +18,10 @@
import os
import sys
import optparse
-import threading
from time import time, sleep
+from Queue import Queue
+from threading import Thread
+
from mbed_greentea.mbed_test_api import run_host_test
from mbed_greentea.mbed_test_api import TEST_RESULTS
@@ -41,8 +43,6 @@
from mbed_greentea.mbed_greentea_dlm import greentea_clean_kettle
from mbed_greentea.mbed_yotta_api import build_with_yotta
-from Queue import Queue
-from threading import Thread
try:
import mbed_lstools
@@ -55,7 +55,19 @@
RET_NO_DEVICES = 1001
RET_YOTTA_BUILD_FAIL = -1
-
+LOCAL_HOST_TESTS_DIR = './test/host_tests' # Used by mbedhtrun -e
+
+def get_local_host_tests_dir(path):
+    """! Forms the path to local host tests, with basic sanity checks (path exists and is a directory).
+ """
+    # If the specified path exists and is a directory, return it
+ if path and os.path.exists(path) and os.path.isdir(path):
+ return path
+    # If no path was specified, fall back to the default path when it exists
+ if not path and os.path.exists(LOCAL_HOST_TESTS_DIR) and os.path.isdir(LOCAL_HOST_TESTS_DIR):
+ return LOCAL_HOST_TESTS_DIR
+ return None
+
def print_version(verbose=True):
"""! Print current package version
"""
@@ -99,8 +111,12 @@ def main():
parser.add_option('', '--parallel',
dest='parallel_test_exec',
default=1,
- help='Experimental, you execute test runners for connected to your host MUTs in parallel (speeds up test result collection)')
-
+ help='Experimental, you execute test runners for connected to your host MUTs in parallel (speeds up test result collection)')
+
+ parser.add_option("-e", "--enum-host-tests",
+ dest="enum_host_tests",
+ help="Define directory with yotta module local host tests. Default: ./test/host_tests")
+
parser.add_option('', '--config',
dest='verbose_test_configuration_only',
default=False,
@@ -195,7 +211,7 @@ def main():
(opts, args) = parser.parse_args()
cli_ret = 0
-
+
start = time()
if opts.lock_by_target:
# We are using Greentea proprietary locking mechanism to lock between platforms and targets
@@ -238,14 +254,14 @@ def run_test_thread(test_result_queue, test_queue, opts, mut, mut_info, yotta_ta
test_platforms_match = 0
test_report = {}
#greentea_acquire_target_id(mut['target_id'], gt_instance_uuid)
-
+
while not test_queue.empty():
try:
test = test_queue.get(False)
except Exception as e:
print(str(e))
break
-
+
test_result = 'SKIPPED'
disk = mut['mount_point']
@@ -254,6 +270,7 @@ def run_test_thread(test_result_queue, test_queue, opts, mut, mut_info, yotta_ta
program_cycle_s = mut_info['properties']['program_cycle_s']
copy_method = opts.copy_method if opts.copy_method else 'shell'
verbose = opts.verbose_test_result_only
+ enum_host_tests_path = get_local_host_tests_dir(opts.enum_host_tests)
test_platforms_match += 1
#gt_log_tab("running host test...")
@@ -265,6 +282,7 @@ def run_test_thread(test_result_queue, test_queue, opts, mut, mut_info, yotta_ta
program_cycle_s=program_cycle_s,
digest_source=opts.digest_source,
json_test_cfg=opts.json_test_configuration,
+ enum_host_tests_path=enum_host_tests_path,
verbose=verbose)
single_test_result, single_test_output, single_testduration, single_timeout = host_test_result
@@ -286,20 +304,20 @@ def run_test_thread(test_result_queue, test_queue, opts, mut, mut_info, yotta_ta
test_report[yotta_target_name][test_name]['copy_method'] = copy_method
gt_log("test on hardware with target id: %s \n\ttest '%s' %s %s in %.2f sec"% (mut['target_id'], test['test_bin'], '.' * (80 - len(test['test_bin'])), test_result, single_testduration))
-
+
if single_test_result != 'OK' and not verbose and opts.report_fails:
# In some cases we want to print console to see why test failed
# even if we are not in verbose mode
gt_log_tab("test failed, reporting console output (specified with --report-fails option)")
print
- print single_test_output
-
+ print single_test_output
+
#greentea_release_target_id(mut['target_id'], gt_instance_uuid)
- test_result_queue.put({'test_platforms_match': test_platforms_match,
- 'test_exec_retcode': test_exec_retcode,
+ test_result_queue.put({'test_platforms_match': test_platforms_match,
+ 'test_exec_retcode': test_exec_retcode,
'test_report': test_report})
return
-
+
def main_cli(opts, args, gt_instance_uuid=None):
"""! This is main CLI function with all command line parameters
@details This function also implements CLI workflow depending on CLI parameters inputed
@@ -326,10 +344,12 @@ def main_cli(opts, args, gt_instance_uuid=None):
# Capture alternative test console inputs, used e.g. in 'yotta test command'
if opts.digest_source:
+ enum_host_tests_path = get_local_host_tests_dir(opts.enum_host_tests)
host_test_result = run_host_test(image_path=None,
disk=None,
port=None,
digest_source=opts.digest_source,
+ enum_host_tests_path=enum_host_tests_path,
verbose=opts.verbose_test_result_only)
single_test_result, single_test_output, single_testduration, single_timeout = host_test_result
@@ -450,7 +470,7 @@ def main_cli(opts, args, gt_instance_uuid=None):
muts_to_test = [] # MUTs to actually be tested
test_queue = Queue() # contains information about test_bin and image_path for each test case
test_result_queue = Queue() # used to store results of each thread
- execute_threads = [] # list of threads to run test cases
+ execute_threads = [] # list of threads to run test cases
### check if argument of --parallel mode is a integer and greater or equal 1
try:
@@ -460,8 +480,8 @@ def main_cli(opts, args, gt_instance_uuid=None):
except ValueError:
gt_log_err("argument of mode --parallel is not a int, disable parallel mode")
parallel_test_exec = 1
-
-
+
+
### Testing procedures, for each target, for each target's compatible platform
for yotta_target_name in yt_target_platform_map:
gt_log("processing '%s' yotta target compatible platforms..."% gt_bright(yotta_target_name))
@@ -484,7 +504,7 @@ def main_cli(opts, args, gt_instance_uuid=None):
gt_log_tab("%s = '%s'"% (k, mbed_dev[k]))
if number_of_parallel_instances < parallel_test_exec:
number_of_parallel_instances += 1
- else:
+ else:
break
# Configuration print mode:
@@ -503,7 +523,7 @@ def main_cli(opts, args, gt_instance_uuid=None):
micro = mut['platform_name']
program_cycle_s = mut_info_map[platfrom_name]['properties']['program_cycle_s']
copy_method = opts.copy_method if opts.copy_method else 'shell'
- verbose = opts.verbose_test_result_only
+ enum_host_tests_path = get_local_host_tests_dir(opts.enum_host_tests)
test_platforms_match += 1
host_test_result = run_host_test(opts.run_app,
@@ -515,6 +535,7 @@ def main_cli(opts, args, gt_instance_uuid=None):
digest_source=opts.digest_source,
json_test_cfg=opts.json_test_configuration,
run_app=opts.run_app,
+ enum_host_tests_path=enum_host_tests_path,
verbose=True)
single_test_result, single_test_output, single_testduration, single_timeout = host_test_result
@@ -571,36 +592,37 @@ def main_cli(opts, args, gt_instance_uuid=None):
gt_log_tab("note: test case names are case sensitive")
gt_log_tab("note: see list of available test cases below")
list_binaries_for_targets(verbose_footer=False)
-
+
gt_log("running %d test%s for target '%s' and platform '%s'"% (
len(filtered_ctest_test_list),
"s" if len(filtered_ctest_test_list) != 1 else "",
gt_bright(yotta_target_name),
gt_bright(platform_name)
))
-
+
for test_bin, image_path in filtered_ctest_test_list.iteritems():
test = {"test_bin":test_bin, "image_path":image_path}
test_queue.put(test)
-
+
number_of_threads = 0
for mut in muts_to_test:
#################################################################
# Experimental, parallel test execution
#################################################################
if number_of_threads < parallel_test_exec:
- t = threading.Thread(target=run_test_thread, args = (test_result_queue, test_queue, opts, mut, mut_info, yotta_target_name))
+ args = (test_result_queue, test_queue, opts, mut, mut_info, yotta_target_name)
+ t = Thread(target=run_test_thread, args=args)
execute_threads.append(t)
- number_of_threads += 1
+ number_of_threads += 1
gt_log_tab("use %s instance%s for testing" % (len(execute_threads), 's' if len(execute_threads) != 1 else ''))
for t in execute_threads:
t.daemon = True
t.start()
- while test_result_queue.qsize() != len(execute_threads):
- sleep(1)
-
- # merge partial test reports from diffrent threads to final test report
+ while test_result_queue.qsize() != len(execute_threads):
+ sleep(1)
+
+    # merge partial test reports from different threads to final test report
for t in execute_threads:
t.join()
test_return_data = test_result_queue.get(False)
@@ -614,7 +636,7 @@ def main_cli(opts, args, gt_instance_uuid=None):
test_report.update(partial_test_report)
else:
test_report[report_key].update(partial_test_report[report_key])
-
+
if opts.verbose_test_configuration_only:
print
print "Example: execute 'mbedgt --target=TARGET_NAME' to start testing for TARGET_NAME target"
diff --git a/mbed_greentea/mbed_test_api.py b/mbed_greentea/mbed_test_api.py
index 91cf8c16..b227667f 100644
--- a/mbed_greentea/mbed_test_api.py
+++ b/mbed_greentea/mbed_test_api.py
@@ -97,6 +97,7 @@ def run_host_test(image_path,
digest_source=None,
json_test_cfg=None,
max_failed_properties=5,
+ enum_host_tests_path=None,
run_app=None):
"""! This function runs host test supervisor (executes mbedhtrun) and checks output from host test process.
@return Tuple with test results, test output and test duration times
@@ -112,6 +113,7 @@ def run_host_test(image_path,
@param program_cycle_s Wait after flashing delay (sec)
@param json_test_cfg Additional test configuration file path passed to host tests in JSON format
@param max_failed_properties After how many unknown properties we will assume test is not ported
+ @param enum_host_tests_path Directory where locally defined host tests may reside
@param run_app Run application mode flag (we run application and grab serial port data)
@param digest_source if None mbedhtrun will be executed. If 'stdin',
stdin will be used via StdInObserver or file (if
@@ -250,6 +252,8 @@ def get_auto_property_value(property_name, line):
cmd += ["--test-cfg", '"%s"' % str(json_test_cfg)]
if run_app is not None:
cmd += ["--run"] # -f stores binary name!
+ if enum_host_tests_path:
+ cmd += ["-e", '"%s"'% enum_host_tests_path]
if verbose:
gt_log_tab("calling mbedhtrun: %s"% " ".join(cmd))
diff --git a/test/mbed_gt_test_parallel.py b/test/mbed_gt_test_parallel.py
index 108dd9c5..e41e1f4f 100644
--- a/test/mbed_gt_test_parallel.py
+++ b/test/mbed_gt_test_parallel.py
@@ -19,10 +19,9 @@
from mock import patch
from mbed_greentea import mbed_greentea_cli
-"""
-Mbed greentea parallelisation tests
-"""
class TestmbedGt(unittest.TestCase):
+ """! Mbed greentea parallelisation tests
+ """
def setUp(self):
"""
Called before test function
@@ -36,45 +35,63 @@ def tearDown(self):
:return:
"""
pass
-
+
@patch('mbed_greentea.mbed_greentea_cli.optparse.OptionParser')
@patch('mbed_greentea.mbed_greentea_cli.load_ctest_testsuite')
@patch('mbed_greentea.mbed_greentea_cli.mbed_lstools.create')
@patch('mbed_greentea.mbed_test_api.Popen')
- def test_basic(self, popen_mock, mbedLstools_mock, loadCtestTestsuite_mock, optionParser_mock):
+ def test_basic(self, popen_mock, mbedLstools_mock, loadCtestTestsuite_mock, optionParser_mock):
#runHostTest_mock.side_effect = run_host_test_mock
popen_mock.side_effect = PopenMock
mbedLstools_mock.side_effect = MbedsMock
loadCtestTestsuite_mock.return_value = load_ctest_testsuite_mock()
-
- my_gt_opts = GtOptions(list_of_targets="frdm-k64f-gcc", parallel_test_exec=3, test_by_names="mbed-drivers-test-stdio",
- use_target_ids="02400203A0811E505D7DE3D9", report_junit_file_name="junitTestReport")
+
+ my_gt_opts = GtOptions(list_of_targets="frdm-k64f-gcc",
+ parallel_test_exec=3,
+ test_by_names="mbed-drivers-test-stdio",
+ use_target_ids="02400203A0811E505D7DE3D9",
+ report_junit_file_name="junitTestReport")
OptionParserMock.static_options = my_gt_opts
optionParser_mock.side_effect = OptionParserMock
-
+
mbed_greentea_cli.main()
class PopenMock:
def __init__(self, *args, **kwargs):
self.stdout = StdOutMock()
-
+
def communicate(self):
return "_stdout", "_stderr"
def returncode(self):
return 0
-
+
def terminate(self):
- pass
-
+ pass
+
class StdOutMock:
def __init__(self):
- self.str = "MBED: Instrumentation: 'COM11' and disk: 'E:'\nHOST: Copy image onto target...\n\t1 file(s) copied.\nHOST: Initialize serial port...\n...port ready!\nHOST: Reset target...\nHOST: Detecting test case properties...\nHOST: Property 'timeout' = '20'\nHOST: Property 'host_test_name' = 'echo'\nHOST: Property 'description' = 'serial interrupt test'\nHOST: Property 'test_id' = 'MBED_14'\nHOST: Start test...\n...port ready!\nHOST: Starting the ECHO test\n..................................................\n{{success}}\n{{end}}"
+ self.str = """MBED: Instrumentation: 'COM11' and disk: 'E:'\nHOST: Copy image onto target...
+\t1 file(s) copied.
+HOST: Initialize serial port...
+...port ready!
+HOST: Reset target...
+HOST: Detecting test case properties...
+HOST: Property 'timeout' = '20'
+HOST: Property 'host_test_name' = 'echo'
+HOST: Property 'description' = 'serial interrupt test'
+HOST: Property 'test_id' = 'MBED_14'
+HOST: Start test...
+...port ready!
+HOST: Starting the ECHO test
+..................................................
+{{success}}
+{{end}}"""
self.offset = 0
-
+
def read(self, size):
if self.offset < len(self.str):
- ret = [self.str[i] for i in range (self.offset, self.offset + size)]
+ ret = [self.str[i] for i in range(self.offset, self.offset + size)]
self.offset += size
return ''.join(ret)
else:
@@ -82,34 +99,66 @@ def read(self, size):
self.offset = 0
return None
-
+
def run_host_test_mock(*args, **kwargs):
random_testduration = uniform(0.1, 2)
time.sleep(random_testduration)
return ('OK', 'single_test_output', random_testduration, 10)
-
-
+
+
def load_ctest_testsuite_mock():
- return {'mbed-drivers-test-echo': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-echo.bin', 'mbed-drivers-test-time_us': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-time_us.bin',
- 'mbed-drivers-test-serial_interrupt': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-serial_interrupt.bin', 'mbed-drivers-test-blinky': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-blinky.bin',
- 'mbed-drivers-test-functionpointer': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-functionpointer.bin', 'mbed-drivers-test-stdio': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-stdio.bin',
- 'mbed-drivers-test-eventhandler': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-eventhandler.bin', 'mbed-drivers-test-stl': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-stl.bin',
- 'mbed-drivers-test-div': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-div.bin', 'mbed-drivers-test-rtc': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-rtc.bin',
- 'mbed-drivers-test-cstring': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-cstring.bin', 'mbed-drivers-test-cpp': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-cpp.bin',
- 'mbed-drivers-test-timeout': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-timeout.bin', 'mbed-drivers-test-ticker_3': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-ticker_3.bin',
- 'mbed-drivers-test-ticker_2': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-ticker_2.bin', 'mbed-drivers-test-heap_and_stack': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-heap_and_stack.bin',
- 'mbed-drivers-test-hello': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-hello.bin', 'mbed-drivers-test-ticker': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-ticker.bin',
- 'mbed-drivers-test-dev_null': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-dev_null.bin', 'mbed-drivers-test-basic': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-basic.bin',
- 'mbed-drivers-test-asynch_spi': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-asynch_spi.bin', 'mbed-drivers-test-sleep_timeout': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-sleep_timeout.bin',
+ return {'mbed-drivers-test-echo': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-echo.bin',
+ 'mbed-drivers-test-time_us': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-time_us.bin',
+ 'mbed-drivers-test-serial_interrupt': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-serial_interrupt.bin',
+ 'mbed-drivers-test-blinky': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-blinky.bin',
+ 'mbed-drivers-test-functionpointer': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-functionpointer.bin',
+ 'mbed-drivers-test-stdio': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-stdio.bin',
+ 'mbed-drivers-test-eventhandler': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-eventhandler.bin',
+ 'mbed-drivers-test-stl': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-stl.bin',
+ 'mbed-drivers-test-div': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-div.bin',
+ 'mbed-drivers-test-rtc': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-rtc.bin',
+ 'mbed-drivers-test-cstring': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-cstring.bin',
+ 'mbed-drivers-test-cpp': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-cpp.bin',
+ 'mbed-drivers-test-timeout': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-timeout.bin',
+ 'mbed-drivers-test-ticker_3': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-ticker_3.bin',
+ 'mbed-drivers-test-ticker_2': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-ticker_2.bin',
+ 'mbed-drivers-test-heap_and_stack': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-heap_and_stack.bin',
+ 'mbed-drivers-test-hello': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-hello.bin',
+ 'mbed-drivers-test-ticker': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-ticker.bin',
+ 'mbed-drivers-test-dev_null': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-dev_null.bin',
+ 'mbed-drivers-test-basic': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-basic.bin',
+ 'mbed-drivers-test-asynch_spi': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-asynch_spi.bin',
+ 'mbed-drivers-test-sleep_timeout': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-sleep_timeout.bin',
'mbed-drivers-test-detect': '.\\build\\frdm-k64f-gcc\\test\\mbed-drivers-test-detect.bin'}
-
-
+
+
class GtOptions:
- def __init__(self, list_of_targets, test_by_names=None, only_build_tests=False, skip_yotta_build=True, copy_method=None, parallel_test_exec=0,
- verbose_test_configuration_only=False, build_to_release=False, build_to_debug=False, list_binaries=False, map_platform_to_yt_target={},
- use_target_ids=False, lock_by_target=False, digest_source=None, json_test_configuration=None, run_app=None, report_junit_file_name=None,
- report_text_file_name=None, report_json=False, report_fails=False, verbose_test_result_only=False, verbose=True, version=False):
-
+ def __init__(self,
+ list_of_targets,
+ test_by_names=None,
+ only_build_tests=False,
+ skip_yotta_build=True,
+ copy_method=None,
+ parallel_test_exec=0,
+ verbose_test_configuration_only=False,
+ build_to_release=False,
+ build_to_debug=False,
+ list_binaries=False,
+ map_platform_to_yt_target={},
+ use_target_ids=False,
+ lock_by_target=False,
+ digest_source=None,
+ json_test_configuration=None,
+ run_app=None,
+ report_junit_file_name=None,
+ report_text_file_name=None,
+ report_json=False,
+ report_fails=False,
+ verbose_test_result_only=False,
+ enum_host_tests=None,
+ verbose=True,
+ version=False):
+
self.list_of_targets = list_of_targets
self.test_by_names = test_by_names
self.only_build_tests = only_build_tests
@@ -131,39 +180,39 @@ def __init__(self, list_of_targets, test_by_names=None, only_build_tests=False,
self.report_json = report_json
self.report_fails = report_fails
self.verbose_test_result_only = verbose_test_result_only
+ self.enum_host_tests = enum_host_tests
self.verbose = verbose
self.version = version
-
-
+
+
class OptionParserMock:
static_options = {}
-
+
def __init__(self):
pass
-
+
def add_option(self, *args, **kwargs):
pass
-
+
def parse_args(self):
return (OptionParserMock.static_options, [])
-
-
+
+
class MbedsMock:
def __init__(self):
pass
-
+
def list_mbeds(self):
return [{'target_id_mbed_htm': '02400203A0811E505D7DE3E8', 'mount_point': 'E:', 'target_id': '02400203A0811E505D7DE3E8', 'serial_port': u'COM11', 'target_id_usb_id': '02400203A0811E505D7DE3E8', 'platform_name': 'K64F'},
{'target_id_mbed_htm': '02400203A0811E505D7DE3D9', 'mount_point': 'F:', 'target_id': '02400203A0811E505D7DE3D9', 'serial_port': u'COM12', 'target_id_usb_id': '02400203A0811E505D7DE3D9', 'platform_name': 'K64F'}]
-
+
def list_mbeds_ext(self):
return [{'target_id_mbed_htm': '02400203A0811E505D7DE3E8', 'mount_point': 'E:', 'target_id': '02400203A0811E505D7DE3E8', 'serial_port': u'COM11', 'target_id_usb_id': '02400203A0811E505D7DE3E8', 'platform_name': 'K64F', 'platform_name_unique': 'K64F[0]'},
{'target_id_mbed_htm': '02400203A0811E505D7DE3D9', 'mount_point': 'F:', 'target_id': '02400203A0811E505D7DE3D9', 'serial_port': u'COM12', 'target_id_usb_id': '02400203A0811E505D7DE3D9', 'platform_name': 'K64F', 'platform_name_unique': 'K64F[1]'},
{'target_id_mbed_htm': '02400203A0811E505D7DE3A7', 'mount_point': 'G:', 'target_id': '02400203A0811E505D7DE3A7', 'serial_port': u'COM13', 'target_id_usb_id': '02400203A0811E505D7DE3A7', 'platform_name': 'K64F', 'platform_name_unique': 'K64F[2]'},
{'target_id_mbed_htm': '09400203A0811E505D7DE3B6', 'mount_point': 'H:', 'target_id': '09400203A0811E505D7DE3B6', 'serial_port': u'COM14', 'target_id_usb_id': '09400203A0811E505D7DE3B6', 'platform_name': 'K100F', 'platform_name_unique': 'K100F[0]'}]
-
+
def list_platforms_ext(self):
return {'K64F': 2}
-
-
-
\ No newline at end of file
+
+