2 - name: ssd_mobilenet_v1_coco
4 # list of launchers for your topology.
6 # launcher framework (e.g. caffe, dlsdk)
8 # device for inference (e.g. for dlsdk: cpu, gpu, hetero:cpu,gpu, ...)
10 # topology IR (*.prototxt for caffe, *.xml for InferenceEngine, etc.)
11 # path to topology is prefixed with directory, specified in "-m/--models" option
12 tf_model: ssd_mobilenet_v1_coco.pb
13 # the launcher returns a raw result, so it should be converted
14 # to an appropriate representation with an adapter
19 tensorflow_use_custom_operations_config: ssd_v2_support.json
20 tensorflow_object_detection_api_pipeline_config: ssd_mobilenet_v1_coco.config
22 # metrics, preprocessing and postprocessing are typically dataset specific, so dataset field
23 # specifies data and all other steps required to validate topology
24 # there is typically a definitions file, which contains options for common datasets and which is merged
25 # during evaluation, but since this dataset is not used anywhere else, this config contains the full definition
27 # uniquely distinguishable name for dataset
28 # note that all other steps are specific for this dataset only
29 # if you need to test topology on multiple datasets, you need to specify
30 # every step explicitly for each dataset
31 - name: COCO2017_90cl_bkgr
33 # list of metrics, calculated on dataset
37 ignore_difficult: True
38 presenter: print_scalar
40 - type: coco_precision