X-Git-Url: http://review.tizen.org/git/?a=blobdiff_plain;f=inference-engine%2Finclude%2Fcldnn%2Fcldnn_config.hpp;h=64ded2d2d621c54f08352ff4467abdb7e5a8666a;hb=6dfc778940ec1e52737404ddc5c9634a40064b4d;hp=3fac6a28530671307441687f4af332a1381d9968;hpb=866530fb047cd17af6bd2dbbde5f5cb35f876840;p=platform%2Fupstream%2Fdldt.git

diff --git a/inference-engine/include/cldnn/cldnn_config.hpp b/inference-engine/include/cldnn/cldnn_config.hpp
index 3fac6a2..64ded2d 100644
--- a/inference-engine/include/cldnn/cldnn_config.hpp
+++ b/inference-engine/include/cldnn/cldnn_config.hpp
@@ -1,5 +1,4 @@
-// Copyright (C) 2018 Intel Corporation
-//
+// Copyright (C) 2018-2019 Intel Corporation
 // SPDX-License-Identifier: Apache-2.0
 //
 
@@ -12,10 +11,13 @@
 #pragma once
 
 #include <string>
-#include "../ie_plugin_config.hpp"
+#include "ie_plugin_config.hpp"
 
 namespace InferenceEngine {
 
+/**
+ * @brief GPU plugin configuration
+ */
 namespace CLDNNConfigParams {
 
 /**
@@ -57,5 +59,10 @@ DECLARE_CLDNN_CONFIG_KEY(GRAPH_DUMPS_DIR);
 */
 DECLARE_CLDNN_CONFIG_KEY(SOURCES_DUMPS_DIR);
 
+/**
+* @brief This key turns usage of int8 optimizations and quantized models on.
+*/
+DECLARE_CLDNN_CONFIG_KEY(INT8_ENABLED);
+
 } // namespace CLDNNConfigParams
 } // namespace InferenceEngine