Skip to content

Commit

Permalink
[VitisAI] Refactor the VAIEP to use MSFT's standalone API (#19058)
Browse files Browse the repository at this point in the history
### Description
<!-- Describe your changes. -->
Refactor the VAIEP to use MSFT's standalone API


### Motivation and Context
<!-- - Why is this change required? What problem does it solve?
- If it fixes an open issue, please link to the issue here. -->
Vitis ONNX RT VAI should switch to using the standalone API for ONNX EPs
in order to decouple the EP from onnxruntime.dll and the providers.dll.
This will help to simplify customer deployment of applications and use
cases that need to share their onnxruntime.dll with other applications.

---------

Co-authored-by: Zhenze Wang <[email protected]>
Co-authored-by: zz002 <[email protected]>
  • Loading branch information
3 people committed Feb 1, 2024
1 parent 68b6064 commit 1d6f13f
Show file tree
Hide file tree
Showing 42 changed files with 1,000 additions and 1,101 deletions.
1 change: 0 additions & 1 deletion cmake/onnxruntime.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -189,7 +189,6 @@ set(onnxruntime_INTERNAL_LIBRARIES
${PROVIDERS_SNPE}
${PROVIDERS_TVM}
${PROVIDERS_RKNPU}
${PROVIDERS_VITISAI}
${PROVIDERS_XNNPACK}
${PROVIDERS_WEBNN}
${PROVIDERS_AZURE}
Expand Down
32 changes: 19 additions & 13 deletions cmake/onnxruntime_providers_vitisai.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -14,14 +14,19 @@
"${ONNXRUNTIME_ROOT}/core/providers/vitisai/*.h"
"${ONNXRUNTIME_ROOT}/core/providers/vitisai/imp/*.cc"
"${ONNXRUNTIME_ROOT}/core/providers/vitisai/imp/*.h"
"${ONNXRUNTIME_ROOT}/core/providers/shared_library/*.h"
"${ONNXRUNTIME_ROOT}/core/providers/shared_library/*.cc"
)
source_group(TREE ${ONNXRUNTIME_ROOT}/core FILES ${onnxruntime_providers_vitisai_cc_srcs})
onnxruntime_add_static_library(onnxruntime_providers_vitisai ${onnxruntime_providers_vitisai_cc_srcs})
onnxruntime_add_include_to_target(onnxruntime_providers_vitisai onnxruntime_common onnxruntime_framework onnx onnx_proto)
target_link_libraries(onnxruntime_providers_vitisai PRIVATE onnx protobuf::libprotobuf nlohmann_json::nlohmann_json)
if(NOT MSVC)
target_compile_options(onnxruntime_providers_vitisai PUBLIC $<$<CONFIG:DEBUG>:-U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0>)
endif(NOT MSVC)
onnxruntime_add_shared_library(onnxruntime_providers_vitisai ${onnxruntime_providers_vitisai_cc_srcs})
onnxruntime_add_include_to_target(onnxruntime_providers_vitisai ${ONNXRUNTIME_PROVIDERS_SHARED} nlohmann_json::nlohmann_json safeint_interface flatbuffers::flatbuffers)
target_link_libraries(onnxruntime_providers_vitisai PRIVATE ${ONNXRUNTIME_PROVIDERS_SHARED})
if(MSVC)
onnxruntime_add_include_to_target(onnxruntime_providers_vitisai dbghelp)
set_property(TARGET onnxruntime_providers_vitisai APPEND_STRING PROPERTY LINK_FLAGS "-DEF:${ONNXRUNTIME_ROOT}/core/providers/vitisai/symbols.def")
else(MSVC)
set_property(TARGET onnxruntime_providers_vitisai APPEND_STRING PROPERTY LINK_FLAGS "-Xlinker --version-script=${ONNXRUNTIME_ROOT}/core/providers/vitisai/version_script.lds -Xlinker --gc-sections")
endif(MSVC)

target_include_directories(onnxruntime_providers_vitisai PRIVATE "${ONNXRUNTIME_ROOT}/core/providers/vitisai/include" ${XRT_INCLUDE_DIRS} ${CMAKE_CURRENT_BINARY_DIR}/VitisAI)
if(MSVC)
Expand All @@ -30,17 +35,18 @@
target_compile_options(onnxruntime_providers_vitisai PRIVATE "/wd4251")
# for unused formal parameter
target_compile_options(onnxruntime_providers_vitisai PRIVATE "/wd4100")
# for type name first seen using 'class' now seen using 'struct'
target_compile_options(onnxruntime_providers_vitisai PRIVATE "/wd4099")
else(MSVC)
target_compile_options(onnxruntime_providers_vitisai PUBLIC $<$<CONFIG:DEBUG>:-U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0>)
target_compile_options(onnxruntime_providers_vitisai PRIVATE -Wno-unused-parameter)
endif(MSVC)

set_target_properties(onnxruntime_providers_vitisai PROPERTIES FOLDER "ONNXRuntime")
set_target_properties(onnxruntime_providers_vitisai PROPERTIES LINKER_LANGUAGE CXX)

if (NOT onnxruntime_BUILD_SHARED_LIB)
install(TARGETS onnxruntime_providers_vitisai
ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
FRAMEWORK DESTINATION ${CMAKE_INSTALL_BINDIR})
endif()
install(TARGETS onnxruntime_providers_vitisai
ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
FRAMEWORK DESTINATION ${CMAKE_INSTALL_BINDIR})
11 changes: 10 additions & 1 deletion cmake/onnxruntime_python.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -170,7 +170,6 @@ target_link_libraries(onnxruntime_pybind11_state PRIVATE
onnxruntime_session
${onnxruntime_libs}
${PROVIDERS_TVM}
${PROVIDERS_VITISAI}
${PROVIDERS_NNAPI}
${PROVIDERS_XNNPACK}
${PROVIDERS_COREML}
Expand Down Expand Up @@ -852,6 +851,16 @@ if (onnxruntime_USE_DNNL)
)
endif()

if (onnxruntime_USE_VITISAI)
  # Copy the VitisAI EP shared library and the provider bridge next to the
  # Python bindings so `import onnxruntime` can load them at runtime.
  # NOTE: the original command copied ${DNNL_DLL_PATH} here — a leftover from
  # the DNNL block above — which is empty/wrong in a VitisAI build; removed.
  add_custom_command(
    TARGET onnxruntime_pybind11_state POST_BUILD
    COMMAND ${CMAKE_COMMAND} -E copy
      $<TARGET_FILE:onnxruntime_providers_vitisai>
      $<TARGET_FILE:onnxruntime_providers_shared>
      $<TARGET_FILE_DIR:${build_output_target}>/onnxruntime/capi/
    VERBATIM
  )
endif()

if (onnxruntime_USE_TENSORRT)
add_custom_command(
TARGET onnxruntime_pybind11_state POST_BUILD
Expand Down
1 change: 0 additions & 1 deletion cmake/onnxruntime_unittests.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -591,7 +591,6 @@ set(ONNXRUNTIME_TEST_LIBS
# CUDA, ROCM, TENSORRT, MIGRAPHX, DNNL, and OpenVINO are dynamically loaded at runtime
${PROVIDERS_NNAPI}
${PROVIDERS_JS}
${PROVIDERS_VITISAI}
${PROVIDERS_QNN}
${PROVIDERS_SNPE}
${PROVIDERS_RKNPU}
Expand Down
17 changes: 17 additions & 0 deletions include/onnxruntime/core/session/onnxruntime_c_api.h
Original file line number Diff line number Diff line change
Expand Up @@ -4569,6 +4569,23 @@ struct OrtApi {
_In_reads_(num_keys) const char* const* provider_options_keys,
_In_reads_(num_keys) const char* const* provider_options_values,
_In_ size_t num_keys);

/** \brief Append VitisAI provider to session options
*
* If VitisAI is not available (due to a non-VitisAI-enabled build, or if VitisAI is not installed on the system), this function will return failure.
*
* \param[in] options
* \param[in] provider_options_keys
* \param[in] provider_options_values
* \param[in] num_keys
*
* \snippet{doc} snippets.dox OrtStatus Return Value
*/
ORT_API2_STATUS(SessionOptionsAppendExecutionProvider_VitisAI,
_In_ OrtSessionOptions* options,
_In_reads_(num_keys) const char* const* provider_options_keys,
_In_reads_(num_keys) const char* const* provider_options_values,
_In_ size_t num_keys);
};

/*
Expand Down
3 changes: 3 additions & 0 deletions include/onnxruntime/core/session/onnxruntime_cxx_api.h
Original file line number Diff line number Diff line change
Expand Up @@ -901,6 +901,9 @@ struct SessionOptionsImpl : ConstSessionOptionsImpl<T> {
SessionOptionsImpl& RegisterCustomOpsLibrary(const ORTCHAR_T* library_name, const CustomOpConfigs& custom_op_configs = {});

SessionOptionsImpl& RegisterCustomOpsUsingFunction(const char* function_name); ///< Wraps OrtApi::RegisterCustomOpsUsingFunction

/// Wraps OrtApi::SessionOptionsAppendExecutionProvider_VitisAI
SessionOptionsImpl& AppendExecutionProvider_VitisAI(const std::unordered_map<std::string, std::string>& provider_options = {});
};
} // namespace detail

Expand Down
19 changes: 19 additions & 0 deletions include/onnxruntime/core/session/onnxruntime_cxx_inline.h
Original file line number Diff line number Diff line change
Expand Up @@ -885,6 +885,25 @@ inline SessionOptionsImpl<T>& SessionOptionsImpl<T>::AppendExecutionProvider_Ope
return *this;
}

template <typename T>
inline SessionOptionsImpl<T>& SessionOptionsImpl<T>::AppendExecutionProvider_VitisAI(const std::unordered_map<std::string, std::string>& provider_options) {
  // Flatten the option map into the parallel C-string key/value arrays the
  // C API expects. The vectors only borrow the map's string storage, which
  // stays alive for the duration of the API call below.
  const size_t option_count = provider_options.size();
  std::vector<const char*> option_keys;
  std::vector<const char*> option_values;
  option_keys.reserve(option_count);
  option_values.reserve(option_count);
  for (const auto& option : provider_options) {
    option_keys.push_back(option.first.c_str());
    option_values.push_back(option.second.c_str());
  }

  // Throws Ort::Exception on failure (e.g. VitisAI not available in this build).
  ThrowOnError(GetApi().SessionOptionsAppendExecutionProvider_VitisAI(this->p_, option_keys.data(), option_values.data(), option_count));

  return *this;
}

template <typename T>
inline SessionOptionsImpl<T>& SessionOptionsImpl<T>::RegisterCustomOpsLibrary(const ORTCHAR_T* library_name,
const CustomOpConfigs& custom_op_configs) {
Expand Down
4 changes: 0 additions & 4 deletions onnxruntime/core/providers/provider_factory_creators.h
Original file line number Diff line number Diff line change
Expand Up @@ -78,10 +78,6 @@
#include "core/providers/tvm/tvm_provider_factory_creator.h"
#endif

#if defined(USE_VITISAI)
#include "core/providers/vitisai/vitisai_provider_factory_creator.h"
#endif

#if defined(USE_XNNPACK)
#include "core/providers/xnnpack/xnnpack_provider_factory_creator.h"
#endif
Expand Down
9 changes: 8 additions & 1 deletion onnxruntime/core/providers/shared_library/provider_api.h
Original file line number Diff line number Diff line change
Expand Up @@ -95,12 +95,15 @@ enum OperatorStatus : int {
};

// onnx Protobuf types (All of these are direct mappings to the onnx types except for the Repeated*Field ones which map to a Repeated*Field type)
struct int64s; // RepeatedField
struct int64s; // RepeatedField
struct float32s; // RepeatedField
struct AttributeProto;
struct GraphProto;
struct ModelProto;
struct NodeProto;
struct SparseTensorProto;
struct StringStringEntryProto;
struct StringStringEntryProtos; // RepeatedPtrField
struct TensorProto;
struct TensorProtos; // RepeatedPtrField
struct TensorShapeProto_Dimension;
Expand All @@ -113,6 +116,9 @@ struct TypeProto_Sequence;
struct TypeProto;
struct ValueInfoProto;
struct ValueInfoProtos; // RepeatedPtrField
struct InferenceContext;
class GraphInferencer;
using InferenceFunction = std::function<void(InferenceContext&)>;
} // namespace ONNX_NAMESPACE

namespace onnxruntime {
Expand Down Expand Up @@ -249,6 +255,7 @@ constexpr const char* kCudaExecutionProvider = "CUDAExecutionProvider";
constexpr const char* kCannExecutionProvider = "CANNExecutionProvider";
constexpr const char* kDnnlExecutionProvider = "DnnlExecutionProvider";
constexpr const char* kOpenVINOExecutionProvider = "OpenVINOExecutionProvider";
constexpr const char* kVitisAIExecutionProvider = "VitisAIExecutionProvider";
constexpr const char* kRocmExecutionProvider = "ROCMExecutionProvider";
constexpr const char* kTensorrtExecutionProvider = "TensorrtExecutionProvider";
constexpr const char* kMIGraphXExecutionProvider = "MIGraphXExecutionProvider";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -492,6 +492,10 @@ template <>
Status UnpackTensor(const ONNX_NAMESPACE::TensorProto& tensor, const void* raw_data, size_t raw_data_len, /*out*/ int64_t* p_data, size_t expected_size) { return g_host->UnpackTensor(tensor, raw_data, raw_data_len, p_data, expected_size); }
template <>
Status UnpackTensor(const ONNX_NAMESPACE::TensorProto& tensor, const void* raw_data, size_t raw_data_len, /*out*/ uint64_t* p_data, size_t expected_size) { return g_host->UnpackTensor(tensor, raw_data, raw_data_len, p_data, expected_size); }
// Shared-library provider bridge: forwards initializer unpacking to the host
// ONNX Runtime process through the g_host provider interface, so EP shared
// libraries need not link the implementation directly.
// NOTE(review): presumably model_path resolves external-data initializers
// relative to the model file — confirm against the host implementation.
Status UnpackInitializerData(const ONNX_NAMESPACE::TensorProto& tensor, const Path& model_path,
/*out*/ std::vector<uint8_t>& unpacked_tensor) {
return g_host->UnpackInitializerData(tensor, model_path, unpacked_tensor);
}

} // namespace utils

Expand Down
Loading

0 comments on commit 1d6f13f

Please sign in to comment.