/**
 * \brief An abstract base class for inferencers. Subclasses must implement the pure virtual Inference() overloads.
 * \author Nan Zhou, nanzhou at kneron dot us
 * \copyright 2019 Kneron Inc. All rights reserved.
 */
#ifndef PIANO_DYNASTY_INCLUDE_ABSTRACTINFERENCER_H_
#define PIANO_DYNASTY_INCLUDE_ABSTRACTINFERENCER_H_
#include <memory>
#include <string>
#include <unordered_map>
#include <vector>
#include "tensor.h"
namespace dynasty {
namespace inferencer {
template<typename T>
class AbstractInferencer {
 public:
  /**
   * @brief Custom deleter for smart pointers. It is needed because we call the virtual CleanUp() before deleting the object; a virtual call made from the destructor would not dispatch to the derived class.
   */
  class InferencerDeletor {
   public:
    void operator () (AbstractInferencer<T> *ptr) {
      ptr->CleanUp();
      delete ptr;
    }
  };
  class Builder {
   public:
    Builder() = default;
    virtual ~Builder() = default;
    virtual std::unique_ptr<AbstractInferencer<T>, InferencerDeletor> Build() = 0;
  };
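  // A minimal sketch of how a concrete inferencer and its builder are expected
  // to plug into this interface. "MyInferencer" is a hypothetical name, not part
  // of this header; the deleter guarantees CleanUp() runs with virtual dispatch
  // before the object is destroyed.
  //
  //   class MyInferencer : public AbstractInferencer<float> {
  //    public:
  //     class Builder : public AbstractInferencer<float>::Builder {
  //      public:
  //       std::unique_ptr<AbstractInferencer<float>, InferencerDeletor>
  //       Build() override {
  //         return std::unique_ptr<AbstractInferencer<float>,
  //                                InferencerDeletor>(new MyInferencer());
  //       }
  //     };
  //     // ... implement the pure virtual Inference() overloads here ...
  //    protected:
  //     void CleanUp() override { /* release model/session resources */ }
  //   };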
 protected:
  AbstractInferencer() = default;
  /**
   * @brief No-op default clean-up; subclasses override it to release their resources before deletion.
   */
  virtual void CleanUp() {}
 public:
  virtual ~AbstractInferencer() = default;
  /**
   * \brief Pure virtual interface: runs inference on preprocessed float vectors and packs the resulting operation names and their float vectors into the returned map.
   * \param preprocess_input {operation_node_name: 1d_vector} pairs
   * \param only_output_layers if true, only results of output operations are returned; otherwise results of all operations are returned
   * \return {operation node name: corresponding float vector}
   */
  virtual std::unordered_map<std::string, std::vector<T>> Inference(
      std::unordered_map<std::string, std::vector<T>> const &preprocess_input, bool only_output_layers) = 0;
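  // Example call of the vector overload (operation names and shapes are
  // illustrative only, assuming a float inferencer named "inferencer"):
  //
  //   std::unordered_map<std::string, std::vector<float>> inputs{
  //       {"input_1_o0", std::vector<float>(224 * 224 * 3, 0.f)}};
  //   auto outputs = inferencer->Inference(inputs, /*only_output_layers=*/true);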
  /**
   * \brief Runs inference on preprocessed Tensor inputs and packs the resulting operation names and their float vectors into the returned map.
   * \param preprocess_input {operation_node_name: shared_ptr<Tensor>} pairs
   * \param only_output_layers if true, only results of output operations are returned; otherwise results of all operations are returned
   * \return {operation node name: corresponding float vector}
   */
  virtual std::unordered_map<std::string, std::vector<T>> Inference(
      std::unordered_map<std::string, std::shared_ptr<dynasty::common::Tensor>> const &preprocess_input,
      bool only_output_layers);
  /**
   * \brief Pure virtual interface: runs inference from {operation_name, path_to_txt} pairs, where each txt file holds one flattened 1-D input vector.
   * \param preprocess_input {operation_node_name: path_to_1d_vector_txt} pairs
   * \param only_output_layers if true, only results of output operations are returned; otherwise results of all operations are returned
   * \return {operation node name: corresponding float vector}
   */
  virtual std::unordered_map<std::string, std::vector<T>> Inference(
      std::unordered_map<std::string, std::string> const &preprocess_input, bool only_output_layers) = 0;
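  // Example call of the txt overload (paths and operation names are
  // illustrative only):
  //
  //   std::unordered_map<std::string, std::string> txt_inputs{
  //       {"input_1_o0", "/path/to/input_1_h_w_c.txt"}};
  //   auto outputs = inferencer->Inference(txt_inputs, /*only_output_layers=*/true);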
  /**
   * \brief Runs inference from a JSON config file that lists the input txt files.
   * \param preprocess_input_config path to a JSON file specifying the inputs, e.g.
   * \code
   * {
   *   "model_input_txts": [
   *     {
   *       "data_vector": "/path/to/input_1_h_w_c.txt",
   *       "operation_name": "input_1_o0"
   *     },
   *     {
   *       "data_vector": "/path/to/input_2_h_w_c.txt",
   *       "operation_name": "input_2_o0"
   *     }
   *   ]
   * }
   * \endcode
   * \param only_output_layers if true, only results of output operations are returned; otherwise results of all operations are returned
   * \return {operation node name: corresponding float vector}
   */
  std::unordered_map<std::string, std::vector<T>> Inference(std::string const &preprocess_input_config,
                                                            bool only_output_layers = true);
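  // Example call of the config-file overload (the config path and the output
  // operation name are illustrative only):
  //
  //   auto outputs = inferencer->Inference("/path/to/preprocess_config.json");
  //   const std::vector<float> &result = outputs.at("output_o0");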
  /// Converts {operation_name, path_to_txt} inputs into {operation_name, 1d_vector} inputs; the default implementation does nothing.
  virtual void convertInput(std::unordered_map<std::string, std::string> const &preprocess_input,
                            std::unordered_map<std::string, std::vector<T>> &vector_inputs) {}
  /// Converts float outputs into int outputs; the default implementation returns an empty map.
  virtual std::unordered_map<std::string, std::vector<int>> ConvertFloatToInt(
      std::unordered_map<std::string, std::vector<T>> &float_output, bool only_output_layers) {
    return {};
  }
  /// Dumps a radix JSON for the given model file to output_path; the default implementation does nothing.
  virtual void dumpRadixJson(std::string const &model_file, std::string output_path) {}
};
template <typename T>
using InferencerUniquePtr = std::unique_ptr<AbstractInferencer<T>, typename AbstractInferencer<T>::InferencerDeletor>;
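// A minimal usage sketch of the alias with a hypothetical concrete builder
// (see the MyInferencer sketch above): Build() hands ownership to an
// InferencerUniquePtr, so CleanUp() is invoked automatically on destruction.
//
//   MyInferencer::Builder builder;
//   InferencerUniquePtr<float> inferencer = builder.Build();
//   auto outputs = inferencer->Inference(inputs, /*only_output_layers=*/true);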
}  // namespace inferencer
}  // namespace dynasty
#endif // PIANO_DYNASTY_INCLUDE_ABSTRACTINFERENCER_H_