Program Listing for File NeuralNetwork.hpp
#pragma once
#include "depthai/openvino/OpenVINO.hpp"
#include "depthai/pipeline/Node.hpp"
// standard
#include <fstream>
// shared
#include <depthai-shared/properties/NeuralNetworkProperties.hpp>
namespace dai {
namespace node {
class NeuralNetwork : public NodeCRTP<Node, NeuralNetwork, NeuralNetworkProperties> {
   public:
    constexpr static const char* NAME = "NeuralNetwork";

   protected:
    tl::optional<OpenVINO::Version> getRequiredOpenVINOVersion() override;
    tl::optional<OpenVINO::Version> networkOpenvinoVersion;

   public:
    NeuralNetwork(const std::shared_ptr<PipelineImpl>& par, int64_t nodeId);
    NeuralNetwork(const std::shared_ptr<PipelineImpl>& par, int64_t nodeId, std::unique_ptr<Properties> props);
    // Input message with data to run inference on (any Buffer-derived message)
    Input input{*this, "in", Input::Type::SReceiver, true, 5, true, {{DatatypeEnum::Buffer, true}}};
    // Outputs an NNData message with the inference results
    Output out{*this, "out", Output::Type::MSender, {{DatatypeEnum::NNData, false}}};
    // Passes the input message through; useful for syncing results with their source frames
    Output passthrough{*this, "passthrough", Output::Type::MSender, {{DatatypeEnum::Buffer, true}}};
    // Named inputs and matching passthroughs for networks with multiple input layers
    InputMap inputs;
    OutputMap passthroughs;
    // Specify a local filesystem path to the network blob (loaded at loadAssets)
    void setBlobPath(const dai::Path& path);
    // Set an already loaded OpenVINO blob, or load one from the given path
    void setBlob(OpenVINO::Blob blob);
    void setBlob(const dai::Path& path);
    // Set the number of frames in the output message pool
    void setNumPoolFrames(int numFrames);
    // Set the number of threads used for inference (0 = auto)
    void setNumInferenceThreads(int numThreads);
    // Set how many Neural Compute Engines run per inference thread
    void setNumNCEPerInferenceThread(int numNCEPerThread);
    // Get the number of threads used for inference
    int getNumInferenceThreads();
    // TODO add getters for other API
};
} // namespace node
} // namespace dai
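The sketch below shows how a NeuralNetwork node is typically created and linked in a pipeline. It assumes a connected OAK device; the blob path "model.blob", the 300x300 preview size, and the stream name "nn_out" are placeholders for illustration, not values defined by this header.

#include "depthai/depthai.hpp"

int main() {
    dai::Pipeline pipeline;

    // Create the node and point it at a compiled network blob (path is an example)
    auto nn = pipeline.create<dai::node::NeuralNetwork>();
    nn->setBlobPath("model.blob");
    nn->setNumInferenceThreads(2);

    // Feed frames from a ColorCamera into the network
    auto cam = pipeline.create<dai::node::ColorCamera>();
    cam->setPreviewSize(300, 300);
    cam->preview.link(nn->input);

    // Stream NNData results back to the host
    auto xout = pipeline.create<dai::node::XLinkOut>();
    xout->setStreamName("nn_out");
    nn->out.link(xout->input);

    dai::Device device(pipeline);
    auto result = device.getOutputQueue("nn_out")->get<dai::NNData>();
    return 0;
}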