class InferenceEngine::RemoteBlob¶
Overview¶
This class represents an Inference Engine abstraction over memory allocated on a remote (non-CPU) accelerator device.
#include <ie_remote_blob.hpp>
class RemoteBlob: public InferenceEngine::MemoryBlob
{
public:
// typedefs
typedef std::shared_ptr<RemoteBlob> Ptr;
typedef std::shared_ptr<const RemoteBlob> CPtr;
// construction
RemoteBlob(const TensorDesc& tensorDesc);
// methods
virtual ParamMap getParams() const = 0;
virtual std::string getDeviceName() const = 0;
virtual std::shared_ptr<RemoteContext> getContext() const = 0;
};
// direct descendants
class ClBlob;
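A RemoteBlob is never constructed directly by application code; a plugin-specific RemoteContext acts as its factory. The following minimal sketch (assuming the GPU plugin; the model path and input name are placeholders) shows a typical flow: obtain the context from an ExecutableNetwork, allocate a blob in device memory, and pass it to an infer request like any other Blob.

#include <ie_core.hpp>

using namespace InferenceEngine;

int main() {
    Core ie;
    // "model.xml" and the input name "input" are placeholders.
    CNNNetwork network = ie.ReadNetwork("model.xml");
    ExecutableNetwork exec_net = ie.LoadNetwork(network, "GPU");

    // The plugin-owned remote context manages device-side allocations.
    RemoteContext::Ptr context = exec_net.GetContext();

    // Allocate a blob directly in device memory.
    TensorDesc desc(Precision::FP32, {1, 3, 224, 224}, Layout::NCHW);
    RemoteBlob::Ptr remote_blob = context->CreateBlob(desc);

    // A RemoteBlob is used like any other Blob.
    InferRequest request = exec_net.CreateInferRequest();
    request.SetBlob("input", remote_blob);
    request.Infer();
    return 0;
}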
Inherited Members¶
public:
// typedefs
typedef std::shared_ptr<Blob> Ptr;
typedef std::shared_ptr<const Blob> CPtr;
typedef std::shared_ptr<MemoryBlob> Ptr;
typedef std::shared_ptr<const MemoryBlob> CPtr;
// methods
static Ptr CreateFromData(const DataPtr& data);
template <
typename T,
typename std::enable_if<!std::is_pointer<T>::value&&!std::is_reference<T>::value, int>::type = 0,
typename std::enable_if<std::is_base_of<Blob, T>::value, int>::type = 0
>
bool is();
template <
typename T,
typename std::enable_if<!std::is_pointer<T>::value&&!std::is_reference<T>::value, int>::type = 0,
typename std::enable_if<std::is_base_of<Blob, T>::value, int>::type = 0
>
bool is() const;
template <
typename T,
typename std::enable_if<!std::is_pointer<T>::value&&!std::is_reference<T>::value, int>::type = 0,
typename std::enable_if<std::is_base_of<Blob, T>::value, int>::type = 0
>
T* as();
template <
typename T,
typename std::enable_if<!std::is_pointer<T>::value&&!std::is_reference<T>::value, int>::type = 0,
typename std::enable_if<std::is_base_of<Blob, T>::value, int>::type = 0
>
const T* as() const;
virtual const TensorDesc& getTensorDesc() const;
virtual TensorDesc& getTensorDesc();
virtual size_t size() const;
virtual size_t byteSize() const;
virtual size_t element_size() const = 0;
virtual void allocate() = 0;
virtual bool deallocate() = 0;
void setShape(const SizeVector& dims);
virtual Blob::Ptr createROI(const ROI& roi) const;
virtual Blob::Ptr createROI(
const std::vector<std::size_t>& begin,
const std::vector<std::size_t>& end
) const;
virtual const TensorDesc& getTensorDesc() const;
virtual TensorDesc& getTensorDesc();
virtual size_t size() const;
virtual size_t byteSize() const;
virtual size_t element_size() const;
virtual void allocate() = 0;
virtual bool deallocate() = 0;
virtual LockedMemory<void> rwmap() = 0;
virtual LockedMemory<const void> rmap() const = 0;
virtual LockedMemory<void> wmap() = 0;
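The map methods inherited from MemoryBlob are the portable way to touch remote memory from host code. A short sketch, continuing from the creation example above (whether the mapping is zero-copy or a staged copy depends on the plugin):

// Map the remote blob into host-visible memory for read/write access;
// the mapping is released when 'mapped' goes out of scope.
LockedMemory<void> mapped = remote_blob->rwmap();
float* data = mapped.as<float*>();
data[0] = 1.0f;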
Detailed Documentation¶
This class represents an Inference Engine abstraction over memory allocated on a remote (non-CPU) accelerator device.
Typedefs¶
typedef std::shared_ptr<RemoteBlob> Ptr
A smart pointer to the RemoteBlob object.
typedef std::shared_ptr<const RemoteBlob> CPtr
A smart pointer to the const RemoteBlob object.
Construction¶
RemoteBlob(const TensorDesc& tensorDesc)
Constructor. Creates an empty RemoteBlob object with the specified TensorDesc.
Parameters:
tensorDesc – Defines the precision, layout, and dims of the blob
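Because the class is abstract, this constructor is only invoked by plugin-specific subclasses; application code typically receives ready-made instances from a RemoteContext, as in the sketch above. A minimal illustration of the descriptor such a factory would forward to this constructor (shape and precision are placeholders):

// Example descriptor; plugin subclasses pass it up to RemoteBlob(tensorDesc),
// while application code usually supplies it to RemoteContext::CreateBlob(desc).
TensorDesc desc(Precision::FP32, {1, 3, 224, 224}, Layout::NCHW);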
Methods¶
virtual ParamMap getParams() const = 0
Returns a map of device-specific parameters required for low-level operations with the underlying object. Parameters include device/context/surface/buffer handles, access flags, etc. The contents of the returned map depend on the remote execution context currently set on the device (working scenario). Abstract method.
Returns:
A map of name/parameter elements.
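The keys and value types in the returned map are plugin-specific (the GPU plugin, for example, exposes OpenCL memory and context handles). The sketch below only enumerates the available keys, assuming remote_blob from the earlier example:

#include <iostream>

ParamMap params = remote_blob->getParams();
for (const auto& kv : params) {
    // kv.second is an InferenceEngine::Parameter; its concrete type
    // (handle, integer, string, ...) depends on the plugin.
    std::cout << kv.first << std::endl;
}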
virtual std::string getDeviceName() const = 0
Returns the name of the device on which the underlying object is allocated. Abstract method.
Returns:
A device name string, in the same format as the device names reported by the plugin metrics.
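The returned string can be checked against the device a network was loaded to; a small sketch, again assuming remote_blob from the earlier example:

std::string blob_device = remote_blob->getDeviceName();  // e.g. "GPU" or "GPU.0"
if (blob_device.find("GPU") == std::string::npos) {
    // The blob was allocated on an unexpected device.
}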
virtual std::shared_ptr<RemoteContext> getContext() const = 0
Returns the device context to which the underlying object belongs. Abstract method.
Returns:
A pointer to a plugin-specific context class object derived from RemoteContext. Dynamic casting should be used if it is necessary to retrieve a pointer to the original (plugin-specific) class.
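The returned context can be reused to allocate further blobs on the same device; plugin-specific functionality requires a downcast, e.g. to the GPU plugin's ClContext (shown only as a hedged assumption in the comment):

RemoteContext::Ptr ctx = remote_blob->getContext();

// Allocate another blob in the same device context.
RemoteBlob::Ptr another = ctx->CreateBlob(remote_blob->getTensorDesc());

// Downcast if plugin-specific members are needed (GPU plugin assumed):
// auto cl_ctx = std::dynamic_pointer_cast<InferenceEngine::gpu::ClContext>(ctx);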