Getting started
Processing Block API is a scalable interface that replaces Legacy API for easier integration of Face SDK capabilities into your application.
Key features
- Multiple components combined into a single integration
- Simplicity and ease of learning
- Rapid implementation
- Long-term support and updates
Requirements
- Windows x86 64-bit or Linux x86 64-bit.
- Face SDK windows_x86_64 or linux_x86_64 is installed (see Getting Started).
Context-container
Processing Block API is based on the use of Context. Context is a heterogeneous container that consists of a set of hierarchically organized data presented in the form of key–value pairs. The closest analogue of Context is a JSON object. Each Context object can contain a scalar object (integer, real, boolean, string), a memory area or pointer, a sequential array of Context objects, or an associative container of string-Context pairs, with unlimited nesting.
How to create and use a Context object
Create a Context-container:
- C++
- Python
- Flutter
- C#
- Java
auto array_elem0 = service->createContext();
array_elem0 = service.create_context({})
Context array_elem0 = service.createContext({});
Context array_elem0 = service.CreateContext(new());
Context array_elem0 = service.createContext();
Common set of operations with a Context-container:
creating an associative container by calling
["key"]
on empty Context:- C++
- Python
- Flutter
- C#
- Java
array_elem0["name"] = "Julius Zeleny"; // pass string
array_elem0["phone"] = 11111111111l; // pass integer (long)
array_elem0["social_score"] = 0.999; // pass double
array_elem0["verified"] = true; // pass boolarray_elem0["name"] = "Julius Zeleny" # pass string
array_elem0["phone"] = 11111111111 # pass integer
array_elem0["social_score"] = 0.999 # pass double
array_elem0["verified"] = True # pass boolarray_elem0["name"] = "Julius Zeleny"; // pass string
array_elem0["phone"] = 11111111111; // pass integer
array_elem0["social_score"] = 0.999; // pass double
array_elem0["verified"] = true; // pass boolarray_elem0["name"] = "Julius Zeleny"; // pass string
array_elem0["phone"] = 11111111111L; // pass integer
array_elem0["social_score"] = 0.999; // pass double
array_elem0["verified"] = true; // pass boolarray_elem0.get("name").setString("Julius Zeleny"); // pass string
array_elem0.get("phone").setLong(11111111111l); // pass integer
array_elem0.get("social_score").setDouble(0.99); // pass double
array_elem0.get("verified").setBool(true); // pass boolgetters:
- C++
- Python
- Flutter
- C#
- Java
ASSERT_EQ( array_elem0["name"].getString(), "Julius Zeleny" );
ASSERT_EQ( array_elem0["phone"].getLong(), 11111111111l );
ASSERT_EQ( array_elem0["social_score"].getDouble(), 0.999 );
ASSERT_EQ( array_elem0["verified"].getBool(), true );assert array_elem0["name"].get_value() == "Julius Zeleny"
assert array_elem0["phone"].get_value() == 11111111111
assert array_elem0["social_score"].get_value() == 0.999
assert array_elem0["verified"].get_value() == Trueassert (array_elem0["name"].get_value() == "Julius Zeleny");
assert (array_elem0["phone"].get_value() == 11111111111);
assert (array_elem0["social_score"].get_value() == 0.999);
assert (array_elem0["verified"].get_value() == true);Debug.Assert(array_elem0["name"].GetString() == "Julius Zeleny");
Debug.Assert(array_elem0["phone"].GetLong() == 11111111111L);
Debug.Assert(array_elem0["social_score"].GetDouble() == 0.999);
Debug.Assert(array_elem0["verified"].GetBool() == true);assertTrue(array_elem0.get("name").getString().equals("Julius Zeleny"));
assertTrue(array_elem0.get("phone").getLong() == 11111111111l);
assertTrue(array_elem0.get("social_score").getDouble() == 0.99);
assertTrue(array_elem0.get("verified").getBool() == true);creating a sequence array by calling
push_back
on empty Context:- C++
- Python
- Flutter
- C#
- Java
auto array = service->createContext();
array.push_back(array_elem0);array = service.create_context([])
array.push_back(array_elem0)Context array = service.createContext([]);
array.pushBack(array_elem0);Context array = service.CreateContext(new());
array.PushBack(array_elem0);Context array = service.createContext();
array.pushBack(array_elem0);iterating over array:
- C++
- Python
- Flutter
- C#
- Java
// get by index
ASSERT_EQ( array[0]["phone"].getLong(), 11111111111l );
// iterate with index
size_t array_sz = array.size();
for(size_t i = 0; i < array_sz; ++i)
array[i]["phone"];
// or with iterators
for(auto iter = array.begin(); iter != array.end(); ++iter)
(*iter)["phone"]; // dereference returns nested Context
// or with foreach
for(auto val : array)
val["phone"];# get by index
assert array[0]["phone"].get_value() == 11111111111
# iterate with index
for i in range(len(array)):
array[i]["phone"]
# or with iterators
for elem in array:
elem["phone"]// get by index
assert (array[0]["phone"].get_value() == 11111111111);
// iterate with index
for (int i = 0; i < array.len(); i++)
array[i]["phone"];// get by index
Debug.Assert(array[0]["phone"].GetLong() == 11111111111L);
// iterate with index
for (long i = 0; i < array.Length(); i++)
array[i]["phone"];// get by index
assertTrue(array.get(0).get("phone").getLong() == 11111111111l);
// iterate with index
for (int i = 0; i < array.size(); i++)
array.get(i).get("phone");operations with a nested associative container:
- C++
- Python
- Flutter
- C#
- Java
auto full = service->createContext();
full["friends"] = std::move(array); // move assignment without copying
// access to the nested object
ASSERT_EQ( full["friends"][0]["social_score"].getDouble(), 0.999 );
// iterate over associative containers values
for(auto iter = full.begin(); iter != full.end(); ++iter) {
iter.key(); // get the key value from iterator
(*iter)[0]["social_score"].getDouble(); // get the value
}
// with foreach
for(auto val : full)
val[0]["social_score"].getDouble();full = service.create_context()
full["friends"] = array.to_dict()
# access to the nested object
assert full["friends"][0]["social_score"].get_value() == 0.999
# iterate over associative containers values
for key in full.keys():
full[key][0]["social_score"].get_value()Context full = service.createContext({});
full["friends"] = array.toMap();
// access to the nested object
assert (full["friends"][0]["social_score"].get_value() == 0.999);
// iterate over associative containers values
for (var key in full.getKeys())
full[key][0]["social_score"].get_value();Context full = service.createContext(new());
full["friends"] = array;
// access to the nested object
Debug.Assert(full["friends"][0]["social_score"].GetDouble() == 0.999);
// iterate over associative containers values
foreach (String key in full.GetKeys())
full[key][0]["social_score"].GetDouble();Context full = service.createContext();
full.get("friends").setContext(array);
// access to the nested object
assertTrue(full.get("friends").get(0).get("social_score").getDouble() == 0.999);
// iterate over associative containers values
for (String key : full.getKeys())
{
full.get(key).get(0).get("social_score").getDouble();
}other Context's convenient methods:
- C++
- Python
- Flutter
- C#
- Java
void clear()
bool contains(const std::string& key); // for an associative container
Context operator[](size_t index); // for a sequence array, access specified element with bounds checking
Context operator[](const std::string& key); // for an associative container, access or insert
Context at(const std::string& key); // for an associative container, with bounds checking
size_t size(); // returned elements count for a container
bool isNone(); // is empty
bool isArray(); // is a sequence array
bool isObject(); // is an associative container
bool isLong(), isDouble(), isString(), isBool(); // check if contains a certain scalar typedef to_dict(self) -> dict # converts context to dictionary
def is_none(self) -> bool # is empty
def is_array(self) -> bool # check for sequential array
def is_object(self) -> bool # check for associative container
def is_long, is_double, is_string, is_bool -> bool # check for a scalar data typeContext operator[](int index) // for a sequence array, access specified element with bounds checking
Context operator[](String key) // for an associative container, access or insert
int len() // returned elements count for a container
bool is_none() // is empty
bool is_array() // is a sequence array
bool is_object() // is an associative container
bool is_long(), is_double(), is_string(), is_bool() // check if contains a certain scalar typebool Contains(string key); // for an associative container
Context this[int index]; // for a sequence array, access specified element with bounds checking
Context this[string key]; // for an associative container, access or insert
Context GetByKey(string key); // for an associative container, with bounds checking
Context GetByIndex(int index); // for a sequence array, with bounds checking
ulong Length(); // returned elements count for a container
bool IsNone(); // is empty
bool IsArray(); // is a sequence array
bool IsObject(); // is an associative container
bool IsLong(), IsDouble(), IsString(), IsBool(); // check if contains a certain scalar typevoid clear();
boolean contains(String key); // for an associative container
Context get(long index); // for a sequence array, access specified element with bounds checking
Context get(String key); // for an associative container, access or insert
long size(); // returned elements count for a container
boolean isNone(); // is empty
boolean isArray(); // is a sequence array
boolean isObject(); // is an associative container
boolean isLong(), isDouble(), isString(), isBool(); // check if contains a certain scalar typeFacerecService methods connected with Context:
- C++
- Python
- Flutter
- C#
- Java
// get Context from image file
pbio::Context createContextFromEncodedImage(const uint8_t* data, uint64_t dataSize);
pbio::Context createContextFromEncodedImage(const std::vector<uint8_t>& data);
pbio::Context createContextFromEncodedImage(const std::string& data);
pbio::Context createContextFromEncodedImage(const std::vector<char>& data);
// get Context from image bytes
pbio::Context createContextFromFrame(uint8_t* data, int32_t width, int32_t height, pbio::Context::Format format, int32_t baseAngle);# get Context from image file
def create_context_from_encoded_image(self, data: bytes) -> Context
# get Context from image bytes
def create_context_from_frame(self, data: bytes, width: int, height: int, format: ContextFormat, base_angle: int) -> Context:// get Context from image file
Context createContextFromEncodedImage(Uint8List data);
// get Context from image bytes
Context createContextFromFrame(Uint8List data, int width, int height, {ContextFormat format, int baseAngle});// get Context from image file
Context CreateContextFromImage(byte[] data);
// get Context from image bytes
Context CreateContextFromFrame(byte[] data, int width, int height, Context.Format format, int baseAngle);// get Context from image file
Context createContextFromEncodedImage(byte[] data);
// get Context from image bytes
Context createContextFromFrame(byte[] data, int width, int height, Context.Format format, int baseAngle);
Binary image format
Most of the processing blocks operate on Context with an image in binary format:
{
"image" : { "format": "NDARRAY",
"blob": "data pointer",
"dtype": "uint8_t",
"shape": [height, width, channels] }
}
The "blob"
key contains a smart pointer to data. The pointer is set by the function void Context::setDataPtr(void* ptr, int copy_sz)
,
where copy_sz
is the size of memory in Bytes, that will be copied, and then automatically released when Context objects lifetime ends.
Copying will not perform if 0
is passed as argument copy_sz
. In this case the Context object does not control the lifetime of the object it points to.
You can also allocate raw memory, e.g. to copy data later, by passing nullptr and size as arguments of setDataPtr
.
The "dtype"
can contain one of these values: "uint8_t"
, "int8_t"
, "uint16_t"
, "int16_t"
, "int32_t"
, "float"
, "double"
.
This is according to OpenCV types: CV_8U
, CV_8S
, CV_16U
, CV_16S
, CV_32S
, CV_32F
, CV_64F
.
Create a Context-container with RGB-image
- C++
- Python
- Flutter
- C#
- Java
- Read an image from the file:
std::string inputImagePath = "{path_to_image}";
std::ifstream imageFile(inputImagePath, std::ios::binary);
std::istreambuf_iterator<char> start(imageFile);
std::vector<char> imageData(start, std::istreambuf_iterator<char>());
- Create a Context container with an image using the createContextFromEncodedImage()
method:
pbio::Context ioData = service->createContextFromEncodedImage(imageData);
- Read an image from the file:
input_image_path = "{path_to_image}"
image_data = bytes()
with open(input_image_path, "rb") as image_file:
image_data = image_file.read()
- Create a Context container with an image using the
create_context_from_encoded_image()
method:
ioData = service.create_context_from_encoded_image(image_data)
- Read an image from the file:
File file = File("{imagePath}");
final Uint8List bytes = await file.readAsBytes();
- Create a Context container with an image using the
createContextFromEncodedImage()
method:
Context ioData = service.createContextFromEncodedImage(bytes);
- Read an image from the file:
string inputImagePath = "<path_to_image>";
byte[] imageData = File.ReadAllBytes(inputImagePath);
- Create a Context container with an image using the
CreateContextFromImage()
method:
Context ioData = service.CreateContextFromImage(imageData);
- Read an image from the file:
final String inputImagePath = "<path_to_image>";
byte[] imageData = Files.readAllBytes(Paths.get(inputImagePath));
- Create a Context container with an image using the
createContextFromEncodedImage()
method:
Context ioData = service.createContextFromEncodedImage(imageData);
Processing Blocks
Processing Blocks types
- FACE_DETECTOR
- HUMAN_BODY_DETECTOR
- HUMAN_POSE_ESTIMATOR
- OBJECT_DETECTOR
- FACE_FITTER
- EMOTION_ESTIMATOR
- AGE_ESTIMATOR
- GENDER_ESTIMATOR
- MASK_ESTIMATOR
- LIVENESS_ESTIMATOR
- QUALITY_ASSESSMENT_ESTIMATOR
- FACE_TEMPLATE_EXTRACTOR
- TEMPLATE_INDEX
- MATCHER_MODULE
- VERIFICATION_MODULE
Examples of using Processing Block API are demonstrated in:
- Samples processing_block_demo in C++
- Python examples examples/python/processing_blocks/
- Samples processing_block_demo in Flutter
Processing Block parameters
unit_type: string
— main parameter of the processing block, defines the type of the created module.modification: string
— optional parameter, defines modification of the processing block. If not specified, the default value will be used.version: int64
— optional parameter, defines the version of modification of the processing block. If not specified, the default value will be used.model_path: string
— optional parameter, defines the path to the processing block model. If not specified, the default value will be used.use_cuda: bool
— optional parameter, responsible for starting the processing block on GPU. The default value is false
.use_legacy: bool
— optional parameter, needed to use older onnxruntime library. The default value is false
.ONNXRuntime
— key for onnxruntime configuration parameters.library_path: string
— path to the onnxruntime libraries, by default the path to the libfacerec.so directory.intra_op_num_threads: int64
— number of threads for paralleling the module, the default value is 1.
Processing Block usage
Create a Context-container, specify the parameters you need and pass it to the
FacerecService.createProcessingBlock()
method.- C++
- Python
- Flutter
- C#
- Java
// mandatory, specify the name of processing block
auto configCtx = service->createContext();
configCtx["unit_type"] = "<name_of_processing_block>";
// if omitted, the default value will be used
configCtx["modification"] = "<modification>";
// if not specified, the first version of the modification will be used
configCtx["version"] = <version>;
// the default models are located in the Face SDK distribution directory: share/processing_block/<modification>/(<version>/ or <version>.enc)
// you can set your own path to the model
configCtx["model_path"] = "<path_to_model_file>";
// default location of the onnxruntime library in the Face SDK distribution: the "lib" folder for Linux platform or the "bin" folder for Windows platform
// you can specify your own path to onnxruntime library
// if value is not specified, the os-specific default search order will be used
configCtx["ONNXRuntime"]["library_path"] = "../lib"; // for Linux
configCtx["ONNXRuntime"]["library_path"] = "../bin"; // for Windows
// optional, "true" if you want to use GPU acceleration (CUDA) for processing block that support it
configCtx["use_cuda"] = false;
pbio::ProcessingBlock processing_block = service->createProcessingBlock(configCtx);configDict = {};
# mandatory, specify the name of processing block
configDict["unit_type"] = "<name_of_processing_block>"
# if omitted, the default value will be used
configDict["modification"] = "<modification>"
# if not specified, the first version of the modification will be used
configDict["version"] = <version>
# the default models are located in the Face SDK distribution directory: share/processing_block/<modification>/(<version>/ or <version>.enc)
# you can set your own path to the model
configDict["model_path"] = "<path_to_model_file>"
# default location of the onnxruntime library in the Face SDK distribution: the "lib" folder for Linux platform or the "bin" folder for Windows platform
# you can specify your own path to onnxruntime library
# if value is not specified, the os-specific default search order will be used
configDict["ONNXRuntime"]["library_path"] = "../lib" # for Linux
configDict["ONNXRuntime"]["library_path"] = "../bin" # for Windows
# optional, "true" if you want to use GPU acceleration (CUDA) for processing block that support it
configDict["use_cuda"] = False
processing_block = service.create_processing_block(configDict);Map<String, dynamic> configMap = {};
// mandatory, specify the name of processing block
configMap["unit_type"] = "<name_of_processing_block>";
// if omitted, the default value will be used
configMap["modification"] = "<modification>";
// if not specified, the first version of the modification will be used
configMap["version"] = <version>;
// the default models are located in the Face SDK distribution directory: share/processing_block/<modification>/(<version>/ or <version>.enc)
// you can set your own path to the model
configMap["model_path"] = "<path_to_model_file>";
processing_block = service.createProcessingBlock(configMap);Dictionary<object, object> configDict = new Dictionary<object, object>();
// mandatory, specify the name of processing block
configDict["unit_type"] = "<name_of_processing_block>";
// if omitted, the default value will be used
configDict["modification"] = "<modification>";
// if not specified, the first version of the modification will be used
configDict["version"] = <version>;
// the default models are located in the Face SDK distribution directory: share/processing_block/<modification>/(<version>/ or <version>.enc)
// you can set your own path to the model
configDict["model_path"] = "<path_to_model_file>";
// default location of the onnxruntime library in the Face SDK distribution: the "lib" folder for Linux platform or the "bin" folder for Windows platform
// you can specify your own path to onnxruntime library
// if value is not specified, the os-specific default search order will be used
configDict["ONNXRuntime"]["library_path"] = "../lib"; // for Linux
configDict["ONNXRuntime"]["library_path"] = "../bin"; // for Windows
// optional, "true" if you want to use GPU acceleration (CUDA) for processing block that support it
configDict["use_cuda"] = false;
ProcessingBlock processingBlock = service.CreateProcessingBlock(configDict);// mandatory, specify the name of processing block
Context configCtx = service.createContext();
configCtx.get("unit_type").setString("{name_of_processing_block}");
// if omitted, the default value will be used
configCtx.get("modification").setString("{modification}");
// if not specified, the first version of the modification will be used
configCtx.get("version").setLong({version});
// the default models are located in the Face SDK distribution directory: share/processing_block/<modification>/(<version>/ or <version>.enc)
// you can set your own path to the model
configCtx.get("model_path").setString("{path_to_model_file}");
// default location of the onnxruntime library in the Face SDK distribution: the "lib" folder for Linux platform or the "bin" folder for Windows platform
// you can specify your own path to onnxruntime library
// if value is not specified, the os-specific default search order will be used
configCtx.get("ONNXRuntime").get("library_path").setString("../lib"); // for Linux
configCtx.get("ONNXRuntime").get("library_path").setString("../bin"); // for Windows
// optional, "true" if you want to use GPU acceleration (CUDA) for processing block that support it
configCtx.get("use_cuda").setBool(false);
ProcessingBlock processing_block = service.createProcessingBlock(configCtx);Prepare input Context and pass it to processing block
- C++
- Python
- Flutter
- C#
- Java
std::string input_image_path = "{path_to_image}";
cv::Mat image = cv::imread(input_image_path, cv::IMREAD_COLOR);
cv::Mat input_image;
cv::cvtColor(image, input_image, cv::COLOR_BGR2RGB);
// creating a Context container with a binary image
auto imgCtx = service->createContext();
pbio::context_utils::putImage(imgCtx, input_image.data, input_image.rows, input_image.cols, pbio::IRawImage::FORMAT_RGB, true);
// creating the input Context
auto ioData = service->createContext();
ioData["image"] = imgCtx;
// Processing Block call
processing_block(ioData);input_image_path = "{path_to_image}"
image = cv2.imread(input_image_path, cv2.IMREAD_COLOR)
input_image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
# creating a Context container with a binary image
imgDict = {
"blob": input_image.tobytes(),
"dtype": "uint8_t",
"format": "NDARRAY",
"shape": [dim for dim in input_image.shape]
}
imgCtx = service.create_context(imgDict)
# creating the input Context
ioData = service.create_context({})
ioData["image"] = imgCtx;
# Processing Block call
processing_block(ioData);File file = File(imagePath);
final Uint8List bytes = await file.readAsBytes();
final ImageDescriptor descriptor = await ImageDescriptor.encoded(await ImmutableBuffer.fromUint8List(bytes));
Map<String, dynamic> imageContext = {
"blob": bytes,
"dtype": "uint8_t",
"format": "NDARRAY",
"shape": [descriptor.height, descriptor.width, 3]
};
// creating the input Context
ioData = service.createContext({
"objects": [],
"image": {
"blob": {byte array with RGB image},
"dtype": "uint8_t",
"format": "NDARRAY",
"shape": [{image height in px}, {image width in px}, 3]
}
});
// Processing Block call
processing_block.process(ioData);string inputImagePath = "<path_to_image>";
Mat image = Cv2.ImRead(inputImagePath);
Mat inputImage = new();
Cv2.CvtColor(image, inputImage, ColorConversionCodes.BGR2RGB);
// creating a Context container with a binary image
Dictionary<int, string> CvTypeToStr = new()
{
{ MatType.CV_8U, "uint8_t" }, { MatType.CV_8S, "int8_t" },
{ MatType.CV_16U, "uint16_t" }, { MatType.CV_16S, "int16_t" },
{ MatType.CV_32S, "int32_t" }, { MatType.CV_32F, "float" }, { MatType.CV_64F, "double" }
};
Dictionary<object, object> imgDict = new();
inputImage = inputImage.IsContinuous() ? inputImage : inputImage.Clone();
long size = inputImage.Total() * inputImage.ElemSize();
List<object> sizes = [];
byte[] buffer = new byte[size];
using (Mat temp = new(inputImage.Rows, inputImage.Cols, inputImage.Type(), buffer))
{
inputImage.CopyTo(temp);
}
for (int i = 0; i < inputImage.Dims; ++i)
{
sizes.Add(inputImage.Size(i));
}
sizes.Add(inputImage.Channels());
imgDict["blob"] = buffer;
imgDict["format"] = "NDARRAY";
imgDict["shape"] = sizes;
imgDict["dtype"] = CvTypeToStr[inputImage.Depth()];
// creating the input Context
Context ioData = service.CreateContext
(
new Dictionary<object, object>
{
{ "image", imgDict }
}
);
// Processing Block call
processingBlock.Invoke(ioData);final String inputImagePath = "<path_to_image>";
final File file = new File(inputImagePath);
final BufferedImage img = ImageIO.read(file);
// creating the input Context
Context ioData = service.createContext();
// creating a Context container with a binary image
Context imgCtx = ioData.get("image");
final byte[] pixels = ((DataBufferByte) img.getRaster().getDataBuffer()).getData();
imgCtx.get("format").setString("NDARRAY");
imgCtx.get("dtype").setString("uint8_t");
imgCtx.get("blob").setDataBytes(pixels);
Context shapeImgCtx = imgCtx.get("shape");
shapeImgCtx.pushBack(img.getHeight());
shapeImgCtx.pushBack(img.getWidth());
shapeImgCtx.pushBack(3l);
// Processing Block call
processing_block.process(ioData);
GPU acceleration
Processing Blocks can be used with GPU acceleration (CUDA). To activate acceleration you need to define the
["use_cuda"]
key with the true
value for Processing Block configuration container. To start processing blocks on cuda-10.1, it is necessary to define the key "use_legacy"
with the value true
for the Context container of the Processing Block. The system requirements are available here.