Commit 700c863

Adapt object_detection_demo for batch processing
1 parent: 3128d4f

7 files changed: +207 -59 lines changed

demos/common/cpp/models/include/models/detection_model_ssd.h

Lines changed: 3 additions & 0 deletions

@@ -50,6 +50,9 @@ class ModelSSD : public DetectionModel {
                       const std::string& layout = "");
 
     std::shared_ptr<InternalModelData> preprocess(const InputData& inputData, ov::InferRequest& request) override;
+    std::shared_ptr<InternalModelData> preprocess(std::vector<std::shared_ptr<InputData>>::iterator inputDataBegin,
+                                                  std::vector<std::shared_ptr<InputData>>::iterator inputDataEnd,
+                                                  ov::InferRequest& request) override;
     std::unique_ptr<ResultBase> postprocess(InferenceResult& infResult) override;
 
 protected:
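
As a usage illustration (a sketch, not code from this commit): the new iterator-based preprocess() overload takes a range of InputData pointers, one per image in the batch. Assuming a ModelSSD instance `modelSSD`, an ov::InferRequest `request` for the compiled model, and a hypothetical `capturedImages` vector of cv::Mat frames, it could be driven like this:

    // Sketch: wrap one batch of frames and run the batched preprocess() overload.
    std::vector<std::shared_ptr<InputData>> frames;
    frames.reserve(capturedImages.size());
    for (const cv::Mat& image : capturedImages) {
        frames.push_back(std::make_shared<ImageInputData>(image));  // ImageInputData wraps a cv::Mat
    }
    auto internalData = modelSSD->preprocess(frames.begin(), frames.end(), request);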

demos/common/cpp/models/src/classification_model.cpp

Lines changed: 0 additions & 11 deletions

@@ -44,27 +44,16 @@ ClassificationModel::ClassificationModel(const std::string& modelFileName,
 
 std::unique_ptr<ResultBase> ClassificationModel::postprocess(InferenceResult& infResult) {
     const ov::Tensor& indicesTensor = infResult.outputsData.find(outputsNames[0])->second;
-    const void* indicesTensorBuffer = reinterpret_cast<const void*>(indicesTensor.data());
-    std::cout << "-S- indices tensor data: " << indicesTensorBuffer << ", size: " << indicesTensor.get_size() << std::endl;
     const int* indicesPtr = indicesTensor.data<int>();
-    for (int i = 0; i < indicesTensor.get_size(); i++) {
-        std::cout << "-S- index[" << i << "]: " << indicesPtr[i] << std::endl;
-    }
     const ov::Tensor& scoresTensor = infResult.outputsData.find(outputsNames[1])->second;
     const float* scoresPtr = scoresTensor.data<float>();
-    const void* scoresTensorBuffer = reinterpret_cast<const void*>(scoresTensor.data());
-    std::cout << "-S- scores tensor data: " << scoresTensorBuffer << ", size: " << scoresTensor.get_size() << std::endl;
-    for (int i = 0; i < scoresTensor.get_size(); i++) {
-        std::cout << "-S- score[" << i << "]: " << scoresPtr[i] << std::endl;
-    }
 
     ClassificationResult* result = new ClassificationResult(infResult.frameId, infResult.metaData);
     auto retVal = std::unique_ptr<ResultBase>(result);
 
     result->topLabels.reserve(scoresTensor.get_size());
     for (size_t i = 0; i < scoresTensor.get_size(); ++i) {
         int ind = indicesPtr[i];
-        std::cout << "-S- index???[" << i << "]: " << ind << ", labels size: " << labels.size() << std::endl;
         if (ind < 0 || ind >= static_cast<int>(labels.size())) {
             throw std::runtime_error(std::string("Invalid index: ") + std::to_string(ind) + " for the class label is found during postprocessing, label size: " + std::to_string(labels.size()));
         }

demos/common/cpp/models/src/detection_model_ssd.cpp

Lines changed: 15 additions & 0 deletions

@@ -53,6 +53,21 @@ std::shared_ptr<InternalModelData> ModelSSD::preprocess(const InputData& inputDa
     return DetectionModel::preprocess(inputData, request);
 }
 
+std::shared_ptr<InternalModelData> ModelSSD::preprocess(std::vector<std::shared_ptr<InputData>>::iterator inputDataBegin,
+                                                        std::vector<std::shared_ptr<InputData>>::iterator inputDataEnd,
+                                                        ov::InferRequest& request) {
+    if (inputsNames.size() > 1) {
+        const auto& imageInfoTensor = request.get_tensor(inputsNames[1]);
+        const auto info = imageInfoTensor.data<float>();
+        info[0] = static_cast<float>(netInputHeight);
+        info[1] = static_cast<float>(netInputWidth);
+        info[2] = 1;
+        request.set_tensor(inputsNames[1], imageInfoTensor);
+    }
+
+    return DetectionModel::preprocess(inputDataBegin, inputDataEnd, request);
+}
+
 std::unique_ptr<ResultBase> ModelSSD::postprocess(InferenceResult& infResult) {
     return outputsNames.size() > 1 ? postprocessMultipleOutputs(infResult) : postprocessSingleOutput(infResult);
 }
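
For context: the model's optional second input is an "image info" tensor whose three values are the network input height, width, and a scale factor, which is exactly what the hunk above writes once per request. If that input were batched with shape {batch, 3}, one row per image could be filled like this (a sketch under that shape assumption, not part of the commit):

    // Sketch: fill a {batch, 3} image-info input, one {height, width, scale} row per image.
    float* info = imageInfoTensor.data<float>();
    for (size_t n = 0; n < batch; ++n) {
        info[n * 3 + 0] = static_cast<float>(netInputHeight);  // network input height
        info[n * 3 + 1] = static_cast<float>(netInputWidth);   // network input width
        info[n * 3 + 2] = 1.0f;                                // scale factor
    }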

demos/common/cpp/models/src/image_model.cpp

Lines changed: 4 additions & 3 deletions

@@ -43,12 +43,15 @@ std::shared_ptr<InternalModelData> ImageModel::preprocess(std::vector<std::share
     const size_t width = tensorShape[ov::layout::width_idx(layout)];
     const size_t height = tensorShape[ov::layout::height_idx(layout)];
     const size_t channels = tensorShape[ov::layout::channels_idx(layout)];
-    std::cout << "ImageModel::preprocess: batch: " << batch << ", width: " << width << ", height: " << height << ", channels: " << channels << std::endl;
     char* memoryBlob = nullptr;
     size_t image_index = 0;
     bool isMatFloat = false;
     int origImg_cols = 0;
     int origImg_rows = 0;
+    size_t image_count = std::distance(inputDataBegin, inputDataEnd);
+    if (image_count != batch) {
+        throw std::runtime_error("Image count in preprocess must match the batch size");
+    }
     for (auto inputDataIt = inputDataBegin; inputDataIt != inputDataEnd; ++inputDataIt) {
         const auto& origImg = (*inputDataIt)->asRef<ImageInputData>().inputImage;
         origImg_cols = origImg.cols;
@@ -75,7 +78,6 @@ std::shared_ptr<InternalModelData> ImageModel::preprocess(std::vector<std::share
             img = resizeImageExt(img, width, height, resizeMode, interpolationMode);
         }
         size_t sizeInBytes = img.total() * img.elemSize();
-        std::cout << "image size in bytes: " << sizeInBytes << std::endl;
        if (!memoryBlob) {
            memoryBlob = new char[sizeInBytes * batch]; // intended memory leak
        }
@@ -85,7 +87,6 @@ std::shared_ptr<InternalModelData> ImageModel::preprocess(std::vector<std::share
         image_index++;
     }
 
-    std::cout << "isMatFloat: " << isMatFloat << std::endl;
     auto precision = isMatFloat ? ov::element::f32 : ov::element::u8;
     auto batched_tensor = ov::Tensor(precision, ov::Shape{ batch, height, width, channels }, memoryBlob);
     request.set_tensor(inputsNames[0], batched_tensor);
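
The per-image copy into the shared buffer falls between the second and third hunk and is therefore not shown above. A minimal sketch of that step, reusing the names from this function (the exact offset arithmetic is an assumption, not verified against the full file):

    // Sketch: write the resized image into its slice of the batched NHWC buffer.
    // `img` is the resized cv::Mat, `memoryBlob` the buffer shared by the whole batch,
    // and `image_index` this image's position within the batch. Requires <cstring>.
    std::memcpy(memoryBlob + image_index * sizeInBytes, img.data, sizeInBytes);

The "intended memory leak" comment reflects that an ov::Tensor created from a raw host pointer does not take ownership of the memory, so the buffer has to outlive the inference request.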

demos/common/cpp/pipelines/include/pipelines/metadata.h

Lines changed: 25 additions & 0 deletions

@@ -40,6 +40,31 @@ struct ImageMetaData : public MetaData {
     ImageMetaData(cv::Mat img, std::chrono::steady_clock::time_point timeStamp) : img(img), timeStamp(timeStamp) {}
 };
 
+struct ImageBatchMetaData : public MetaData {
+    std::chrono::steady_clock::time_point timeStamp;
+    std::vector<std::shared_ptr<ImageMetaData>> metadatas;
+
+    ImageBatchMetaData() {}
+
+    ImageBatchMetaData(std::vector<cv::Mat>::iterator imagesBeginIt,
+                       const std::vector<cv::Mat>::iterator imagesEndIt,
+                       std::chrono::steady_clock::time_point timeStamp) : timeStamp(timeStamp) {
+        size_t images_count = std::distance(imagesBeginIt, imagesEndIt);
+        metadatas.reserve(images_count);
+        for (; imagesBeginIt != imagesEndIt;) {
+            metadatas.push_back(std::make_shared<ImageMetaData>(*imagesBeginIt++, timeStamp));
+        }
+    }
+
+    void add(cv::Mat img, std::chrono::steady_clock::time_point timeStamp) {
+        metadatas.push_back(std::make_shared<ImageMetaData>(img, timeStamp));
+        this->timeStamp = timeStamp;
+    }
+    void clear() {
+        metadatas.clear();
+    }
+};
+
 struct ClassificationImageMetaData : public ImageMetaData {
     unsigned int groundTruthId;
 
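A minimal usage sketch for the new ImageBatchMetaData (assumed usage, not taken from the commit): given the frames grabbed for one batch, the metadata can be built in one shot from iterators or incrementally via add():

    // Sketch: wrap a batch of frames in ImageBatchMetaData.
    std::vector<cv::Mat> frames = grabNextBatch();  // grabNextBatch() is hypothetical
    auto now = std::chrono::steady_clock::now();
    auto batchMeta = std::make_shared<ImageBatchMetaData>(frames.begin(), frames.end(), now);

    // Or build it incrementally as frames arrive, then reuse the same object.
    ImageBatchMetaData incremental;
    for (const cv::Mat& frame : frames) {
        incremental.add(frame, std::chrono::steady_clock::now());
    }
    incremental.clear();  // ready for the next batch
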
demos/common/cpp/pipelines/src/async_pipeline.cpp

Lines changed: 0 additions & 4 deletions

@@ -95,7 +95,6 @@ int64_t AsyncPipeline::submitData(std::vector<std::shared_ptr<InputData>>::itera
         {
             const std::lock_guard<std::mutex> lock(mtx);
             inferenceMetrics.update(startTime);
-            std::cout << "callback has been called" << std::endl;
             try {
                 if (ex) {
                     std::rethrow_exception(ex);
@@ -108,14 +107,11 @@ int64_t AsyncPipeline::submitData(std::vector<std::shared_ptr<InputData>>::itera
 
                 for (const auto& outName : model->getOutputsNames()) {
                     auto tensor = request.get_tensor(outName);
-                    std::cout << "-S- output tensorName: " << outName << ", tensor ptr: " << reinterpret_cast<void*>(tensor.data()) << ", size: " << tensor.get_size() << std::endl;
                     result.outputsData.emplace(outName, tensor);
                 }
 
                 completedInferenceResults.emplace(frameID, result);
-                std::cout << "before setRequestIdle: " << std::endl;
                 requestsPool->setRequestIdle(request);
-                std::cout << "after setRequestIdle: " << std::endl;
             } catch (...) {
                 if (!callbackException) {
                     callbackException = std::current_exception();
