
Commit 3128d4f

stash
1 parent 55f5d43 commit 3128d4f

3 files changed: 16 additions (+), 2 deletions (−)

demos/common/cpp/models/src/classification_model.cpp

Lines changed: 11 additions & 0 deletions
@@ -44,16 +44,27 @@ ClassificationModel::ClassificationModel(const std::string& modelFileName,
 
 std::unique_ptr<ResultBase> ClassificationModel::postprocess(InferenceResult& infResult) {
     const ov::Tensor& indicesTensor = infResult.outputsData.find(outputsNames[0])->second;
+    const void* indicesTensorBuffer = reinterpret_cast<const void*>(indicesTensor.data());
+    std::cout << "-S- indices tensor data: " << indicesTensorBuffer << ", size: " << indicesTensor.get_size() << std::endl;
     const int* indicesPtr = indicesTensor.data<int>();
+    for (int i = 0; i < indicesTensor.get_size(); i++){
+        std::cout << "-S- index[" << i << "]: " << indicesPtr[i] <<std::endl;
+    }
     const ov::Tensor& scoresTensor = infResult.outputsData.find(outputsNames[1])->second;
     const float* scoresPtr = scoresTensor.data<float>();
+    const void* scoresTensorBuffer = reinterpret_cast<const void*>(scoresTensor.data());
+    std::cout << "-S- scores tensor data: " << scoresTensorBuffer << ", size: " << scoresTensor.get_size() <<std::endl;
+    for (int i = 0; i < scoresTensor.get_size(); i++){
+        std::cout << "-S- score[" << i << "]: " << scoresPtr[i] <<std::endl;
+    }
 
     ClassificationResult* result = new ClassificationResult(infResult.frameId, infResult.metaData);
     auto retVal = std::unique_ptr<ResultBase>(result);
 
     result->topLabels.reserve(scoresTensor.get_size());
     for (size_t i = 0; i < scoresTensor.get_size(); ++i) {
         int ind = indicesPtr[i];
+        std::cout << "-S- index???[" << i << "]: " << ind << ", labels size: " << labels.size() <<std::endl;
         if (ind < 0 || ind >= static_cast<int>(labels.size())) {
             throw std::runtime_error(std::string("Invalid index: ") + std::to_string(ind) + " for the class label is found during postprocessing, label size: " + std::to_string(labels.size()));
         }
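
Side note: the "-S-" prints above only walk the raw ov::Tensor buffers returned for the top-k indices and scores. A minimal standalone sketch of that inspection pattern, run against a dummy tensor rather than real model output (the helper name dumpIndicesTensor and the filled-in values are illustrative only, not part of the demo sources):

#include <openvino/openvino.hpp>
#include <iostream>

// Illustrative helper: print the buffer address, element count, and contents
// of an int32 tensor, mirroring the debug prints added in postprocess().
static void dumpIndicesTensor(const ov::Tensor& indices) {
    const int* ptr = indices.data<int>();
    std::cout << "indices tensor data: " << static_cast<const void*>(ptr)
              << ", size: " << indices.get_size() << std::endl;
    for (size_t i = 0; i < indices.get_size(); ++i) {
        std::cout << "index[" << i << "]: " << ptr[i] << std::endl;
    }
}

int main() {
    // Dummy stand-in for the model's top-k indices output.
    ov::Tensor indices(ov::element::i32, ov::Shape{1, 5});
    int* data = indices.data<int>();
    for (int i = 0; i < 5; ++i) {
        data[i] = 5 - i;  // arbitrary values
    }
    dumpIndicesTensor(indices);
    return 0;
}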

demos/common/cpp/models/src/image_model.cpp

Lines changed: 4 additions & 2 deletions
@@ -43,7 +43,7 @@ std::shared_ptr<InternalModelData> ImageModel::preprocess(std::vector<std::share
     const size_t width = tensorShape[ov::layout::width_idx(layout)];
     const size_t height = tensorShape[ov::layout::height_idx(layout)];
     const size_t channels = tensorShape[ov::layout::channels_idx(layout)];
-
+    std::cout << "ImageModel::preprocess: batch: " << batch << ", width: " << width << ", height: " << height << ", channels: " << channels << std::endl;
     char* memoryBlob = nullptr;
     size_t image_index = 0;
     bool isMatFloat = false;
@@ -75,6 +75,7 @@ std::shared_ptr<InternalModelData> ImageModel::preprocess(std::vector<std::share
             img = resizeImageExt(img, width, height, resizeMode, interpolationMode);
         }
         size_t sizeInBytes = img.total() * img.elemSize();
+        std::cout << "image size in bytes: " << sizeInBytes << std::endl;
         if (!memoryBlob) {
             memoryBlob = new char[sizeInBytes * batch]; // intended memory leak
         }
@@ -84,9 +85,10 @@ std::shared_ptr<InternalModelData> ImageModel::preprocess(std::vector<std::share
         image_index++;
     }
 
+    std::cout << "isMatFloat: " << isMatFloat << std::endl;
     auto precision = isMatFloat ? ov::element::f32 : ov::element::u8;
     auto batched_tensor = ov::Tensor(precision, ov::Shape{ batch, height, width, channels }, memoryBlob);
-    request.set_tensor(inputsNames[0],batched_tensor);
+    request.set_tensor(inputsNames[0], batched_tensor);
     return std::make_shared<InternalImageModelData>(origImg_cols, origImg_rows);
 }
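
Side note: the batched_tensor construction in the last hunk wraps the pre-filled memoryBlob buffer in an ov::Tensor without copying, and the tensor does not take ownership of that memory (hence the "intended memory leak" comment earlier in the file). A minimal sketch of that wrapping pattern with made-up NHWC dimensions (the sizes below are placeholders, not the demo's actual values):

#include <openvino/openvino.hpp>
#include <iostream>

int main() {
    // Placeholder dimensions for illustration only.
    const size_t batch = 2, height = 4, width = 4, channels = 3;
    const size_t sizeInBytes = height * width * channels;  // one u8 image

    // Caller-owned buffer holding `batch` images back to back, as in
    // ImageModel::preprocess(). ov::Tensor wraps it without copying and
    // without taking ownership.
    char* memoryBlob = new char[sizeInBytes * batch]();
    auto batched_tensor = ov::Tensor(ov::element::u8,
                                     ov::Shape{batch, height, width, channels},
                                     memoryBlob);

    std::cout << "tensor shape: " << batched_tensor.get_shape()
              << ", byte size: " << batched_tensor.get_byte_size() << std::endl;

    delete[] memoryBlob;  // safe here: the tensor is not used after this point
    return 0;
}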

demos/common/cpp/pipelines/src/async_pipeline.cpp

Lines changed: 1 addition & 0 deletions
@@ -108,6 +108,7 @@ int64_t AsyncPipeline::submitData(std::vector<std::shared_ptr<InputData>>::itera
 
     for (const auto& outName : model->getOutputsNames()) {
         auto tensor = request.get_tensor(outName);
+        std::cout << "-S- output tensorName: " << outName << ", tensor ptr: " << reinterpret_cast<void*>(tensor.data()) << ", size: " << tensor.get_size() << std::endl;
         result.outputsData.emplace(outName, tensor);
     }
