-
Notifications
You must be signed in to change notification settings - Fork 2.8k
[Dynamic batch] Investigate refactoring opportunities for batch management in Plugin and Compiler #31691
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
[Dynamic batch] Investigate refactoring opportunities for batch management in Plugin and Compiler #31691
Changes from 3 commits
01b7450
365846f
d299f5c
21eb1ef
528435d
2ba4a52
55fb7d7
a2f5de8
9349a91
2e9f5d7
2653c3f
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -506,9 +506,27 @@ ov::Any Plugin::get_property(const std::string& name, const ov::AnyMap& argument | |
| return _properties->get_property(name, arguments); | ||
| } | ||
|
|
||
| std::shared_ptr<ov::Model> deBatchModel(std::shared_ptr<ov::Model>& model) { | ||
| size_t inputIdx = 0; | ||
| std::map<std::string, ov::PartialShape> newShapes; | ||
| for (auto&& item : model->get_parameters()) { | ||
| auto layout = item->get_layout(); | ||
| auto partShape = item->get_partial_shape(); | ||
| if (ov::layout::has_batch(layout)) { | ||
| partShape[ov::layout::batch_idx(layout)] = 1; | ||
| } | ||
| newShapes.emplace(item->get_friendly_name(), partShape); | ||
| inputIdx++; | ||
| } | ||
| model->reshape(newShapes); | ||
|
|
||
| return model; | ||
| } | ||
|
|
||
| std::shared_ptr<ov::ICompiledModel> Plugin::compile_model(const std::shared_ptr<const ov::Model>& model, | ||
| const ov::AnyMap& properties) const { | ||
| OV_ITT_SCOPED_TASK(itt::domains::NPUPlugin, "Plugin::compile_model"); | ||
| auto modelForCompilation = model->clone(); | ||
|
|
||
| // Before going any further: if | ||
| // ... 1 - NPUW mode is activated | ||
|
|
@@ -556,6 +574,27 @@ std::shared_ptr<ov::ICompiledModel> Plugin::compile_model(const std::shared_ptr< | |
| localConfig.update({{ov::intel_npu::batch_mode.name(), strStream.str()}}); | ||
| } | ||
|
|
||
| ov::Dimension originalBatch; | ||
| bool modelDeBached = false; | ||
| if (localConfig.isAvailable(ov::intel_npu::batch_mode.name()) && | ||
| localConfig.get<BATCH_MODE>() == ov::intel_npu::BatchMode::PLUGIN && model->is_dynamic()) { | ||
| try { | ||
| originalBatch = ov::get_batch(modelForCompilation); | ||
| ov::set_batch(modelForCompilation, 1); | ||
| modelDeBached = true; | ||
| } catch (const std::exception& ex) { | ||
| _logger.warning("The plugin couldn't resize a batched model due to exception: {0}.\nProbably, the " | ||
| "model is a dynamic model and layout hasn't been specified. Trying to debatch it...", | ||
| ex.what()); | ||
| modelForCompilation = deBatchModel(modelForCompilation); | ||
| if (!modelForCompilation) { | ||
| OPENVINO_THROW("Cannot debatch a model"); | ||
| } | ||
| _logger.info("The model has been debatched successfully"); | ||
| modelDeBached = true; | ||
| } | ||
| } | ||
DariaMityagina marked this conversation as resolved.
Show resolved
Hide resolved
|
||
|
|
||
| if (localConfig.isAvailable(ov::intel_npu::batch_mode.name()) && !model->get_variables().empty()) { | ||
| if (localConfig.get<BATCH_MODE>() == ov::intel_npu::BatchMode::PLUGIN) { | ||
| OPENVINO_THROW("This model contains states, thus it is not supported when handling batching on the plugin"); | ||
|
|
@@ -614,10 +653,10 @@ std::shared_ptr<ov::ICompiledModel> Plugin::compile_model(const std::shared_ptr< | |
| _logger.debug("performing compile"); | ||
|
|
||
| if (!localConfig.get<WEIGHTLESS_BLOB>()) { | ||
| graph = compiler->compile(model->clone(), localConfig); | ||
| graph = compiler->compile(modelForCompilation->clone(), localConfig); | ||
| } else { | ||
| check_weightless_cache_attribute_occurrence(model); | ||
| graph = compiler->compileWS(model->clone(), localConfig); | ||
| graph = compiler->compileWS(modelForCompilation->clone(), localConfig); | ||
| } | ||
| } catch (const std::exception& ex) { | ||
| OPENVINO_THROW(ex.what()); | ||
|
|
@@ -626,6 +665,16 @@ std::shared_ptr<ov::ICompiledModel> Plugin::compile_model(const std::shared_ptr< | |
| OPENVINO_THROW("NPU plugin: got an unexpected exception from compiler"); | ||
| } | ||
|
|
||
| if (modelDeBached) { | ||
| auto metadata = graph->get_metadata(); | ||
| for (auto& in : metadata.inputs) { | ||
| if (in.shapeFromIRModel.has_value() && originalBatch.get_max_length() != 1) { | ||
| in.shapeFromIRModel.value()[0] = originalBatch; | ||
|
||
| } | ||
| } | ||
| graph->set_metadata(metadata); | ||
|
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I'd propose to extend the API with an explicit layout hint. The purpose of adding this layout is to let the user specify it, so that we stick to the user-provided value instead of speculating about the BATCH_AXIS position — which is not equal to 0 in the generic case, as we ensured in previous PRs. |
||
| } | ||
|
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. This section is necessary to preserve the original batch information. After reshaping the model in lines 660-676 and compiling it in line 736, the metadata will reflect There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Points to consider: Is it possible to avoid altering the metadata? Can we eliminate dependence on it when dealing with dynamic batch scenarios? |
||
|
|
||
| std::shared_ptr<ov::ICompiledModel> compiledModel; | ||
| try { | ||
| compiledModel = std::make_shared<CompiledModel>(model, shared_from_this(), device, graph, localConfig); | ||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.