diff --git a/models/cv/classification/efficientnet_b1/ixrt/README.md b/models/cv/classification/efficientnet_b1/ixrt/README.md
index 9d6cbe934c101a963d1170ad0d7c423f246d8022..c943c82133745543c0682e4222d9ab20383d0164 100644
--- a/models/cv/classification/efficientnet_b1/ixrt/README.md
+++ b/models/cv/classification/efficientnet_b1/ixrt/README.md
@@ -15,7 +15,7 @@ EfficientNet B1 is one of the variants in the EfficientNet family of neural netw
 
 ### Prepare Resources
 
-Pretrained model: 
+Pretrained model: 
 
 Dataset:  to download the validation dataset.
 
@@ -34,8 +34,7 @@ pip3 install -r ../../ixrt_common/requirements.txt
 ### Model Conversion
 
 ```bash
-mkdir checkpoints
-python3 ../../ixrt_common/export.py --model-name efficientnet_b1 --weight efficientnet_b1-c27df63c.pth --output checkpoints/efficientnet_b1.onnx
+mkdir -p checkpoints && wget -O checkpoints/efficientnet_b1.onnx http://files.deepspark.org.cn:880/deepspark/data/checkpoints/efficientnet_b1.onnx
 ```
 
 ## Model Inference
diff --git a/models/cv/classification/ixrt_common/modify_batchsize.py b/models/cv/classification/ixrt_common/modify_batchsize.py
index fbd3be3f213030aaad20ee48a44f206118813c45..c55dd2467c8977ff973f7dca7351cf8a0a02c54e 100644
--- a/models/cv/classification/ixrt_common/modify_batchsize.py
+++ b/models/cv/classification/ixrt_common/modify_batchsize.py
@@ -1,5 +1,6 @@
 import onnx
 import argparse
+import numpy as np
 
 def change_input_dim(model, bsz):
     batch_size = bsz
@@ -23,6 +24,23 @@ def change_input_dim(model, bsz):
         # set batch size of 1
         dim1.dim_value = 1
 
+def change_reshape_batch(model, bsz):
+    batch_size = int(bsz) if not isinstance(bsz, int) else bsz
+    initializer_map = {init.name: init for init in model.graph.initializer}
+    for node in model.graph.node:
+        if node.op_type == 'Reshape' and len(node.input) >= 2:
+            shape_name = node.input[1]
+            if shape_name not in initializer_map:
+                continue
+            init = initializer_map[shape_name]
+            shape_val = np.array(onnx.numpy_helper.to_array(init))
+            if len(shape_val) >= 1 and shape_val[0] > 0 and shape_val[0] != batch_size:
+                old_val = shape_val[0]
+                shape_val[0] = batch_size
+                new_init = onnx.numpy_helper.from_array(shape_val, name=shape_name)
+                init.CopyFrom(new_init)
+                print(f" Reshape {node.name}: shape[0] {old_val} -> {batch_size}")
+
 def infer_node_shape(model):
     # remove old shape of the node
     for value_info in model.graph.value_info:
@@ -54,8 +72,9 @@ if __name__ == "__main__":
     args = parse_args()
     model = onnx.load(args.origin_model)
     change_input_dim(model, args.batch_size)
+    change_reshape_batch(model, args.batch_size)
     if args.strict_mode:
         model = infer_node_shape(model)
 
-    onnx.save(model, args.output_model)
+    onnx.save(model, args.output_model)
\ No newline at end of file