fix ncnn convdw1d dynamic weight loading
nihui committed Mar 27, 2024
1 parent ef84eb0 commit 6c626ac
Showing 3 changed files with 7 additions and 3 deletions.
3 changes: 3 additions & 0 deletions src/layer/convolutiondepthwise1d.cpp
@@ -59,6 +59,9 @@ int ConvolutionDepthWise1D::load_param(const ParamDict& pd)
 
 int ConvolutionDepthWise1D::load_model(const ModelBin& mb)
 {
+    if (dynamic_weight)
+        return 0;
+
     weight_data = mb.load(weight_data_size, 0);
     if (weight_data.empty())
         return -100;
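Why the early return matters: when dynamic_weight is set, the layer's weights are produced at runtime and arrive as an extra input blob, so the exported .bin carries no weight bytes for this layer. The toy sketch below (hypothetical types, not ncnn's real ModelBin API) illustrates the failure mode the fix avoids: a sequential bin reader that loads weights for a dynamic-weight layer would consume bytes belonging to the next layer.

#include <cstdio>
#include <vector>

// Toy stand-in for a sequential weight-bin reader; not ncnn's ModelBin API.
struct ToyModelBin
{
    const std::vector<float>& data;
    size_t cursor = 0;

    // every load() advances a shared cursor through the flat .bin contents
    std::vector<float> load(size_t n)
    {
        std::vector<float> w(data.begin() + cursor, data.begin() + cursor + n);
        cursor += n;
        return w;
    }
};

struct ToyLayer
{
    bool dynamic_weight;
    size_t weight_data_size;

    int load_model(ToyModelBin& mb)
    {
        // mirror of the fix: a dynamic-weight layer owns no bytes in the bin,
        // so reading here would steal the next layer's weights
        if (dynamic_weight)
            return 0;

        mb.load(weight_data_size);
        return 0;
    }
};

int main()
{
    std::vector<float> bin(3, 1.f); // bin holds weights for the static layer only
    ToyModelBin mb{bin};

    ToyLayer dyn{true, 3};  // weights arrive as an input blob at inference time
    ToyLayer sta{false, 3}; // weights live in the bin

    dyn.load_model(mb);
    sta.load_model(mb);

    printf("cursor = %zu (3 = correct; 6 would mean dyn stole sta's bytes)\n", mb.cursor);
    return 0;
}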
1 change: 1 addition & 0 deletions tools/pnnx/src/pass_level2.cpp
@@ -1235,6 +1235,7 @@ static void functionize(Graph& graph)
             op->type = op->type.substr(0, op->type.size() - 1);
 
             // append aten::copy_
+            if (graph.operands[alias_index]->consumers.size() > 1)
             {
                 Operand* in0 = op->inputs[0];
                 Operand* out0 = op->outputs[0];
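For context on the pass_level2.cpp guard: functionize rewrites an inplace op (type ending in "_", stripped just above) into its functional form, and the block that follows appends an explicit aten::copy_ to write the result back into the aliased operand. The new condition skips that copy when the aliased operand has no other consumer, in which case the functional result can stand in for the alias directly. A toy sketch of that decision, using hypothetical stand-in structures rather than pnnx's real Graph/Operand types:

#include <cstdio>
#include <string>
#include <vector>

// Hypothetical stand-in; pnnx's real Operand tracks consumers as Operator*.
struct ToyOperand
{
    std::string name;
    std::vector<std::string> consumers; // names of ops reading this operand
};

// decide whether functionize must append an aten::copy_ for this alias
static bool needs_copy_back(const ToyOperand& alias)
{
    // mirrors the added condition: with a single consumer (the rewritten op
    // itself), no one else observes the alias, so the copy is redundant
    return alias.consumers.size() > 1;
}

int main()
{
    ToyOperand lone{"x", {"aten::relu"}};
    ToyOperand shared{"y", {"aten::relu", "aten::add"}};

    printf("x needs copy_: %d\n", needs_copy_back(lone));   // 0
    printf("y needs copy_: %d\n", needs_copy_back(shared)); // 1
    return 0;
}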
6 changes: 3 additions & 3 deletions tools/pnnx/src/pass_ncnn/F_conv1d.cpp
@@ -103,7 +103,7 @@ pnnx.Output output 1 0 out
         std::vector<int> weight_shape = op->inputs[1]->shape;
         if (weight_shape.empty())
         {
-            weight_shape = {0, 0, 0, 0};
+            weight_shape = {0, 0, 0};
         }
 
         op->params["0"] = weight_shape[0];
@@ -158,7 +158,7 @@ pnnx.Output output 1 0 out
         std::vector<int> weight_shape = op->inputs[1]->shape;
         if (weight_shape.empty())
         {
-            weight_shape = {0, 0, 0, 0};
+            weight_shape = {0, 0, 0};
         }
 
         op->params["0"] = weight_shape[0];
@@ -215,7 +215,7 @@ pnnx.Output output 1 0 out
         std::vector<int> weight_shape = op->inputs[1]->shape;
         if (weight_shape.empty())
         {
-            weight_shape = {0, 0, 0, 0};
+            weight_shape = {0, 0, 0};
         }
 
         op->params["0"] = weight_shape[0];
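The three {0, 0, 0, 0} -> {0, 0, 0} edits all fix the same oversight: an F.conv1d weight tensor is 3-D ([out_channels, in_channels / groups, kernel_w]), so the placeholder for an unknown shape must have three entries; the fourth entry matched conv2d's 4-D layout, not conv1d's. A small sketch of the mapping the pass performs (reading kernel_w from dim 2 is an assumption based on PyTorch's conv1d weight layout; the diff itself only shows param "0"):

#include <cstdio>
#include <vector>

int main()
{
    // PyTorch F.conv1d weight layout: [out_channels, in_channels / groups, kernel_w]
    std::vector<int> weight_shape = {16, 8, 3};

    if (weight_shape.empty())
        weight_shape = {0, 0, 0}; // unknown shape: three zeroed dims, not four

    // param "0" (num_output) comes straight from dim 0, as in the diff above;
    // taking kernel_w from dim 2 is an assumption about the rest of the pass
    printf("num_output = %d\n", weight_shape[0]); // 16
    printf("kernel_w   = %d\n", weight_shape[2]); // 3
    return 0;
}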
