
Commit 6e765a2
apply code-format changes
nihui authored and github-actions[bot] committed Oct 15, 2024
1 parent 8113cb8 commit 6e765a2
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions src/layer/x86/multiheadattention_x86.cpp
@@ -43,7 +43,7 @@ int MultiHeadAttention_x86::create_pipeline(const Option& _opt)
     {
         support_packing = false;

-        opt.use_packing_layout = false;// TODO enable packing
+        opt.use_packing_layout = false; // TODO enable packing
     }

     {
@@ -257,7 +257,7 @@ int MultiHeadAttention_x86::destroy_pipeline(const Option& _opt)
     Option opt = _opt;
     if (int8_scale_term)
     {
-        opt.use_packing_layout = false;// TODO enable packing
+        opt.use_packing_layout = false; // TODO enable packing
     }

     if (qk_softmax)
@@ -321,7 +321,7 @@ int MultiHeadAttention_x86::forward(const std::vector<Mat>& bottom_blobs, std::v
     Option opt = _opt;
     if (int8_scale_term)
     {
-        opt.use_packing_layout = false;// TODO enable packing
+        opt.use_packing_layout = false; // TODO enable packing
     }

     Mat attn_mask_blob_unpacked;
