Skip to content

Commit

Permalink
Merge pull request #17 from mazrean/doc/add-next-part-benchmark
Browse files Browse the repository at this point in the history
add NextPart benchmark
  • Loading branch information
mazrean authored Mar 30, 2024
2 parents 0b5ca3b + ac143d5 commit 5335f06
Show file tree
Hide file tree
Showing 3 changed files with 95 additions and 6 deletions.
84 changes: 82 additions & 2 deletions formstream_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package formstream_test

import (
"bytes"
"errors"
"fmt"
"io"
"log"
Expand Down Expand Up @@ -42,7 +43,7 @@ large file contents
}

return nil
})
}, formstream.WithRequiredPart("field"))
if err != nil {
log.Fatal(err)
}
Expand Down Expand Up @@ -209,7 +210,7 @@ func benchmarkFormStream(b *testing.B, fileSize formstream.DataSize, reverse boo

parser := formstream.NewParser(boundary)

err = parser.Register("stream", func(r io.Reader, header formstream.Header) error {
err = parser.Register("stream", func(r io.Reader, _ formstream.Header) error {
// get field value
_, _, _ = parser.Value("field")

Expand All @@ -231,6 +232,85 @@ func benchmarkFormStream(b *testing.B, fileSize formstream.DataSize, reverse boo
}
}

func BenchmarkStdMultipartNextPart(b *testing.B) {
b.Run("1MB", func(b *testing.B) {
benchmarkStdMultipartNextPart(b, 1*formstream.MB)
})
b.Run("10MB", func(b *testing.B) {
benchmarkStdMultipartNextPart(b, 10*formstream.MB)
})
b.Run("100MB", func(b *testing.B) {
benchmarkStdMultipartNextPart(b, 100*formstream.MB)
})
b.Run("1GB", func(b *testing.B) {
benchmarkStdMultipartNextPart(b, 1*formstream.GB)
})
b.Run("5GB", func(b *testing.B) {
if testing.Short() {
b.Skip("skipping test in short mode.")
}
benchmarkStdMultipartNextPart(b, 5*formstream.GB)
})
b.Run("10GB", func(b *testing.B) {
if testing.Short() {
b.Skip("skipping test in short mode.")
}
benchmarkStdMultipartNextPart(b, 10*formstream.GB)
})
}

// benchmarkStdMultipartNextPart benchmarks parsing a multipart form of the
// given file size with the standard library's multipart.Reader.NextPart.
// The "field" part is materialized as a string (mirroring what
// formstream's Value would provide); every other part is streamed to
// io.Discard without being retained.
func benchmarkStdMultipartNextPart(b *testing.B, fileSize formstream.DataSize) {
	r, err := sampleForm(fileSize, boundary, false)
	if err != nil {
		b.Fatal(err)
	}
	defer r.Close()

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		// Rewinding the shared sample form is setup, not the work under
		// measurement, so keep it outside the timed region.
		b.StopTimer()
		if _, err := r.Seek(0, io.SeekStart); err != nil {
			b.Fatal(err)
		}
		b.StartTimer()

		func() {
			mr := multipart.NewReader(r, boundary)

			for {
				p, err := mr.NextPart()
				if errors.Is(err, io.EOF) {
					break
				}
				if err != nil {
					b.Fatal(err)
				}

				if p.FormName() == "field" {
					// Small text field: read it into a string.
					sb := &strings.Builder{}
					if _, err := io.Copy(sb, p); err != nil {
						b.Fatal(err)
					}
					_ = sb.String()
				} else {
					// Large file part: drain without retaining.
					if _, err := io.Copy(io.Discard, p); err != nil {
						b.Fatal(err)
					}
				}
				// The original code performed a second
				// io.Copy(io.Discard, p) here. Both branches above already
				// drain the part completely, so that copy always read 0
				// bytes; it was dead work inside the timed loop and has
				// been removed.
			}
		}()
	}
}

func BenchmarkStdMultipartReadForm(b *testing.B) {
b.Run("1MB", func(b *testing.B) {
benchmarkStdMultipartReadForm(b, 1*formstream.MB)
Expand Down
4 changes: 2 additions & 2 deletions http/parser_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ func createUserHandler(res http.ResponseWriter, req *http.Request) {
return
}

err = parser.Register("icon", func(r io.Reader, header formstream.Header) error {
err = parser.Register("icon", func(r io.Reader, _ formstream.Header) error {
name, _, _ := parser.Value("name")
password, _, _ := parser.Value("password")

Expand Down Expand Up @@ -317,7 +317,7 @@ func benchmarkFormStream(b *testing.B, fileSize formstream.DataSize, reverse boo
return
}

err = parser.Register("stream", func(r io.Reader, header formstream.Header) error {
err = parser.Register("stream", func(r io.Reader, _ formstream.Header) error {
// get field value
_, _, _ = parser.Value("field")

Expand Down
13 changes: 11 additions & 2 deletions scripts/graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,13 @@ def parse_file_size(mem_size: str):
raise Exception("Unknown memory size: " + mem_size)


group_colors = {
"FormStream(FastPath)": "#ADFF2F",
"FormStream(SlowPath)": "#00FF00",
"std(with NextPart)": "#6495ED",
"std(with ReadForm)": "#D3D3D3",
}

file_sizes = []
time_per_ops = {}
mem_per_ops = {}
Expand All @@ -51,15 +58,16 @@ def parse_file_size(mem_size: str):
# remove duplicates
file_sizes = sorted(set(file_sizes), key=file_sizes.index)

bar_width = 0.25
bar_width = 0.2

index = np.arange(len(file_sizes))
index = np.arange(len(file_sizes)) - 0.1
fig, ax_time = plt.subplots(figsize=(12, 7))
for i, (group, group_time_dict) in enumerate(time_per_ops.items()):
ax_time.bar(
index + i * bar_width,
[group_time_dict[fs] / 1e6 for fs in file_sizes],
bar_width,
color=group_colors[group],
label=group,
)

Expand All @@ -79,6 +87,7 @@ def parse_file_size(mem_size: str):
index + i * bar_width,
[group_mem_dict[fs] for fs in file_sizes],
bar_width,
color=group_colors[group],
label=group,
)

Expand Down

0 comments on commit 5335f06

Please sign in to comment.