perf: use io copy with buffer pool (#6389)

* feat: add io methods with buffer

* chore: move io.Copy calls to utils.CopyWithBuffer
Mmx 2024-04-25 20:11:15 +08:00 committed by GitHub
parent ec08ecdf6c
commit b95df1d745
22 changed files with 59 additions and 27 deletions
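The core idea: when neither end of a copy implements io.ReaderFrom or io.WriterTo, io.Copy allocates a fresh 32 KB buffer on every call, so hot paths (hashing uploads, proxying downloads) allocate constantly. Routing copies through a shared sync.Pool of buffers and io.CopyBuffer amortizes that allocation. A minimal self-contained sketch of the pattern — bufPool and copyPooled are illustrative names, not this commit's API; the committed helpers are in pkg/utils/io.go at the bottom of the diff:

	package main

	import (
		"io"
		"os"
		"strings"
		"sync"
	)

	// bufPool hands out reusable 64 KB copy buffers (illustrative;
	// the commit's pool is utils.IoBuffPool).
	var bufPool = sync.Pool{
		New: func() interface{} { return make([]byte, 64*1024) },
	}

	// copyPooled copies src to dst through a pooled buffer, so io.Copy
	// does not allocate a new 32 KB buffer on each call.
	func copyPooled(dst io.Writer, src io.Reader) (int64, error) {
		buf := bufPool.Get().([]byte)
		defer bufPool.Put(buf)
		return io.CopyBuffer(dst, src, buf)
	}

	func main() {
		_, _ = copyPooled(os.Stdout, strings.NewReader("hello\n"))
	}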

@@ -194,7 +194,7 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
 	defer func() {
 		_ = tempFile.Close()
 	}()
-	if _, err = io.Copy(h, tempFile); err != nil {
+	if _, err = utils.CopyWithBuffer(h, tempFile); err != nil {
 		return err
 	}
 	_, err = tempFile.Seek(0, io.SeekStart)

@@ -595,7 +595,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
 		}
 		silceMd5.Reset()
-		if _, err := io.CopyN(io.MultiWriter(fileMd5, silceMd5), tempFile, byteSize); err != nil && err != io.EOF {
+		if _, err := utils.CopyWithBufferN(io.MultiWriter(fileMd5, silceMd5), tempFile, byteSize); err != nil && err != io.EOF {
 			return nil, err
 		}
 		md5Byte := silceMd5.Sum(nil)

@@ -194,7 +194,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, streamer model.Fil
 	}
 	if d.RapidUpload {
 		buf := bytes.NewBuffer(make([]byte, 0, 1024))
-		io.CopyN(buf, file, 1024)
+		utils.CopyWithBufferN(buf, file, 1024)
 		reqBody["pre_hash"] = utils.HashData(utils.SHA1, buf.Bytes())
 		if localFile != nil {
 			if _, err := localFile.Seek(0, io.SeekStart); err != nil {

@@ -136,7 +136,7 @@ func (d *AliyundriveOpen) calProofCode(stream model.FileStreamer) (string, error
 	if err != nil {
 		return "", err
 	}
-	_, err = io.CopyN(buf, reader, length)
+	_, err = utils.CopyWithBufferN(buf, reader, length)
 	if err != nil {
 		return "", err
 	}

@@ -211,7 +211,7 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
 		if i == count {
 			byteSize = lastBlockSize
 		}
-		_, err := io.CopyN(io.MultiWriter(fileMd5H, sliceMd5H, slicemd5H2Write), tempFile, byteSize)
+		_, err := utils.CopyWithBufferN(io.MultiWriter(fileMd5H, sliceMd5H, slicemd5H2Write), tempFile, byteSize)
 		if err != nil && err != io.EOF {
 			return nil, err
 		}

@@ -261,7 +261,7 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
 		if i == count {
 			byteSize = lastBlockSize
 		}
-		_, err := io.CopyN(io.MultiWriter(fileMd5H, sliceMd5H, slicemd5H2Write), tempFile, byteSize)
+		_, err := utils.CopyWithBufferN(io.MultiWriter(fileMd5H, sliceMd5H, slicemd5H2Write), tempFile, byteSize)
 		if err != nil && err != io.EOF {
 			return nil, err
 		}

@@ -229,7 +229,7 @@ func (d *ChaoXing) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
 	if err != nil {
 		return err
 	}
-	_, err = io.Copy(filePart, stream)
+	_, err = utils.CopyWithBuffer(filePart, stream)
 	if err != nil {
 		return err
 	}

@@ -271,7 +271,7 @@ func (d *ILanZou) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
 	defer func() {
 		_ = tempFile.Close()
 	}()
-	if _, err = io.Copy(h, tempFile); err != nil {
+	if _, err = utils.CopyWithBuffer(h, tempFile); err != nil {
 		return nil, err
 	}
 	_, err = tempFile.Seek(0, io.SeekStart)

@@ -206,7 +206,7 @@ func (d *MediaTrack) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
 		return err
 	}
 	h := md5.New()
-	_, err = io.Copy(h, tempFile)
+	_, err = utils.CopyWithBuffer(h, tempFile)
 	if err != nil {
 		return err
 	}

@@ -4,6 +4,7 @@ import (
 	"crypto/sha1"
 	"encoding/hex"
 	"errors"
+	"github.com/alist-org/alist/v3/pkg/utils"
 	"io"
 	"net/http"
@@ -141,7 +142,7 @@ func getGcid(r io.Reader, size int64) (string, error) {
 	readSize := calcBlockSize(size)
 	for {
 		hash2.Reset()
-		if n, err := io.CopyN(hash2, r, readSize); err != nil && n == 0 {
+		if n, err := utils.CopyWithBufferN(hash2, r, readSize); err != nil && n == 0 {
 			if err != io.EOF {
 				return "", err
 			}

@@ -143,7 +143,7 @@ func (d *QuarkOrUC) Put(ctx context.Context, dstDir model.Obj, stream model.File
 		_ = tempFile.Close()
 	}()
 	m := md5.New()
-	_, err = io.Copy(m, tempFile)
+	_, err = utils.CopyWithBuffer(m, tempFile)
 	if err != nil {
 		return err
 	}
@@ -153,7 +153,7 @@ func (d *QuarkOrUC) Put(ctx context.Context, dstDir model.Obj, stream model.File
 	}
 	md5Str := hex.EncodeToString(m.Sum(nil))
 	s := sha1.New()
-	_, err = io.Copy(s, tempFile)
+	_, err = utils.CopyWithBuffer(s, tempFile)
 	if err != nil {
 		return err
 	}

@@ -1,7 +1,7 @@
 package smb
 
 import (
-	"io"
+	"github.com/alist-org/alist/v3/pkg/utils"
 	"io/fs"
 	"net"
 	"os"
@@ -74,7 +74,7 @@ func (d *SMB) CopyFile(src, dst string) error {
 	}
 	defer dstfd.Close()
-	if _, err = io.Copy(dstfd, srcfd); err != nil {
+	if _, err = utils.CopyWithBuffer(dstfd, srcfd); err != nil {
 		return err
 	}
 	if srcinfo, err = d.fs.Stat(src); err != nil {

@@ -190,7 +190,7 @@ func getGcid(r io.Reader, size int64) (string, error) {
 	readSize := calcBlockSize(size)
 	for {
 		hash2.Reset()
-		if n, err := io.CopyN(hash2, r, readSize); err != nil && n == 0 {
+		if n, err := utils.CopyWithBufferN(hash2, r, readSize); err != nil && n == 0 {
 			if err != io.EOF {
 				return "", err
 			}

@@ -4,6 +4,7 @@ import (
 	"bytes"
 	"context"
 	"fmt"
+	"github.com/alist-org/alist/v3/pkg/utils"
 	"io"
 	"math"
 	"net/http"
@@ -271,7 +272,7 @@ func (d *downloader) tryDownloadChunk(params *HttpRequestParams, ch *chunk) (int
 		}
 	}
-	n, err := io.Copy(ch.buf, resp.Body)
+	n, err := utils.CopyWithBuffer(ch.buf, resp.Body)
 	if err != nil {
 		return n, &errReadingBody{err: err}

@@ -162,7 +162,7 @@ func ServeHTTP(w http.ResponseWriter, r *http.Request, name string, modTime time
 				pw.CloseWithError(err)
 				return
 			}
-			if _, err := io.CopyN(part, reader, ra.Length); err != nil {
+			if _, err := utils.CopyWithBufferN(part, reader, ra.Length); err != nil {
 				pw.CloseWithError(err)
 				return
 			}
@@ -182,7 +182,7 @@ func ServeHTTP(w http.ResponseWriter, r *http.Request, name string, modTime time
 	w.WriteHeader(code)
 	if r.Method != "HEAD" {
-		written, err := io.CopyN(w, sendContent, sendSize)
+		written, err := utils.CopyWithBufferN(w, sendContent, sendSize)
 		if err != nil {
 			log.Warnf("ServeHttp error. err: %s ", err)
 			if written != sendSize {

@@ -2,6 +2,7 @@ package net
 
 import (
 	"fmt"
+	"github.com/alist-org/alist/v3/pkg/utils"
 	"io"
 	"math"
 	"mime/multipart"
@@ -330,7 +331,7 @@ func GetRangedHttpReader(readCloser io.ReadCloser, offset, length int64) (io.Rea
 		log.Warnf("offset is more than 100MB, if loading data from internet, high-latency and wasting of bandwidth is expected")
 	}
-	if _, err := io.Copy(io.Discard, io.LimitReader(readCloser, offset)); err != nil {
+	if _, err := utils.CopyWithBuffer(io.Discard, io.LimitReader(readCloser, offset)); err != nil {
 		return nil, err
 	}

@@ -104,7 +104,7 @@ func (f *FileStream) RangeRead(httpRange http_range.Range) (io.Reader, error) {
 	if httpRange.Start == 0 && httpRange.Length <= InMemoryBufMaxSizeBytes && f.peekBuff == nil {
 		bufSize := utils.Min(httpRange.Length, f.GetSize())
 		newBuf := bytes.NewBuffer(make([]byte, 0, bufSize))
-		n, err := io.CopyN(newBuf, f.Reader, bufSize)
+		n, err := utils.CopyWithBufferN(newBuf, f.Reader, bufSize)
 		if err != nil {
 			return nil, err
 		}

@@ -4,6 +4,7 @@ import (
 	"bytes"
 	"encoding/xml"
 	"fmt"
+	"github.com/alist-org/alist/v3/pkg/utils"
 	"io"
 	"net/http"
 	"net/url"
@@ -419,7 +420,7 @@ func (c *Client) ReadStreamRange(path string, offset, length int64) (io.ReadClos
 	// stream in rs.Body
 	if rs.StatusCode == 200 {
 		// discard first 'offset' bytes.
-		if _, err := io.Copy(io.Discard, io.LimitReader(rs.Body, offset)); err != nil {
+		if _, err := utils.CopyWithBuffer(io.Discard, io.LimitReader(rs.Body, offset)); err != nil {
 			return nil, newPathErrorErr("ReadStreamRange", path, err)
 		}

@@ -32,7 +32,7 @@ func CopyFile(src, dst string) error {
 	}
 	defer dstfd.Close()
-	if _, err = io.Copy(dstfd, srcfd); err != nil {
+	if _, err = CopyWithBuffer(dstfd, srcfd); err != nil {
 		return err
 	}
 	if srcinfo, err = os.Stat(src); err != nil {
@@ -121,7 +121,7 @@ func CreateTempFile(r io.Reader, size int64) (*os.File, error) {
 	if err != nil {
 		return nil, err
 	}
-	readBytes, err := io.Copy(f, r)
+	readBytes, err := CopyWithBuffer(f, r)
 	if err != nil {
 		_ = os.Remove(f.Name())
 		return nil, errs.NewErr(err, "CreateTempFile failed")

@@ -96,7 +96,7 @@ func HashData(hashType *HashType, data []byte, params ...any) string {
 // HashReader get hash of one hashType from a reader
 func HashReader(hashType *HashType, reader io.Reader, params ...any) (string, error) {
 	h := hashType.NewFunc(params...)
-	_, err := io.Copy(h, reader)
+	_, err := CopyWithBuffer(h, reader)
 	if err != nil {
 		return "", errs.NewErr(err, "HashReader error")
 	}

@@ -4,7 +4,6 @@ import (
 	"bytes"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
-	"io"
 	"testing"
 )
@@ -36,7 +35,7 @@ var hashTestSet = []hashTest{
 func TestMultiHasher(t *testing.T) {
 	for _, test := range hashTestSet {
 		mh := NewMultiHasher([]*HashType{MD5, SHA1, SHA256})
-		n, err := io.Copy(mh, bytes.NewBuffer(test.input))
+		n, err := CopyWithBuffer(mh, bytes.NewBuffer(test.input))
 		require.NoError(t, err)
 		assert.Len(t, test.input, int(n))
 		hashInfo := mh.GetHashInfo()

@@ -6,6 +6,7 @@ import (
 	"errors"
 	"fmt"
 	"io"
+	"sync"
 	"time"
 
 	"golang.org/x/exp/constraints"
@@ -29,7 +30,7 @@ func CopyWithCtx(ctx context.Context, out io.Writer, in io.Reader, size int64, p
 	// possible in the call process.
 	var finish int64 = 0
 	s := size / 100
-	_, err := io.Copy(out, readerFunc(func(p []byte) (int, error) {
+	_, err := CopyWithBuffer(out, readerFunc(func(p []byte) (int, error) {
 		// golang non-blocking channel: https://gobyexample.com/non-blocking-channel-operations
 		select {
 		// if context has been canceled
@@ -204,3 +205,31 @@ func Max[T constraints.Ordered](a, b T) T {
 	}
 	return a
 }
+
+var IoBuffPool = &sync.Pool{
+	New: func() interface{} {
+		return make([]byte, 32*1024*2) // Two times of size in io package
+	},
+}
+
+func CopyWithBuffer(dst io.Writer, src io.Reader) (written int64, err error) {
+	buff := IoBuffPool.Get().([]byte)
+	defer IoBuffPool.Put(buff)
+	written, err = io.CopyBuffer(dst, src, buff)
+	if err != nil {
+		return
+	}
+	return written, nil
+}
+
+func CopyWithBufferN(dst io.Writer, src io.Reader, n int64) (written int64, err error) {
+	written, err = CopyWithBuffer(dst, io.LimitReader(src, n))
+	if written == n {
+		return n, nil
+	}
+	if written < n && err == nil {
+		// src stopped early; must have been EOF.
+		err = io.EOF
+	}
+	return
+}
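For reference, a short usage sketch of the two new helpers (a hypothetical demo program, not part of the commit; utils.CopyWithBuffer and utils.CopyWithBufferN are the helpers added above, and CopyWithBufferN mirrors io.CopyN, so a source shorter than n yields io.EOF):

	package main

	import (
		"crypto/md5"
		"encoding/hex"
		"fmt"
		"io"
		"strings"

		"github.com/alist-org/alist/v3/pkg/utils"
	)

	func main() {
		// Drop-in replacement for io.Copy: hash a stream through the pool,
		// the same call shape the drivers above now use.
		h := md5.New()
		if _, err := utils.CopyWithBuffer(h, strings.NewReader("hello")); err != nil {
			panic(err)
		}
		fmt.Println(hex.EncodeToString(h.Sum(nil))) // 5d41402abc4b2a76b9719d911017c592

		// Drop-in replacement for io.CopyN: a short source yields io.EOF.
		n, err := utils.CopyWithBufferN(io.Discard, strings.NewReader("abc"), 10)
		fmt.Println(n, err) // 3 EOF
	}

One small trade-off visible in the committed pool: Put receives a raw []byte, so each return to the pool re-boxes the slice header (staticcheck's SA6002). Pooling a *[]byte would avoid that, though the cost is negligible next to the 64 KB allocation being saved.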