fix: s3 upload exceeded total allowed configured MaxUploadParts (close #5909)

pull/5910/head^2
Andy Hsu 2024-01-19 12:05:10 +08:00
parent 442c2f77ea
commit 0f29a811bf
7 changed files with 25 additions and 5 deletions
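Every s3manager-based driver touched by this commit gets the same guard: if the stream is larger than MaxUploadParts (10,000) × DefaultUploadPartSize (5 MiB), i.e. roughly 48.8 GiB, the uploader's PartSize is raised so the part count stays within the limit; dividing by MaxUploadParts-1 leaves headroom for the trailing part produced by integer truncation. A minimal, self-contained sketch of that adjustment (the adjustPartSize helper, the standalone Uploader value, and the sizes in main are illustrative only, not part of the commit):

package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/service/s3/s3manager"
)

// adjustPartSize raises the uploader's part size when the default 5 MiB parts
// would require more than MaxUploadParts (10000) parts for the given size.
func adjustPartSize(up *s3manager.Uploader, size int64) {
	if size > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
		// MaxUploadParts-1 leaves room for the final, smaller part that
		// integer division would otherwise push past the part limit.
		up.PartSize = size / (s3manager.MaxUploadParts - 1)
	}
}

func main() {
	// Standalone Uploader value, just to show the field being set.
	up := &s3manager.Uploader{PartSize: s3manager.DefaultUploadPartSize}
	adjustPartSize(up, 60<<30) // a hypothetical 60 GiB stream
	fmt.Printf("part size: %d bytes\n", up.PartSize)
}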

View File

@@ -6,6 +6,10 @@ import (
 	"encoding/base64"
 	"encoding/hex"
 	"fmt"
+	"io"
+	"net/http"
+	"net/url"
+
 	"github.com/alist-org/alist/v3/drivers/base"
 	"github.com/alist-org/alist/v3/internal/driver"
 	"github.com/alist-org/alist/v3/internal/errs"
@@ -17,9 +21,6 @@ import (
 	"github.com/aws/aws-sdk-go/service/s3/s3manager"
 	"github.com/go-resty/resty/v2"
 	log "github.com/sirupsen/logrus"
-	"io"
-	"net/http"
-	"net/url"
 )
 
 type Pan123 struct {
@@ -232,6 +233,9 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
 		return err
 	}
 	uploader := s3manager.NewUploader(s)
+	if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
+		uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
+	}
 	input := &s3manager.UploadInput{
 		Bucket: &resp.Data.Bucket,
 		Key:    &resp.Data.Key,

View File

@@ -188,6 +188,9 @@ func (d *MediaTrack) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
 		_ = tempFile.Close()
 	}()
 	uploader := s3manager.NewUploader(s)
+	if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
+		uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
+	}
 	input := &s3manager.UploadInput{
 		Bucket: &resp.Data.Bucket,
 		Key:    &resp.Data.Object,

View File

@@ -172,6 +172,9 @@ func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
 		return err
 	}
 	uploader := s3manager.NewUploader(ss)
+	if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
+		uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
+	}
 	input := &s3manager.UploadInput{
 		Bucket: &params.Bucket,
 		Key:    &params.Key,

View File

@@ -311,7 +311,7 @@ func (d *Quqi) Put(ctx context.Context, dstDir model.Obj, stream model.FileStrea
 			length = stream.GetSize() - (int64(i)-1)*partSize
 		}
 		_, err := client.Object.UploadPart(
-			context.Background(), uploadInitResp.Data.Key, uploadInitResp.Data.UploadID, i, io.LimitReader(f, partSize), &cos.ObjectUploadPartOptions{
+			ctx, uploadInitResp.Data.Key, uploadInitResp.Data.UploadID, i, io.LimitReader(f, partSize), &cos.ObjectUploadPartOptions{
				ContentLength: length,
			},
		)

View File

@@ -244,6 +244,9 @@ func (d *Teambition) newUpload(ctx context.Context, dstDir model.Obj, stream mod
 		return err
 	}
 	uploader := s3manager.NewUploader(ss)
+	if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
+		uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
+	}
 	input := &s3manager.UploadInput{
 		Bucket: &uploadToken.Upload.Bucket,
 		Key:    &uploadToken.Upload.Key,

View File

@@ -373,7 +373,11 @@ func (xc *XunLeiCommon) Put(ctx context.Context, dstDir model.Obj, stream model.
 	if err != nil {
 		return err
 	}
-	_, err = s3manager.NewUploader(s).UploadWithContext(ctx, &s3manager.UploadInput{
+	uploader := s3manager.NewUploader(s)
+	if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
+		uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
+	}
+	_, err = uploader.UploadWithContext(ctx, &s3manager.UploadInput{
 		Bucket:  aws.String(param.Bucket),
 		Key:     aws.String(param.Key),
 		Expires: aws.Time(param.Expiration),

View File

@@ -272,6 +272,9 @@ func (d *Vtencent) FileUpload(ctx context.Context, dstDir model.Obj, stream mode
 		return err
 	}
 	uploader := s3manager.NewUploader(ss)
+	if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
+		uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
+	}
 	input := &s3manager.UploadInput{
 		Bucket: aws.String(fmt.Sprintf("%s-%d", params.StorageBucket, params.StorageAppID)),
 		Key:    &params.Video.StoragePath,