mirror of https://github.com/Xhofe/alist

feat: support cancel for some drivers (close #2717)

parent e4a88a7c13
commit 105f22969c
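The commit applies one pattern across the drivers below: every chunked-upload loop checks the request context before sending the next part, and every outgoing HTTP call carries that context so an in-flight transfer stops when the task is canceled. The loops call a utils.IsCanceled(ctx) helper whose body is not part of this diff; a minimal sketch of what such a helper presumably looks like, inferred from the select blocks it replaces in CommonUpload/FastUpload further down (an assumption, not the actual pkg/utils source):

package utils

import "context"

// IsCanceled reports, without blocking, whether ctx has already been canceled
// or has expired; upload loops call it once per chunk and return ctx.Err() early.
func IsCanceled(ctx context.Context) bool {
    select {
    case <-ctx.Done():
        return true
    default:
        return false
    }
}
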
@@ -221,7 +221,7 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
     }
     var resp UploadResp
     _, err := d.request("https://www.123pan.com/a/api/file/upload_request", http.MethodPost, func(req *resty.Request) {
-        req.SetBody(data)
+        req.SetBody(data).SetContext(ctx)
     }, &resp)
     if err != nil {
         return err

@@ -245,14 +245,14 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
         Key: &resp.Data.Key,
         Body: uploadFile,
     }
-    _, err = uploader.Upload(input)
+    _, err = uploader.UploadWithContext(ctx, input)
     if err != nil {
         return err
     }
     _, err = d.request("https://www.123pan.com/api/file/upload_complete", http.MethodPost, func(req *resty.Request) {
         req.SetBody(base.Json{
             "fileId": resp.Data.FileId,
-        })
+        }).SetContext(ctx)
     }, nil)
     return err
 }

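Many hunks in this commit do nothing more than chain .SetContext(ctx) onto an existing resty request, as in the Pan123 change above. A rough, self-contained illustration of why that is enough for cancellation; the client, endpoint, and timing here are made up for the example, not taken from the driver code:

package main

import (
    "context"
    "fmt"
    "time"

    "github.com/go-resty/resty/v2"
)

func main() {
    ctx, cancel := context.WithCancel(context.Background())
    // Cancel shortly after starting, e.g. when a user aborts the upload task.
    go func() {
        time.Sleep(100 * time.Millisecond)
        cancel()
    }()

    // SetContext ties the underlying HTTP request to ctx, so the call returns
    // with a context.Canceled error instead of running to completion.
    _, err := resty.New().R().
        SetContext(ctx).
        SetBody([]byte("chunk data")).
        Put("https://example.com/upload") // placeholder endpoint
    fmt.Println(err)
}
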
@@ -13,6 +13,7 @@ import (
     "github.com/alist-org/alist/v3/internal/driver"
     "github.com/alist-org/alist/v3/internal/errs"
     "github.com/alist-org/alist/v3/internal/model"
+    "github.com/alist-org/alist/v3/pkg/utils"
     log "github.com/sirupsen/logrus"
 )

@@ -268,6 +269,9 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
     part := int(math.Ceil(float64(stream.GetSize()) / float64(Default)))
     var start int64 = 0
     for i := 0; i < part; i++ {
+        if utils.IsCanceled(ctx) {
+            return ctx.Err()
+        }
         byteSize := stream.GetSize() - start
         if byteSize > Default {
             byteSize = Default

@@ -281,6 +285,7 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
         if err != nil {
             return err
         }
+        req = req.WithContext(ctx)
         headers := map[string]string{
             "Accept": "*/*",
             "Content-Type": "text/plain;name=" + unicode(stream.GetName()),

@@ -194,7 +194,7 @@ func (d *Cloud189) Remove(ctx context.Context, obj model.Obj) error {
 }

 func (d *Cloud189) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
-    return d.newUpload(dstDir, stream, up)
+    return d.newUpload(ctx, dstDir, stream, up)
 }

 var _ driver.Driver = (*Cloud189)(nil)

@@ -2,6 +2,7 @@ package _189

 import (
     "bytes"
+    "context"
     "crypto/md5"
     "encoding/base64"
     "encoding/hex"

@@ -306,7 +307,7 @@ func (d *Cloud189) uploadRequest(uri string, form map[string]string, resp interf
     return data, nil
 }

-func (d *Cloud189) newUpload(dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) error {
+func (d *Cloud189) newUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) error {
     sessionKey, err := d.getSessionKey()
     if err != nil {
         return err

@@ -335,6 +336,9 @@ func (d *Cloud189) newUpload(dstDir model.Obj, file model.FileStreamer, up drive
     md5s := make([]string, 0)
     md5Sum := md5.New()
     for i = 1; i <= count; i++ {
+        if utils.IsCanceled(ctx) {
+            return ctx.Err()
+        }
         byteSize = file.GetSize() - finish
         if DEFAULT < byteSize {
             byteSize = DEFAULT

@@ -364,12 +368,15 @@ func (d *Cloud189) newUpload(dstDir model.Obj, file model.FileStreamer, up drive
         log.Debugf("uploadData: %+v", uploadData)
         requestURL := uploadData.RequestURL
         uploadHeaders := strings.Split(decodeURIComponent(uploadData.RequestHeader), "&")
-        req, _ := http.NewRequest(http.MethodPut, requestURL, bytes.NewReader(byteData))
+        req, err := http.NewRequest(http.MethodPut, requestURL, bytes.NewReader(byteData))
+        if err != nil {
+            return err
+        }
+        req = req.WithContext(ctx)
         for _, v := range uploadHeaders {
             i := strings.Index(v, "=")
             req.Header.Set(v[0:i], v[i+1:])
         }

         r, err := base.HttpClient.Do(req)
         log.Debugf("%+v %+v", r, r.Request.Header)
         r.Body.Close()

@@ -13,7 +13,7 @@ import (
     "github.com/go-resty/resty/v2"
 )

-type Yun189PC struct {
+type Cloud189PC struct {
     model.Storage
     Addition

@@ -26,15 +26,15 @@ type Yun189PC struct {
     tokenInfo *AppSessionResp
 }

-func (y *Yun189PC) Config() driver.Config {
+func (y *Cloud189PC) Config() driver.Config {
     return config
 }

-func (y *Yun189PC) GetAddition() driver.Additional {
+func (y *Cloud189PC) GetAddition() driver.Additional {
     return &y.Addition
 }

-func (y *Yun189PC) Init(ctx context.Context) (err error) {
+func (y *Cloud189PC) Init(ctx context.Context) (err error) {
     // handle personal cloud vs family cloud parameters
     if y.isFamily() && y.RootFolderID == "-11" {
         y.RootFolderID = ""

@@ -73,15 +73,15 @@ func (y *Yun189PC) Init(ctx context.Context) (err error) {
     return
 }

-func (y *Yun189PC) Drop(ctx context.Context) error {
+func (y *Cloud189PC) Drop(ctx context.Context) error {
     return nil
 }

-func (y *Yun189PC) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
+func (y *Cloud189PC) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
     return y.getFiles(ctx, dir.GetID())
 }

-func (y *Yun189PC) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
+func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
     var downloadUrl struct {
         URL string `json:"fileDownloadUrl"`
     }

@@ -140,7 +140,7 @@ func (y *Yun189PC) Link(ctx context.Context, file model.Obj, args model.LinkArgs
     return like, nil
 }

-func (y *Yun189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
+func (y *Cloud189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
     fullUrl := API_URL
     if y.isFamily() {
         fullUrl += "/family/file"

@@ -167,7 +167,7 @@ func (y *Yun189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName str
     return err
 }

-func (y *Yun189PC) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
+func (y *Cloud189PC) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
     _, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
         req.SetContext(ctx)
         req.SetFormData(map[string]string{

@@ -191,7 +191,7 @@ func (y *Yun189PC) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
     return err
 }

-func (y *Yun189PC) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
+func (y *Cloud189PC) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
     queryParam := make(map[string]string)
     fullUrl := API_URL
     method := http.MethodPost

@@ -216,7 +216,7 @@ func (y *Yun189PC) Rename(ctx context.Context, srcObj model.Obj, newName string)
     return err
 }

-func (y *Yun189PC) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
+func (y *Cloud189PC) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
     _, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
         req.SetContext(ctx)
         req.SetFormData(map[string]string{

@@ -241,7 +241,7 @@ func (y *Yun189PC) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
     return err
 }

-func (y *Yun189PC) Remove(ctx context.Context, obj model.Obj) error {
+func (y *Cloud189PC) Remove(ctx context.Context, obj model.Obj) error {
     _, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
         req.SetContext(ctx)
         req.SetFormData(map[string]string{

@@ -265,7 +265,7 @@ func (y *Yun189PC) Remove(ctx context.Context, obj model.Obj) error {
     return err
 }

-func (y *Yun189PC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
+func (y *Cloud189PC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
     if y.RapidUpload {
         return y.FastUpload(ctx, dstDir, stream, up)
     }

@@ -25,6 +25,6 @@ var config = driver.Config{

 func init() {
     op.RegisterDriver(func() driver.Driver {
-        return &Yun189PC{}
+        return &Cloud189PC{}
     })
 }

@@ -47,7 +47,7 @@ const (
     CHANNEL_ID = "web_cloud.189.cn"
 )

-func (y *Yun189PC) request(url, method string, callback base.ReqCallback, params Params, resp interface{}) ([]byte, error) {
+func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, params Params, resp interface{}) ([]byte, error) {
     dateOfGmt := getHttpDateStr()
     sessionKey := y.tokenInfo.SessionKey
     sessionSecret := y.tokenInfo.SessionSecret

@@ -124,15 +124,15 @@ func (y *Yun189PC) request(url, method string, callback base.ReqCallback, params
     }
 }

-func (y *Yun189PC) get(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
+func (y *Cloud189PC) get(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
     return y.request(url, http.MethodGet, callback, nil, resp)
 }

-func (y *Yun189PC) post(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
+func (y *Cloud189PC) post(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
     return y.request(url, http.MethodPost, callback, nil, resp)
 }

-func (y *Yun189PC) getFiles(ctx context.Context, fileId string) ([]model.Obj, error) {
+func (y *Cloud189PC) getFiles(ctx context.Context, fileId string) ([]model.Obj, error) {
     fullUrl := API_URL
     if y.isFamily() {
         fullUrl += "/family/file"

@@ -184,7 +184,7 @@ func (y *Yun189PC) getFiles(ctx context.Context, fileId string) ([]model.Obj, er
     return res, nil
 }

-func (y *Yun189PC) login() (err error) {
+func (y *Cloud189PC) login() (err error) {
     // initialize the parameters needed for login
     if y.loginParam == nil || !y.NoUseOcr {
         if err = y.initLoginParam(); err != nil {

@@ -264,7 +264,7 @@ func (y *Yun189PC) login() (err error) {
 /* Initialize the parameters needed for login
  * Return an error if a captcha is encountered
  */
-func (y *Yun189PC) initLoginParam() error {
+func (y *Cloud189PC) initLoginParam() error {
     // clear cookies
     jar, _ := cookiejar.New(nil)
     y.client.SetCookieJar(jar)

@@ -335,7 +335,7 @@ func (y *Yun189PC) initLoginParam() error {
 }

 // refresh the session
-func (y *Yun189PC) refreshSession() (err error) {
+func (y *Cloud189PC) refreshSession() (err error) {
     var erron RespErr
     var userSessionResp UserSessionResp
     _, err = y.client.R().

@@ -381,7 +381,7 @@ func (y *Yun189PC) refreshSession() (err error) {
 }

 // regular upload
-func (y *Yun189PC) CommonUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (err error) {
+func (y *Cloud189PC) CommonUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (err error) {
     const DEFAULT int64 = 10485760
     var count = int64(math.Ceil(float64(file.GetSize()) / float64(DEFAULT)))

@@ -418,10 +418,8 @@ func (y *Yun189PC) CommonUpload(ctx context.Context, dstDir model.Obj, file mode
     silceMd5Hexs := make([]string, 0, count)
     byteData := bytes.NewBuffer(make([]byte, DEFAULT))
     for i := int64(1); i <= count; i++ {
-        select {
-        case <-ctx.Done():
+        if utils.IsCanceled(ctx) {
             return ctx.Err()
-        default:
         }

         // read a chunk

@@ -491,7 +489,7 @@ func (y *Yun189PC) CommonUpload(ctx context.Context, dstDir model.Obj, file mode
 }

 // rapid upload
-func (y *Yun189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (err error) {
+func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (err error) {
     // the full file md5 is required, so io.Seek must be supported
     tempFile, err := utils.CreateTempFile(file.GetReadCloser())
     if err != nil {

@@ -511,10 +509,8 @@ func (y *Yun189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.
     silceMd5Hexs := make([]string, 0, count)
     silceMd5Base64s := make([]string, 0, count)
     for i := 1; i <= count; i++ {
-        select {
-        case <-ctx.Done():
+        if utils.IsCanceled(ctx) {
             return ctx.Err()
-        default:
         }

         silceMd5.Reset()

@@ -616,11 +612,11 @@ func (y *Yun189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.
     return err
 }

-func (y *Yun189PC) isFamily() bool {
+func (y *Cloud189PC) isFamily() bool {
     return y.Type == "family"
 }

-func (y *Yun189PC) isLogin() bool {
+func (y *Cloud189PC) isLogin() bool {
     if y.tokenInfo == nil {
         return false
     }

@@ -629,7 +625,7 @@ func (y *Yun189PC) isLogin() bool {
 }

 // get the info of all family cloud members
-func (y *Yun189PC) getFamilyInfoList() ([]FamilyInfoResp, error) {
+func (y *Cloud189PC) getFamilyInfoList() ([]FamilyInfoResp, error) {
     var resp FamilyInfoListResp
     _, err := y.get(API_URL+"/family/manage/getFamilyList.action", nil, &resp)
     if err != nil {

@@ -639,7 +635,7 @@ func (y *Yun189PC) getFamilyInfoList() ([]FamilyInfoResp, error) {
 }

 // extract the family cloud ID
-func (y *Yun189PC) getFamilyID() (string, error) {
+func (y *Cloud189PC) getFamilyID() (string, error) {
     infos, err := y.getFamilyInfoList()
     if err != nil {
         return "", err

@@ -162,7 +162,7 @@ func (d *AListV3) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
     if err != nil {
         return nil
     }
-    _, err = base.RestyClient.R().
+    _, err = base.RestyClient.R().SetContext(ctx).
         SetResult(&resp).
         SetHeader("Authorization", d.AccessToken).
         SetHeader("File-Path", path.Join(dstDir.GetPath(), stream.GetName())).

@@ -248,10 +248,14 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
     }

     for i, partInfo := range resp.PartInfoList {
+        if utils.IsCanceled(ctx) {
+            return ctx.Err()
+        }
         req, err := http.NewRequest("PUT", partInfo.UploadUrl, io.LimitReader(file, DEFAULT))
         if err != nil {
             return err
         }
+        req = req.WithContext(ctx)
         res, err := base.HttpClient.Do(req)
         if err != nil {
             return err

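For the drivers that build raw net/http requests (AliDrive above; Cloud189, Onedrive, and YandexDisk elsewhere in this commit), the context is attached with req = req.WithContext(ctx) right after the request is constructed, so base.HttpClient.Do aborts mid-transfer once the task is canceled. A small isolated sketch of the same idea; the function name, client, and URL are placeholders, not driver code:

package main

import (
    "context"
    "net/http"
    "strings"
)

// uploadPart sends one chunk and stops early if ctx is canceled.
func uploadPart(ctx context.Context, client *http.Client, url, body string) error {
    req, err := http.NewRequest(http.MethodPut, url, strings.NewReader(body))
    if err != nil {
        return err
    }
    req = req.WithContext(ctx) // same pattern as the diff
    res, err := client.Do(req)
    if err != nil {
        return err // includes context.Canceled once ctx is done
    }
    return res.Body.Close()
}
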
@@ -8,7 +8,6 @@ import (

     "github.com/alist-org/alist/v3/drivers/base"
     "github.com/alist-org/alist/v3/internal/driver"
-    "github.com/alist-org/alist/v3/internal/errs"
     "github.com/alist-org/alist/v3/internal/model"
     "github.com/alist-org/alist/v3/pkg/cron"
     "github.com/alist-org/alist/v3/pkg/utils"

@@ -113,34 +112,4 @@ func (d *AliyundriveShare) Link(ctx context.Context, file model.Obj, args model.
     }, nil
 }

-func (d *AliyundriveShare) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
-    // TODO create folder
-    return errs.NotSupport
-}
-
-func (d *AliyundriveShare) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
-    // TODO move obj
-    return errs.NotSupport
-}
-
-func (d *AliyundriveShare) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
-    // TODO rename obj
-    return errs.NotSupport
-}
-
-func (d *AliyundriveShare) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
-    // TODO copy obj
-    return errs.NotSupport
-}
-
-func (d *AliyundriveShare) Remove(ctx context.Context, obj model.Obj) error {
-    // TODO remove obj
-    return errs.NotSupport
-}
-
-func (d *AliyundriveShare) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
-    // TODO upload file
-    return errs.NotSupport
-}
-
 var _ driver.Driver = (*AliyundriveShare)(nil)

@@ -192,6 +192,9 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
     }
     left = stream.GetSize()
     for i, partseq := range precreateResp.BlockList {
+        if utils.IsCanceled(ctx) {
+            return ctx.Err()
+        }
         byteSize := Default
         var byteData []byte
         if left < Default {

@@ -207,7 +210,11 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
         }
         u := "https://d.pcs.baidu.com/rest/2.0/pcs/superfile2"
         params["partseq"] = strconv.Itoa(partseq)
-        res, err := base.RestyClient.R().SetQueryParams(params).SetFileReader("file", stream.GetName(), bytes.NewReader(byteData)).Post(u)
+        res, err := base.RestyClient.R().
+            SetContext(ctx).
+            SetQueryParams(params).
+            SetFileReader("file", stream.GetName(), bytes.NewReader(byteData)).
+            Post(u)
         if err != nil {
             return err
         }

@@ -240,6 +240,9 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
     }

     for i := 0; i < count; i++ {
+        if utils.IsCanceled(ctx) {
+            return ctx.Err()
+        }
         uploadParams["partseq"] = fmt.Sprint(i)
         _, err = d.Post("https://c3.pcs.baidu.com/rest/2.0/pcs/superfile2", func(r *resty.Request) {
             r.SetContext(ctx)

@@ -113,6 +113,7 @@ func (d *FTP) Put(ctx context.Context, dstDir model.Obj, stream model.FileStream
     if err := d.login(); err != nil {
         return err
     }
+    // TODO: support cancel
     return d.conn.Stor(stdpath.Join(dstDir.GetPath(), stream.GetName()), stream)
 }

@@ -134,7 +134,7 @@ func (d *GoogleDrive) Put(ctx context.Context, dstDir model.Obj, stream model.Fi
             "X-Upload-Content-Type": stream.GetMimetype(),
             "X-Upload-Content-Length": strconv.FormatInt(stream.GetSize(), 10),
         }).
-        SetError(&e).SetBody(data)
+        SetError(&e).SetBody(data).SetContext(ctx)
     if obj != nil {
         res, err = req.Patch(url)
     } else {

@@ -9,6 +9,7 @@ import (

     "github.com/alist-org/alist/v3/drivers/base"
     "github.com/alist-org/alist/v3/internal/model"
+    "github.com/alist-org/alist/v3/pkg/utils"
     "github.com/go-resty/resty/v2"
     log "github.com/sirupsen/logrus"
 )

@@ -104,6 +105,9 @@ func (d *GoogleDrive) chunkUpload(ctx context.Context, stream model.FileStreamer
     var defaultChunkSize = d.ChunkSize * 1024 * 1024
     var finish int64 = 0
     for finish < stream.GetSize() {
+        if utils.IsCanceled(ctx) {
+            return ctx.Err()
+        }
         chunkSize := stream.GetSize() - finish
         if chunkSize > defaultChunkSize {
             chunkSize = defaultChunkSize

@@ -112,7 +116,7 @@ func (d *GoogleDrive) chunkUpload(ctx context.Context, stream model.FileStreamer
             req.SetHeaders(map[string]string{
                 "Content-Length": strconv.FormatInt(chunkSize, 10),
                 "Content-Range": fmt.Sprintf("bytes %d-%d/%d", finish, finish+chunkSize-1, stream.GetSize()),
-            }).SetBody(io.LimitReader(stream.GetReadCloser(), chunkSize))
+            }).SetBody(io.LimitReader(stream.GetReadCloser(), chunkSize)).SetContext(ctx)
         }, nil)
         if err != nil {
             return err

@@ -124,7 +124,7 @@ func (d *GooglePhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fi
     }

     resp, err := d.request(postUrl, http.MethodPost, func(req *resty.Request) {
-        req.SetBody(stream.GetReadCloser())
+        req.SetBody(stream.GetReadCloser()).SetContext(ctx)
     }, nil, postHeaders)

     if err != nil {

@@ -157,7 +157,7 @@ func (d *LanZou) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
             "id": "WU_FILE_0",
             "name": stream.GetName(),
             "folder_id": dstDir.GetID(),
-        }).SetFileReader("upload_file", stream.GetName(), stream)
+        }).SetFileReader("upload_file", stream.GetName(), stream).SetContext(ctx)
     }, nil, true)
     return err
 }

@@ -195,7 +195,7 @@ func (d *MediaTrack) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
         Key: &resp.Data.Object,
         Body: tempFile,
     }
-    _, err = uploader.Upload(input)
+    _, err = uploader.UploadWithContext(ctx, input)
     if err != nil {
         return err
     }

@@ -9,6 +9,7 @@ import (
     "github.com/alist-org/alist/v3/internal/driver"
     "github.com/alist-org/alist/v3/internal/errs"
     "github.com/alist-org/alist/v3/internal/model"
+    "github.com/alist-org/alist/v3/pkg/utils"
     log "github.com/sirupsen/logrus"
     "github.com/t3rm1n4l/go-mega"
 )

@@ -155,6 +156,9 @@ func (d *Mega) Put(ctx context.Context, dstDir model.Obj, stream model.FileStrea
     }

     for id := 0; id < u.Chunks(); id++ {
+        if utils.IsCanceled(ctx) {
+            return ctx.Err()
+        }
         _, chkSize, err := u.ChunkLocation(id)
         if err != nil {
             return err

@@ -137,7 +137,7 @@ func (d *Onedrive) Remove(ctx context.Context, obj model.Obj) error {
 func (d *Onedrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
     var err error
     if stream.GetSize() <= 4*1024*1024 {
-        err = d.upSmall(dstDir, stream)
+        err = d.upSmall(ctx, dstDir, stream)
     } else {
         err = d.upBig(ctx, dstDir, stream, up)
     }

@@ -147,14 +147,14 @@ func (d *Onedrive) GetFile(path string) (*File, error) {
     return &file, err
 }

-func (d *Onedrive) upSmall(dstDir model.Obj, stream model.FileStreamer) error {
+func (d *Onedrive) upSmall(ctx context.Context, dstDir model.Obj, stream model.FileStreamer) error {
     url := d.GetMetaUrl(false, stdpath.Join(dstDir.GetPath(), stream.GetName())) + "/content"
     data, err := io.ReadAll(stream)
     if err != nil {
         return err
     }
     _, err = d.Request(url, http.MethodPut, func(req *resty.Request) {
-        req.SetBody(data)
+        req.SetBody(data).SetContext(ctx)
     }, nil)
     return err
 }

@@ -185,6 +185,10 @@ func (d *Onedrive) upBig(ctx context.Context, dstDir model.Obj, stream model.Fil
             return err
         }
         req, err := http.NewRequest("PUT", uploadUrl, bytes.NewBuffer(byteData))
+        if err != nil {
+            return err
+        }
+        req = req.WithContext(ctx)
         req.Header.Set("Content-Length", strconv.Itoa(int(byteSize)))
         req.Header.Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", finish, finish+byteSize-1, stream.GetSize()))
         finish += byteSize

@@ -189,7 +189,7 @@ func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
         Key: &key,
         Body: tempFile,
     }
-    _, err = uploader.Upload(input)
+    _, err = uploader.UploadWithContext(ctx, input)
     return err
 }

@@ -6,7 +6,6 @@ import (

     "github.com/alist-org/alist/v3/internal/driver"
     "github.com/alist-org/alist/v3/internal/model"
-    "github.com/alist-org/alist/v3/internal/errs"
     "github.com/alist-org/alist/v3/pkg/utils"
     "github.com/go-resty/resty/v2"
     log "github.com/sirupsen/logrus"

@@ -79,34 +78,4 @@ func (d *PikPakShare) Link(ctx context.Context, file model.Obj, args model.LinkA
     return &link, nil
 }

-func (d *PikPakShare) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
-    // TODO create folder
-    return errs.NotSupport
-}
-
-func (d *PikPakShare) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
-    // TODO move obj
-    return errs.NotSupport
-}
-
-func (d *PikPakShare) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
-    // TODO rename obj
-    return errs.NotSupport
-}
-
-func (d *PikPakShare) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
-    // TODO copy obj
-    return errs.NotSupport
-}
-
-func (d *PikPakShare) Remove(ctx context.Context, obj model.Obj) error {
-    // TODO remove obj
-    return errs.NotSupport
-}
-
-func (d *PikPakShare) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
-    // TODO upload file
-    return errs.NotSupport
-}
-
 var _ driver.Driver = (*PikPakShare)(nil)

@@ -179,6 +179,9 @@ func (d *Quark) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
     partNumber := 1
     sizeDivide100 := stream.GetSize() / 100
     for left > 0 {
+        if utils.IsCanceled(ctx) {
+            return ctx.Err()
+        }
         if left > int64(partSize) {
             bytes = defaultBytes
         } else {

@@ -190,7 +193,7 @@ func (d *Quark) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
         }
         left -= int64(partSize)
         log.Debugf("left: %d", left)
-        m, err := d.upPart(pre, stream.GetMimetype(), partNumber, bytes)
+        m, err := d.upPart(ctx, pre, stream.GetMimetype(), partNumber, bytes)
         //m, err := driver.UpPart(pre, file.GetMIMEType(), partNumber, bytes, account, md5Str, sha1Str)
         if err != nil {
             return err

@@ -1,6 +1,7 @@
 package quark

 import (
+    "context"
     "crypto/md5"
     "encoding/base64"
     "errors"

@@ -118,7 +119,7 @@ func (d *Quark) upHash(md5, sha1, taskId string) (bool, error) {
     return resp.Data.Finish, err
 }

-func (d *Quark) upPart(pre UpPreResp, mineType string, partNumber int, bytes []byte) (string, error) {
+func (d *Quark) upPart(ctx context.Context, pre UpPreResp, mineType string, partNumber int, bytes []byte) (string, error) {
     //func (driver Quark) UpPart(pre UpPreResp, mineType string, partNumber int, bytes []byte, account *model.Account, md5Str, sha1Str string) (string, error) {
     timeStr := time.Now().UTC().Format(http.TimeFormat)
     data := base.Json{

@@ -135,7 +136,7 @@ x-oss-user-agent:aliyun-sdk-js/6.6.1 Chrome 98.0.4758.80 on Windows 10 64-bit
     }
     var resp UpAuthResp
     _, err := d.request("/file/upload/auth", http.MethodPost, func(req *resty.Request) {
-        req.SetBody(data)
+        req.SetBody(data).SetContext(ctx)
     }, &resp)
     if err != nil {
         return "", err

@@ -150,7 +151,7 @@ x-oss-user-agent:aliyun-sdk-js/6.6.1 Chrome 98.0.4758.80 on Windows 10 64-bit
     // }
     //}
     u := fmt.Sprintf("https://%s.%s/%s", pre.Data.Bucket, pre.Data.UploadUrl[7:], pre.Data.ObjKey)
-    res, err := base.RestyClient.R().
+    res, err := base.RestyClient.R().SetContext(ctx).
         SetHeaders(map[string]string{
             "Authorization": resp.Data.AuthKey,
             "Content-Type": mineType,

@@ -134,7 +134,7 @@ func (d *S3) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreame
         Key: &key,
         Body: stream,
     }
-    _, err := uploader.Upload(input)
+    _, err := uploader.UploadWithContext(ctx, input)
     return err
 }

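The S3-style drivers in this commit (123pan, MediaTrack, PikPak, and S3 itself) swap uploader.Upload(input) for uploader.UploadWithContext(ctx, input), so the aws-sdk-go s3manager stops the remaining parts of a multipart upload once the context is canceled. A hedged usage sketch; the bucket, key, and body are placeholders and the session setup is stock SDK boilerplate, not code from the drivers:

package main

import (
    "context"
    "strings"

    "github.com/aws/aws-sdk-go/aws"
    "github.com/aws/aws-sdk-go/aws/session"
    "github.com/aws/aws-sdk-go/service/s3/s3manager"
)

func upload(ctx context.Context, body string) error {
    sess := session.Must(session.NewSession())
    uploader := s3manager.NewUploader(sess)
    input := &s3manager.UploadInput{
        Bucket: aws.String("example-bucket"), // placeholder
        Key:    aws.String("example-key"),    // placeholder
        Body:   strings.NewReader(body),
    }
    // Canceling ctx aborts the upload and returns the context error.
    _, err := uploader.UploadWithContext(ctx, input)
    return err
}
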
@@ -132,11 +132,11 @@ func (d *Teambition) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
     var newFile *FileUpload
     if stream.GetSize() <= 20971520 {
         // post upload
-        newFile, err = d.upload(stream, token)
+        newFile, err = d.upload(ctx, stream, token)
     } else {
         // chunk upload
         //err = base.ErrNotImplement
-        newFile, err = d.chunkUpload(stream, token, up)
+        newFile, err = d.chunkUpload(ctx, stream, token, up)
     }
     if err != nil {
         return err

@@ -1,6 +1,7 @@
 package teambition

 import (
+    "context"
     "errors"
     "fmt"
     "io"

@@ -12,6 +13,7 @@ import (
     "github.com/alist-org/alist/v3/drivers/base"
     "github.com/alist-org/alist/v3/internal/driver"
     "github.com/alist-org/alist/v3/internal/model"
+    "github.com/alist-org/alist/v3/pkg/utils"
     "github.com/go-resty/resty/v2"
     log "github.com/sirupsen/logrus"
 )

@@ -115,13 +117,15 @@ func (d *Teambition) getFiles(parentId string) ([]model.Obj, error) {
     return files, nil
 }

-func (d *Teambition) upload(file model.FileStreamer, token string) (*FileUpload, error) {
+func (d *Teambition) upload(ctx context.Context, file model.FileStreamer, token string) (*FileUpload, error) {
     prefix := "tcs"
     if d.isInternational() {
         prefix = "us-tcs"
     }
     var newFile FileUpload
-    _, err := base.RestyClient.R().SetResult(&newFile).SetHeader("Authorization", token).
+    _, err := base.RestyClient.R().
+        SetContext(ctx).
+        SetResult(&newFile).SetHeader("Authorization", token).
         SetMultipartFormData(map[string]string{
             "name": file.GetName(),
             "type": file.GetMimetype(),

@@ -135,7 +139,7 @@ func (d *Teambition) upload(file model.FileStreamer, token string) (*FileUpload,
     return &newFile, nil
 }

-func (d *Teambition) chunkUpload(file model.FileStreamer, token string, up driver.UpdateProgress) (*FileUpload, error) {
+func (d *Teambition) chunkUpload(ctx context.Context, file model.FileStreamer, token string, up driver.UpdateProgress) (*FileUpload, error) {
     prefix := "tcs"
     referer := "https://www.teambition.com/"
     if d.isInternational() {

@@ -153,6 +157,9 @@ func (d *Teambition) chunkUpload(file model.FileStreamer, token string, up drive
         return nil, err
     }
     for i := 0; i < newChunk.Chunks; i++ {
+        if utils.IsCanceled(ctx) {
+            return nil, ctx.Err()
+        }
         chunkSize := newChunk.ChunkSize
         if i == newChunk.Chunks-1 {
             chunkSize = int(file.GetSize()) - i*chunkSize

@@ -166,7 +173,9 @@ func (d *Teambition) chunkUpload(file model.FileStreamer, token string, up drive
         u := fmt.Sprintf("https://%s.teambition.net/upload/chunk/%s?chunk=%d&chunks=%d",
             prefix, newChunk.FileKey, i+1, newChunk.Chunks)
         log.Debugf("url: %s", u)
-        _, err := base.RestyClient.R().SetHeaders(map[string]string{
+        _, err := base.RestyClient.R().
+            SetContext(ctx).
+            SetHeaders(map[string]string{
             "Authorization": token,
             "Content-Type": "application/octet-stream",
             "Referer": referer,

@@ -123,6 +123,7 @@ func (d *USS) Remove(ctx context.Context, obj model.Obj) error {
 }

 func (d *USS) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
+    // TODO not support cancel??
     return d.client.Put(&upyun.PutObjectConfig{
         Path: getKey(path.Join(dstDir.GetPath(), stream.GetName()), false),
         Reader: stream,

@@ -98,6 +98,7 @@ func (d *WebDav) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
         r.Header.Set("Content-Type", stream.GetMimetype())
         r.ContentLength = stream.GetSize()
     }
+    // TODO: support cancel
     err := d.client.WriteStream(path.Join(dstDir.GetPath(), stream.GetName()), stream, 0644, callback)
     return err
 }

@@ -121,10 +121,11 @@ func (d *YandexDisk) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
     if err != nil {
         return err
     }
+    req = req.WithContext(ctx)
     req.Header.Set("Content-Length", strconv.FormatInt(stream.GetSize(), 10))
     req.Header.Set("Content-Type", "application/octet-stream")
     res, err := base.HttpClient.Do(req)
-    res.Body.Close()
+    _ = res.Body.Close()
     return err
 }