feat: add quark driver

pull/1604/head
Noah Hsu 2022-09-02 21:36:47 +08:00
parent 0f2425ce53
commit decea4a739
8 changed files with 726 additions and 2 deletions

drivers/all.go

@@ -6,6 +6,7 @@ import (
_ "github.com/alist-org/alist/v3/drivers/local"
_ "github.com/alist-org/alist/v3/drivers/onedrive"
_ "github.com/alist-org/alist/v3/drivers/pikpak"
+_ "github.com/alist-org/alist/v3/drivers/quark"
_ "github.com/alist-org/alist/v3/drivers/teambition"
_ "github.com/alist-org/alist/v3/drivers/virtual"
)

drivers/quark/driver.go (new file, 239 lines)

@@ -0,0 +1,239 @@
package quark
import (
"context"
"crypto/md5"
"crypto/sha1"
"encoding/hex"
"io"
"net/http"
"os"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
type Quark struct {
model.Storage
Addition
}
func (d *Quark) Config() driver.Config {
return config
}
func (d *Quark) GetAddition() driver.Additional {
return d.Addition
}
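// Init parses the stored Addition JSON and verifies the cookie by calling the /config endpoint.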
func (d *Quark) Init(ctx context.Context, storage model.Storage) error {
d.Storage = storage
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
if err != nil {
return err
}
_, err = d.request("/config", http.MethodGet, nil, nil)
return err
}
func (d *Quark) Drop(ctx context.Context) error {
return nil
}
func (d *Quark) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.GetFiles(dir.GetID())
if err != nil {
return nil, err
}
objs := make([]model.Obj, len(files))
for i := 0; i < len(files); i++ {
objs[i] = fileToObj(files[i])
}
return objs, nil
}
//func (d *Quark) Get(ctx context.Context, path string) (model.Obj, error) {
// // TODO this is optional
// return nil, errs.NotImplement
//}
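// Link requests a temporary download URL for the file; the link has to be fetched with the
// account cookie and a pan.quark.cn referer, so both are attached to the returned header.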
func (d *Quark) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
data := base.Json{
"fids": []string{file.GetID()},
}
var resp DownResp
_, err := d.request("/file/download", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
if err != nil {
return nil, err
}
return &model.Link{
URL: resp.Data[0].DownloadUrl,
Header: http.Header{
"Cookie": []string{d.Cookie},
"Referer": []string{"https://pan.quark.cn"},
},
}, nil
}
func (d *Quark) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
data := base.Json{
"dir_init_lock": false,
"dir_path": "",
"file_name": dirName,
"pdir_fid": parentDir.GetID(),
}
_, err := d.request("/file", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *Quark) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
data := base.Json{
"action_type": 1,
"exclude_fids": []string{},
"filelist": []string{srcObj.GetID()},
"to_pdir_fid": dstDir.GetID(),
}
_, err := d.request("/file/move", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *Quark) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
data := base.Json{
"fid": srcObj.GetID(),
"file_name": newName,
}
_, err := d.request("/file/rename", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *Quark) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
return errs.NotSupport
}
func (d *Quark) Remove(ctx context.Context, obj model.Obj) error {
data := base.Json{
"action_type": 1,
"exclude_fids": []string{},
"filelist": []string{obj.GetID()},
}
_, err := d.request("/file/delete", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
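// Put uploads a stream in several stages: buffer it into a temp file (unless it already is one),
// compute its MD5 and SHA1, register an upload task via upPre, try a hash-based instant upload
// via upHash, and otherwise upload the data part by part to OSS before committing and finishing
// the task.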
func (d *Quark) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
var tempFile *os.File
var err error
if f, ok := stream.GetReadCloser().(*os.File); ok {
tempFile = f
} else {
tempFile, err = os.CreateTemp(conf.Conf.TempDir, "file-*")
if err != nil {
return err
}
defer func() {
_ = tempFile.Close()
_ = os.Remove(tempFile.Name())
}()
_, err = io.Copy(tempFile, stream)
if err != nil {
return err
}
_, err = tempFile.Seek(0, io.SeekStart)
if err != nil {
return err
}
}
m := md5.New()
_, err = io.Copy(m, tempFile)
if err != nil {
return err
}
_, err = tempFile.Seek(0, io.SeekStart)
if err != nil {
return err
}
md5Str := hex.EncodeToString(m.Sum(nil))
s := sha1.New()
_, err = io.Copy(s, tempFile)
if err != nil {
return err
}
_, err = tempFile.Seek(0, io.SeekStart)
if err != nil {
return err
}
sha1Str := hex.EncodeToString(s.Sum(nil))
// pre
pre, err := d.upPre(stream, dstDir.GetID())
if err != nil {
return err
}
log.Debugln("hash: ", md5Str, sha1Str)
// hash
finish, err := d.upHash(md5Str, sha1Str, pre.Data.TaskId)
if err != nil {
return err
}
if finish {
return nil
}
// part up
partSize := pre.Metadata.PartSize
var bytes []byte
md5s := make([]string, 0)
defaultBytes := make([]byte, partSize)
left := stream.GetSize()
partNumber := 1
sizeDivide100 := stream.GetSize() / 100
for left > 0 {
if left > int64(partSize) {
bytes = defaultBytes
} else {
bytes = make([]byte, left)
}
_, err := io.ReadFull(tempFile, bytes)
if err != nil {
return err
}
left -= int64(partSize)
log.Debugf("left: %d", left)
m, err := d.upPart(pre, stream.GetMimetype(), partNumber, bytes)
//m, err := driver.UpPart(pre, file.GetMIMEType(), partNumber, bytes, account, md5Str, sha1Str)
if err != nil {
return err
}
if m == "finish" {
return nil
}
md5s = append(md5s, m)
partNumber++
if sizeDivide100 > 0 {
up(100 - int(left/sizeDivide100))
}
}
err = d.upCommit(pre, md5s)
if err != nil {
return err
}
return d.upFinish(pre)
}
func (d *Quark) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
return nil, errs.NotSupport
}
var _ driver.Driver = (*Quark)(nil)

drivers/quark/meta.go (new file, 25 lines)

@@ -0,0 +1,25 @@
package quark
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
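// Addition holds the user-supplied options for a Quark storage: the account cookie is required,
// the remaining fields select the root folder and the listing order.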
type Addition struct {
Cookie string `json:"cookie" required:"true"`
driver.RootFolderID
OrderBy string `json:"order_by" type:"select" options:"file_type,file_name,updated_at" default:"file_name"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
}
var config = driver.Config{
Name: "Quark",
}
func New() driver.Driver {
return &Quark{}
}
func init() {
op.RegisterDriver(config, New)
}

drivers/quark/types.go (new file, 150 lines)

@@ -0,0 +1,150 @@
package quark
import (
"time"
"github.com/alist-org/alist/v3/internal/model"
)
type Resp struct {
Status int `json:"status"`
Code int `json:"code"`
Message string `json:"message"`
//ReqId string `json:"req_id"`
//Timestamp int `json:"timestamp"`
}
type File struct {
Fid string `json:"fid"`
FileName string `json:"file_name"`
//PdirFid string `json:"pdir_fid"`
//Category int `json:"category"`
//FileType int `json:"file_type"`
Size int64 `json:"size"`
//FormatType string `json:"format_type"`
//Status int `json:"status"`
//Tags string `json:"tags,omitempty"`
//LCreatedAt int64 `json:"l_created_at"`
LUpdatedAt int64 `json:"l_updated_at"`
//NameSpace int `json:"name_space"`
//IncludeItems int `json:"include_items,omitempty"`
//RiskType int `json:"risk_type"`
//BackupSign int `json:"backup_sign"`
//Duration int `json:"duration"`
//FileSource string `json:"file_source"`
File bool `json:"file"`
//CreatedAt int64 `json:"created_at"`
UpdatedAt int64 `json:"updated_at"`
//PrivateExtra struct {} `json:"_private_extra"`
//ObjCategory string `json:"obj_category,omitempty"`
//Thumbnail string `json:"thumbnail,omitempty"`
}
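// fileToObj maps a Quark file entry onto an alist object; the timestamps are Unix milliseconds,
// hence time.UnixMilli.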
func fileToObj(f File) *model.Object {
return &model.Object{
ID: f.Fid,
Name: f.FileName,
Size: f.Size,
Modified: time.UnixMilli(f.UpdatedAt),
IsFolder: !f.File,
}
}
type SortResp struct {
Resp
Data struct {
List []File `json:"list"`
} `json:"data"`
Metadata struct {
Size int `json:"_size"`
Page int `json:"_page"`
Count int `json:"_count"`
Total int `json:"_total"`
Way string `json:"way"`
} `json:"metadata"`
}
type DownResp struct {
Resp
Data []struct {
//Fid string `json:"fid"`
//FileName string `json:"file_name"`
//PdirFid string `json:"pdir_fid"`
//Category int `json:"category"`
//FileType int `json:"file_type"`
//Size int `json:"size"`
//FormatType string `json:"format_type"`
//Status int `json:"status"`
//Tags string `json:"tags"`
//LCreatedAt int64 `json:"l_created_at"`
//LUpdatedAt int64 `json:"l_updated_at"`
//NameSpace int `json:"name_space"`
//Thumbnail string `json:"thumbnail"`
DownloadUrl string `json:"download_url"`
//Md5 string `json:"md5"`
//RiskType int `json:"risk_type"`
//RangeSize int `json:"range_size"`
//BackupSign int `json:"backup_sign"`
//ObjCategory string `json:"obj_category"`
//Duration int `json:"duration"`
//FileSource string `json:"file_source"`
//File bool `json:"file"`
//CreatedAt int64 `json:"created_at"`
//UpdatedAt int64 `json:"updated_at"`
//PrivateExtra struct {
//} `json:"_private_extra"`
} `json:"data"`
//Metadata struct {
// Acc2 string `json:"acc2"`
// Acc1 string `json:"acc1"`
//} `json:"metadata"`
}
type UpPreResp struct {
Resp
Data struct {
TaskId string `json:"task_id"`
Finish bool `json:"finish"`
UploadId string `json:"upload_id"`
ObjKey string `json:"obj_key"`
UploadUrl string `json:"upload_url"`
Fid string `json:"fid"`
Bucket string `json:"bucket"`
Callback struct {
CallbackUrl string `json:"callbackUrl"`
CallbackBody string `json:"callbackBody"`
} `json:"callback"`
FormatType string `json:"format_type"`
Size int `json:"size"`
AuthInfo string `json:"auth_info"`
} `json:"data"`
Metadata struct {
PartThread int `json:"part_thread"`
Acc2 string `json:"acc2"`
Acc1 string `json:"acc1"`
PartSize int `json:"part_size"` // part (chunk) size
} `json:"metadata"`
}
type HashResp struct {
Resp
Data struct {
Finish bool `json:"finish"`
Fid string `json:"fid"`
Thumbnail string `json:"thumbnail"`
FormatType string `json:"format_type"`
} `json:"data"`
Metadata struct {
} `json:"metadata"`
}
type UpAuthResp struct {
Resp
Data struct {
AuthKey string `json:"auth_key"`
Speed int `json:"speed"`
Headers []interface{} `json:"headers"`
} `json:"data"`
Metadata struct {
} `json:"metadata"`
}

drivers/quark/util.go (new file, 248 lines)

@@ -0,0 +1,248 @@
package quark
import (
"crypto/md5"
"encoding/base64"
"errors"
"fmt"
"net/http"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/cookie"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
// do others that are not defined in the Driver interface
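// request performs an authenticated call against the Quark clouddrive API: it attaches the
// stored cookie, picks up a rotated __puus cookie from the response and persists it, and turns
// API-level failures (status >= 400 or a non-zero code) into errors.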
func (d *Quark) request(pathname string, method string, callback func(req *resty.Request), resp interface{}) ([]byte, error) {
u := "https://drive.quark.cn/1/clouddrive" + pathname
req := base.RestyClient.R()
req.SetHeaders(map[string]string{
"Cookie": d.Cookie,
"Accept": "application/json, text/plain, */*",
"Referer": "https://pan.quark.cn/",
})
req.SetQueryParam("pr", "ucpro")
req.SetQueryParam("fr", "pc")
if callback != nil {
callback(req)
}
if resp != nil {
req.SetResult(resp)
}
var e Resp
req.SetError(&e)
res, err := req.Execute(method, u)
if err != nil {
return nil, err
}
__puus := cookie.GetCookie(res.Cookies(), "__puus")
if __puus != nil {
d.Cookie = cookie.SetStr(d.Cookie, "__puus", __puus.Value)
op.MustSaveDriverStorage(d)
}
if e.Status >= 400 || e.Code != 0 {
return nil, errors.New(e.Message)
}
return res.Body(), nil
}
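// GetFiles lists a directory by paging through /file/sort, 100 entries per page, until the
// reported total has been fetched.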
func (d *Quark) GetFiles(parent string) ([]File, error) {
files := make([]File, 0)
page := 1
size := 100
query := map[string]string{
"pdir_fid": parent,
"_size": strconv.Itoa(size),
"_fetch_total": "1",
"_sort": "file_type:asc," + d.OrderBy + ":" + d.OrderDirection,
}
for {
query["_page"] = strconv.Itoa(page)
var resp SortResp
_, err := d.request("/file/sort", http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &resp)
if err != nil {
return nil, err
}
files = append(files, resp.Data.List...)
if page*size >= resp.Metadata.Total {
break
}
page++
}
return files, nil
}
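// upPre registers an upload task for the file and returns the task id together with the OSS
// bucket, object key, callback and auth info used by the following steps.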
func (d *Quark) upPre(file model.FileStreamer, parentId string) (UpPreResp, error) {
now := time.Now()
data := base.Json{
"ccp_hash_update": true,
"dir_name": "",
"file_name": file.GetName(),
"format_type": file.GetMimetype(),
"l_created_at": now.UnixMilli(),
"l_updated_at": now.UnixMilli(),
"pdir_fid": parentId,
"size": file.GetSize(),
}
var resp UpPreResp
_, err := d.request("/file/upload/pre", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
return resp, err
}
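// upHash reports the file's MD5 and SHA1 for the task; if the server already has the content,
// Finish is true and the upload can stop here (instant upload).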
func (d *Quark) upHash(md5, sha1, taskId string) (bool, error) {
data := base.Json{
"md5": md5,
"sha1": sha1,
"task_id": taskId,
}
log.Debugf("hash: %+v", data)
var resp HashResp
_, err := d.request("/file/update/hash", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
return resp.Data.Finish, err
}
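// upPart asks /file/upload/auth to sign the canonical OSS request for a single part, PUTs the
// part to the bucket and returns its ETag for the final commit.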
func (d *Quark) upPart(pre UpPreResp, mimeType string, partNumber int, bytes []byte) (string, error) {
//func (driver Quark) UpPart(pre UpPreResp, mimeType string, partNumber int, bytes []byte, account *model.Account, md5Str, sha1Str string) (string, error) {
timeStr := time.Now().UTC().Format(http.TimeFormat)
data := base.Json{
"auth_info": pre.Data.AuthInfo,
"auth_meta": fmt.Sprintf(`PUT

%s
%s
x-oss-date:%s
x-oss-user-agent:aliyun-sdk-js/6.6.1 Chrome 98.0.4758.80 on Windows 10 64-bit
/%s/%s?partNumber=%d&uploadId=%s`,
mimeType, timeStr, timeStr, pre.Data.Bucket, pre.Data.ObjKey, partNumber, pre.Data.UploadId),
"task_id": pre.Data.TaskId,
}
var resp UpAuthResp
_, err := d.request("/file/upload/auth", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
if err != nil {
return "", err
}
//if partNumber == 1 {
// finish, err := driver.UpHash(md5Str, sha1Str, pre.Data.TaskId, account)
// if err != nil {
// return "", err
// }
// if finish {
// return "finish", nil
// }
//}
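// UploadUrl is returned with an http:// scheme; strip it and address the bucket as a
// virtual-hosted-style OSS endpoint over https.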
u := fmt.Sprintf("https://%s.%s/%s", pre.Data.Bucket, pre.Data.UploadUrl[7:], pre.Data.ObjKey)
res, err := base.RestyClient.R().
SetHeaders(map[string]string{
"Authorization": resp.Data.AuthKey,
"Content-Type": mineType,
"Referer": "https://pan.quark.cn/",
"x-oss-date": timeStr,
"x-oss-user-agent": "aliyun-sdk-js/6.6.1 Chrome 98.0.4758.80 on Windows 10 64-bit",
}).
SetQueryParams(map[string]string{
"partNumber": strconv.Itoa(partNumber),
"uploadId": pre.Data.UploadId,
}).SetBody(bytes).Put(u)
if err != nil {
return "", err
}
if res.StatusCode() != 200 {
return "", fmt.Errorf("up status: %d, error: %s", res.StatusCode(), res.String())
}
return res.Header().Get("ETag"), nil
}
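// upCommit builds the CompleteMultipartUpload XML from the collected ETags, has
// /file/upload/auth sign it (including Content-MD5 and the base64-encoded callback)
// and POSTs it to OSS to complete the multipart upload.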
func (d *Quark) upCommit(pre UpPreResp, md5s []string) error {
timeStr := time.Now().UTC().Format(http.TimeFormat)
log.Debugf("md5s: %+v", md5s)
bodyBuilder := strings.Builder{}
bodyBuilder.WriteString(`<?xml version="1.0" encoding="UTF-8"?>
<CompleteMultipartUpload>
`)
for i, m := range md5s {
bodyBuilder.WriteString(fmt.Sprintf(`<Part>
<PartNumber>%d</PartNumber>
<ETag>%s</ETag>
</Part>
`, i+1, m))
}
bodyBuilder.WriteString("</CompleteMultipartUpload>")
body := bodyBuilder.String()
m := md5.New()
m.Write([]byte(body))
contentMd5 := base64.StdEncoding.EncodeToString(m.Sum(nil))
callbackBytes, err := utils.Json.Marshal(pre.Data.Callback)
if err != nil {
return err
}
callbackBase64 := base64.StdEncoding.EncodeToString(callbackBytes)
data := base.Json{
"auth_info": pre.Data.AuthInfo,
"auth_meta": fmt.Sprintf(`POST
%s
application/xml
%s
x-oss-callback:%s
x-oss-date:%s
x-oss-user-agent:aliyun-sdk-js/6.6.1 Chrome 98.0.4758.80 on Windows 10 64-bit
/%s/%s?uploadId=%s`,
contentMd5, timeStr, callbackBase64, timeStr,
pre.Data.Bucket, pre.Data.ObjKey, pre.Data.UploadId),
"task_id": pre.Data.TaskId,
}
log.Debugf("xml: %s", body)
log.Debugf("auth data: %+v", data)
var resp UpAuthResp
_, err = d.request("/file/upload/auth", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
if err != nil {
return err
}
u := fmt.Sprintf("https://%s.%s/%s", pre.Data.Bucket, pre.Data.UploadUrl[7:], pre.Data.ObjKey)
res, err := base.RestyClient.R().
SetHeaders(map[string]string{
"Authorization": resp.Data.AuthKey,
"Content-MD5": contentMd5,
"Content-Type": "application/xml",
"Referer": "https://pan.quark.cn/",
"x-oss-callback": callbackBase64,
"x-oss-date": timeStr,
"x-oss-user-agent": "aliyun-sdk-js/6.6.1 Chrome 98.0.4758.80 on Windows 10 64-bit",
}).
SetQueryParams(map[string]string{
"uploadId": pre.Data.UploadId,
}).SetBody(body).Post(u)
if err != nil {
return err
}
if res.StatusCode() != 200 {
return fmt.Errorf("up status: %d, error: %s", res.StatusCode(), res.String())
}
return nil
}
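// upFinish tells Quark that the OSS upload is complete so the file appears in the drive;
// the one-second sleep appears to give the backend time to register it.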
func (d *Quark) upFinish(pre UpPreResp) error {
data := base.Json{
"obj_key": pre.Data.ObjKey,
"task_id": pre.Data.TaskId,
}
_, err := d.request("/file/upload/finish", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
if err != nil {
return err
}
time.Sleep(time.Second)
return nil
}

drivers/teambition/driver.go

@@ -148,7 +148,7 @@ func (d *Teambition) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
} else {
// chunk upload
//err = base.ErrNotImplement
-newFile, err = d.chunkUpload(stream, token)
+newFile, err = d.chunkUpload(stream, token, up)
}
if err != nil {
return err
drivers/teambition/util.go

@@ -10,6 +10,7 @@ import (
"time"
"github.com/alist-org/alist/v3/drivers/base"
+"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
@@ -134,7 +135,7 @@ func (d *Teambition) upload(file model.FileStreamer, token string) (*FileUpload,
return &newFile, nil
}
-func (d *Teambition) chunkUpload(file model.FileStreamer, token string) (*FileUpload, error) {
+func (d *Teambition) chunkUpload(file model.FileStreamer, token string, up driver.UpdateProgress) (*FileUpload, error) {
prefix := "tcs"
referer := "https://www.teambition.com/"
if d.isInternational() {
@@ -176,6 +177,7 @@ func (d *Teambition) chunkUpload(file model.FileStreamer, token string) (*FileUp
if err != nil {
return nil, err
}
+up(i * 100 / newChunk.Chunks)
}
_, err = base.RestyClient.R().SetHeader("Authorization", token).Post(
fmt.Sprintf("https://%s.teambition.net/upload/chunk/%s",
pkg/cookie/cookie.go (new file, 59 lines)

@@ -0,0 +1,59 @@
package cookie
import (
"net/http"
"strings"
)
func Parse(str string) []*http.Cookie {
header := http.Header{}
header.Add("Cookie", str)
request := http.Request{Header: header}
return request.Cookies()
}
func ToString(cookies []*http.Cookie) string {
if cookies == nil {
return ""
}
cookieStrings := make([]string, len(cookies))
for i, cookie := range cookies {
cookieStrings[i] = cookie.String()
}
return strings.Join(cookieStrings, ";")
}
func SetCookie(cookies []*http.Cookie, name, value string) []*http.Cookie {
for i, cookie := range cookies {
if cookie.Name == name {
cookies[i].Value = value
return cookies
}
}
cookies = append(cookies, &http.Cookie{Name: name, Value: value})
return cookies
}
func GetCookie(cookies []*http.Cookie, name string) *http.Cookie {
for _, cookie := range cookies {
if cookie.Name == name {
return cookie
}
}
return nil
}
func SetStr(cookiesStr, name, value string) string {
cookies := Parse(cookiesStr)
cookies = SetCookie(cookies, name, value)
return ToString(cookies)
}
func GetStr(cookiesStr, name string) string {
cookies := Parse(cookiesStr)
cookie := GetCookie(cookies, name)
if cookie == nil {
return ""
}
return cookie.Value
}
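
For reference, a minimal usage sketch of these helpers (cookie names and values are illustrative, and the snippet assumes the alist module is available on the module path):

package main

import (
	"fmt"

	"github.com/alist-org/alist/v3/pkg/cookie"
)

func main() {
	// A raw Cookie header value as a driver would store it.
	raw := "__pus=abc; __puus=old"

	// Replace (or append) the __puus entry, as the quark driver does after each request.
	raw = cookie.SetStr(raw, "__puus", "new")

	fmt.Println(cookie.GetStr(raw, "__puus")) // new
	fmt.Println(raw)                          // __pus=abc;__puus=new
}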