feat: adapt hash feature for some drivers (#5180)

* feat(pikpak,thunder): adapt gcid hash

* chore(weiyun): add note

* feat(baidu_netdisk): adapt rapid upload

* feat(baidu_photo): adapt hash

* feat(189pc): adapt rapid upload

* feat(mopan): adapt ctime

* feat(139): adapt hash and ctime

---------

Co-authored-by: Andy Hsu <i@nn.ci>
Branch: pull/5205/head
Author: foxxorcat, 2023-09-06 14:46:35 +08:00 (committed by GitHub)
Commit: 7200344ace (parent: b313ac4daa)

17 changed files with 362 additions and 194 deletions

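The thread running through all of these drivers is that listed objects and upload streams now carry hash metadata via utils.HashInfo, so later steps (rapid upload, integrity checks) can reuse a hash instead of re-reading the file. A minimal sketch of the idea, assuming the repo's model and utils packages; the MD5 value here is made up:

package main

import (
	"fmt"

	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/pkg/utils"
)

func main() {
	// A driver attaches a known hash to the object it lists...
	obj := model.Object{
		Name:     "example.bin",
		Size:     1 << 20,
		HashInfo: utils.NewHashInfo(utils.MD5, "d41d8cd98f00b204e9800998ecf8427e"),
	}

	// ...and consumers read it back instead of re-hashing the file.
	fmt.Println(obj.GetHash().GetHash(utils.MD5))
}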
drivers/139/types.go

@ -10,7 +10,7 @@ type Catalog struct {
CatalogID string `json:"catalogID"`
CatalogName string `json:"catalogName"`
//CatalogType int `json:"catalogType"`
//CreateTime string `json:"createTime"`
CreateTime string `json:"createTime"`
UpdateTime string `json:"updateTime"`
//IsShared bool `json:"isShared"`
//CatalogLevel int `json:"catalogLevel"`
@ -63,7 +63,7 @@ type Content struct {
//ParentCatalogID string `json:"parentCatalogId"`
//Channel string `json:"channel"`
//GeoLocFlag string `json:"geoLocFlag"`
//Digest string `json:"digest"`
Digest string `json:"digest"`
//Version string `json:"version"`
//FileEtag string `json:"fileEtag"`
//FileVersion string `json:"fileVersion"`
@ -141,7 +141,7 @@ type CloudContent struct {
//ContentSuffix string `json:"contentSuffix"`
ContentSize int64 `json:"contentSize"`
//ContentDesc string `json:"contentDesc"`
//CreateTime string `json:"createTime"`
CreateTime string `json:"createTime"`
//Shottime interface{} `json:"shottime"`
LastUpdateTime string `json:"lastUpdateTime"`
ThumbnailURL string `json:"thumbnailURL"`
@ -165,7 +165,7 @@ type CloudCatalog struct {
CatalogID string `json:"catalogID"`
CatalogName string `json:"catalogName"`
//CloudID string `json:"cloudID"`
//CreateTime string `json:"createTime"`
CreateTime string `json:"createTime"`
LastUpdateTime string `json:"lastUpdateTime"`
//Creator string `json:"creator"`
//CreatorNickname string `json:"creatorNickname"`

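These fields were previously commented out, so encoding/json silently dropped createTime and digest when decoding the API response. A reduced stand-in (struct and JSON below are illustrative, not the driver's full types) showing the effect of uncommenting a tagged field:

package main

import (
	"encoding/json"
	"fmt"
)

type Content struct {
	ContentName string `json:"contentName"`
	Digest      string `json:"digest"` // previously commented out, hence never populated
}

func main() {
	var c Content
	_ = json.Unmarshal([]byte(`{"contentName":"a.txt","digest":"ABC123"}`), &c)
	fmt.Println(c.Digest) // "ABC123"; with the field commented out there was nowhere to put it
}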
drivers/139/util.go

@ -139,6 +139,7 @@ func (d *Yun139) getFiles(catalogID string) ([]model.Obj, error) {
Name: catalog.CatalogName,
Size: 0,
Modified: getTime(catalog.UpdateTime),
Ctime: getTime(catalog.CreateTime),
IsFolder: true,
}
files = append(files, &f)
@ -150,6 +151,7 @@ func (d *Yun139) getFiles(catalogID string) ([]model.Obj, error) {
Name: content.ContentName,
Size: content.ContentSize,
Modified: getTime(content.UpdateTime),
HashInfo: utils.NewHashInfo(utils.MD5, content.Digest),
},
Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
//Thumbnail: content.BigthumbnailURL,
@ -202,6 +204,7 @@ func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
Size: 0,
IsFolder: true,
Modified: getTime(catalog.LastUpdateTime),
Ctime: getTime(catalog.CreateTime),
}
files = append(files, &f)
}
@ -212,6 +215,7 @@ func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
Name: content.ContentName,
Size: content.ContentSize,
Modified: getTime(content.LastUpdateTime),
Ctime: getTime(content.CreateTime),
},
Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
//Thumbnail: content.BigthumbnailURL,

drivers/189pc/driver.go

@ -27,10 +27,15 @@ type Cloud189PC struct {
tokenInfo *AppSessionResp
uploadThread int
storageConfig driver.Config
}
func (y *Cloud189PC) Config() driver.Config {
return config
if y.storageConfig.Name == "" {
y.storageConfig = config
}
return y.storageConfig
}
func (y *Cloud189PC) GetAddition() driver.Additional {
@ -38,6 +43,9 @@ func (y *Cloud189PC) GetAddition() driver.Additional {
}
func (y *Cloud189PC) Init(ctx context.Context) (err error) {
// Compatibility with the legacy upload API
y.storageConfig.NoOverwriteUpload = y.isFamily() && (y.Addition.RapidUpload || y.Addition.UploadMethod == "old")
// Handle personal-cloud and family-cloud parameters
if y.isFamily() && y.RootFolderID == "-11" {
y.RootFolderID = ""
@ -118,7 +126,7 @@ func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkAr
// Follow the redirect to obtain the real download link
downloadUrl.URL = strings.Replace(strings.ReplaceAll(downloadUrl.URL, "&amp;", "&"), "http://", "https://", 1)
res, err := base.NoRedirectClient.R().SetContext(ctx).Get(downloadUrl.URL)
res, err := base.NoRedirectClient.R().SetContext(ctx).Head(downloadUrl.URL)
if err != nil {
return nil, err
}
@ -302,6 +310,13 @@ func (y *Cloud189PC) Remove(ctx context.Context, obj model.Obj) error {
}
func (y *Cloud189PC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// Slow to respond; enable only when needed
if y.Addition.RapidUpload {
if newObj, err := y.RapidUpload(ctx, dstDir, stream); err == nil {
return newObj, nil
}
}
switch y.UploadMethod {
case "old":
return y.OldUpload(ctx, dstDir, stream, up)

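One detail above: Link now resolves the download redirect with a HEAD request instead of a GET (mopan below gets the same change), so the real URL is obtained without transferring any body bytes. A standalone sketch of the same idea using only net/http; the URL is illustrative:

package main

import (
	"fmt"
	"net/http"
)

func main() {
	// A client that reports redirects instead of following them,
	// mirroring what base.NoRedirectClient does in the diff.
	client := &http.Client{
		CheckRedirect: func(req *http.Request, via []*http.Request) error {
			return http.ErrUseLastResponse
		},
	}

	url := "https://example.com/download" // illustrative
	res, err := client.Head(url)
	if err != nil {
		panic(err)
	}
	defer res.Body.Close()

	if res.StatusCode == http.StatusFound { // 302, as checked in the drivers
		url = res.Header.Get("Location")
	}
	fmt.Println("real url:", url)
}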
drivers/189pc/meta.go

@ -16,6 +16,7 @@ type Addition struct {
FamilyID string `json:"family_id"`
UploadMethod string `json:"upload_method" type:"select" options:"stream,rapid,old" default:"stream"`
UploadThread string `json:"upload_thread" default:"3" help:"1<=thread<=32"`
RapidUpload bool `json:"rapid_upload"`
NoUseOcr bool `json:"no_use_ocr"`
}

drivers/189pc/utils.go

@ -546,16 +546,30 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
return resp.toFile(), nil
}
func (y *Cloud189PC) RapidUpload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer) (model.Obj, error) {
fileMd5 := stream.GetHash().GetHash(utils.MD5)
if len(fileMd5) < utils.MD5.Width {
return nil, errors.New("invalid hash")
}
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, stream.GetName(), fmt.Sprint(stream.GetSize()))
if err != nil {
return nil, err
}
if uploadInfo.FileDataExists != 1 {
return nil, errors.New("rapid upload fail")
}
return y.OldUploadCommit(ctx, uploadInfo.FileCommitUrl, uploadInfo.UploadFileId)
}
// Fast upload
func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// The full-file MD5 is needed, so the stream must support io.Seek
tempFile, err := file.CacheFullInTempFile()
if err != nil {
return nil, err
}
defer func() {
_ = tempFile.Close()
}()
var sliceSize = partSize(file.GetSize())
count := int(math.Ceil(float64(file.GetSize()) / float64(sliceSize)))
@ -739,68 +753,24 @@ func (y *Cloud189PC) GetMultiUploadUrls(ctx context.Context, uploadFileId string
// Legacy upload; the family cloud does not support overwriting
func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// The full-file MD5 is needed, so the stream must support io.Seek
tempFile, err := file.CacheFullInTempFile()
if err != nil {
return nil, err
}
defer func() {
_ = tempFile.Close()
}()
// Compute the MD5
fileMd5 := md5.New()
if _, err := io.Copy(fileMd5, tempFile); err != nil {
fileMd5, err := utils.HashFile(utils.MD5, tempFile)
if err != nil {
return nil, err
}
if _, err = tempFile.Seek(0, io.SeekStart); err != nil {
return nil, err
}
fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
// Create an upload session
var uploadInfo CreateUploadFileResp
fullUrl := API_URL + "/createUploadFile.action"
if y.isFamily() {
fullUrl = API_URL + "/family/file/createFamilyFile.action"
}
_, err = y.post(fullUrl, func(req *resty.Request) {
req.SetContext(ctx)
if y.isFamily() {
req.SetQueryParams(map[string]string{
"familyId": y.FamilyID,
"fileMd5": fileMd5Hex,
"fileName": file.GetName(),
"fileSize": fmt.Sprint(file.GetSize()),
"parentId": dstDir.GetID(),
"resumePolicy": "1",
})
} else {
req.SetFormData(map[string]string{
"parentFolderId": dstDir.GetID(),
"fileName": file.GetName(),
"size": fmt.Sprint(file.GetSize()),
"md5": fileMd5Hex,
"opertype": "3",
"flag": "1",
"resumePolicy": "1",
"isLog": "0",
// "baseFileId": "",
// "lastWrite":"",
// "localPath": strings.ReplaceAll(param.LocalPath, "\\", "/"),
// "fileExt": "",
})
}
}, &uploadInfo)
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, file.GetName(), fmt.Sprint(file.GetSize()))
if err != nil {
return nil, err
}
// The file does not exist in the drive yet; start uploading
status := GetUploadFileStatusResp{CreateUploadFileResp: uploadInfo}
for status.Size < file.GetSize() && status.FileDataExists != 1 {
status := GetUploadFileStatusResp{CreateUploadFileResp: *uploadInfo}
for status.GetSize() < file.GetSize() && status.FileDataExists != 1 {
if utils.IsCanceled(ctx) {
return nil, ctx.Err()
}
@ -839,28 +809,70 @@ func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model
if err != nil {
return nil, err
}
if _, err := tempFile.Seek(status.GetSize(), io.SeekStart); err != nil {
return nil, err
}
up(int(status.Size / file.GetSize()))
up(int(status.GetSize() * 100 / file.GetSize()))
}
// Commit
return y.OldUploadCommit(ctx, status.FileCommitUrl, status.UploadFileId)
}
// Create an upload session
func (y *Cloud189PC) OldUploadCreate(ctx context.Context, parentID string, fileMd5, fileName, fileSize string) (*CreateUploadFileResp, error) {
var uploadInfo CreateUploadFileResp
fullUrl := API_URL + "/createUploadFile.action"
if y.isFamily() {
fullUrl = API_URL + "/family/file/createFamilyFile.action"
}
_, err := y.post(fullUrl, func(req *resty.Request) {
req.SetContext(ctx)
if y.isFamily() {
req.SetQueryParams(map[string]string{
"familyId": y.FamilyID,
"parentId": parentID,
"fileMd5": fileMd5,
"fileName": fileName,
"fileSize": fileSize,
"resumePolicy": "1",
})
} else {
req.SetFormData(map[string]string{
"parentFolderId": parentID,
"fileName": fileName,
"size": fileSize,
"md5": fileMd5,
"opertype": "3",
"flag": "1",
"resumePolicy": "1",
"isLog": "0",
})
}
}, &uploadInfo)
if err != nil {
return nil, err
}
return &uploadInfo, nil
}
// Commit the uploaded file
func (y *Cloud189PC) OldUploadCommit(ctx context.Context, fileCommitUrl string, uploadFileID int64) (model.Obj, error) {
var resp OldCommitUploadFileResp
_, err = y.post(status.FileCommitUrl, func(req *resty.Request) {
_, err := y.post(fileCommitUrl, func(req *resty.Request) {
req.SetContext(ctx)
if y.isFamily() {
req.SetHeaders(map[string]string{
"ResumePolicy": "1",
"UploadFileId": fmt.Sprint(status.UploadFileId),
"UploadFileId": fmt.Sprint(uploadFileID),
"FamilyId": fmt.Sprint(y.FamilyID),
})
} else {
req.SetFormData(map[string]string{
"opertype": "3",
"resumePolicy": "1",
"uploadFileId": fmt.Sprint(status.UploadFileId),
"uploadFileId": fmt.Sprint(uploadFileID),
"isLog": "0",
})
}

drivers/baidu_netdisk/driver.go

@ -146,7 +146,39 @@ func (d *BaiduNetdisk) Remove(ctx context.Context, obj model.Obj) error {
return err
}
func (d *BaiduNetdisk) PutRapid(ctx context.Context, dstDir model.Obj, stream model.FileStreamer) (model.Obj, error) {
contentMd5 := stream.GetHash().GetHash(utils.MD5)
if len(contentMd5) < utils.MD5.Width {
return nil, errors.New("invalid hash")
}
streamSize := stream.GetSize()
rawPath := stdpath.Join(dstDir.GetPath(), stream.GetName())
path := encodeURIComponent(rawPath)
mtime := stream.ModTime().Unix()
ctime := stream.CreateTime().Unix()
blockList, _ := utils.Json.MarshalToString([]string{contentMd5})
data := fmt.Sprintf("path=%s&size=%d&isdir=0&rtype=3&block_list=%s&local_mtime=%d&local_ctime=%d",
path, streamSize, blockList, mtime, ctime)
params := map[string]string{
"method": "create",
}
log.Debugf("[baidu_netdisk] precreate data: %s", data)
var newFile File
_, err := d.post("/xpan/file", params, data, &newFile)
if err != nil {
return nil, err
}
return fileToObj(newFile), nil
}
func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// rapid upload
if newObj, err := d.PutRapid(ctx, dstDir, stream); err == nil {
return newObj, nil
}
tempFile, err := stream.CacheFullInTempFile()
if err != nil {
return nil, err
@ -264,6 +296,7 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
}
return fileToObj(newFile), nil
}
func (d *BaiduNetdisk) uploadSlice(ctx context.Context, params map[string]string, fileName string, file io.Reader) error {
res, err := base.RestyClient.R().
SetContext(ctx).

drivers/baidu_photo/driver.go

@ -227,14 +227,14 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
return nil, fmt.Errorf("file size cannot be zero")
}
// TODO:
// No instant-upload (妙传) method has been found for now
// The full-file MD5 is needed, so the stream must support io.Seek
tempFile, err := stream.CacheFullInTempFile()
if err != nil {
return nil, err
}
defer func() {
_ = tempFile.Close()
}()
const DEFAULT int64 = 1 << 22
const SliceSize int64 = 1 << 18

drivers/baidu_photo/types.go

@ -2,9 +2,10 @@ package baiduphoto
import (
"fmt"
"github.com/alist-org/alist/v3/pkg/utils"
"time"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/internal/model"
)
@ -52,34 +53,26 @@ type (
Ctime int64 `json:"ctime"` // creation time (seconds)
Mtime int64 `json:"mtime"` // modification time (seconds)
Thumburl []string `json:"thumburl"`
parseTime *time.Time
Md5 string `json:"md5"`
}
)
func (c *File) GetSize() int64 { return c.Size }
func (c *File) GetName() string { return getFileName(c.Path) }
func (c *File) ModTime() time.Time {
if c.parseTime == nil {
c.parseTime = toTime(c.Mtime)
}
return *c.parseTime
}
func (c *File) IsDir() bool { return false }
func (c *File) GetID() string { return "" }
func (c *File) GetPath() string { return "" }
func (c *File) GetSize() int64 { return c.Size }
func (c *File) GetName() string { return getFileName(c.Path) }
func (c *File) CreateTime() time.Time { return time.Unix(c.Ctime, 0) }
func (c *File) ModTime() time.Time { return time.Unix(c.Mtime, 0) }
func (c *File) IsDir() bool { return false }
func (c *File) GetID() string { return "" }
func (c *File) GetPath() string { return "" }
func (c *File) Thumb() string {
if len(c.Thumburl) > 0 {
return c.Thumburl[0]
}
return ""
}
func (c *File) CreateTime() time.Time {
return time.Unix(c.Ctime, 0)
}
func (c *File) GetHash() utils.HashInfo {
return utils.HashInfo{}
return utils.NewHashInfo(utils.MD5, c.Md5)
}
/* Album section */
@ -117,25 +110,17 @@ type (
}
)
func (a *Album) CreateTime() time.Time {
return time.Unix(a.CreationTime, 0)
}
func (a *Album) GetHash() utils.HashInfo {
return utils.HashInfo{}
}
func (a *Album) GetSize() int64 { return 0 }
func (a *Album) GetName() string { return a.Title }
func (a *Album) ModTime() time.Time {
if a.parseTime == nil {
a.parseTime = toTime(a.Mtime)
}
return *a.parseTime
}
func (a *Album) IsDir() bool { return true }
func (a *Album) GetID() string { return "" }
func (a *Album) GetPath() string { return "" }
func (a *Album) GetSize() int64 { return 0 }
func (a *Album) GetName() string { return a.Title }
func (a *Album) CreateTime() time.Time { return time.Unix(a.CreationTime, 0) }
func (a *Album) ModTime() time.Time { return time.Unix(a.Mtime, 0) }
func (a *Album) IsDir() bool { return true }
func (a *Album) GetID() string { return "" }
func (a *Album) GetPath() string { return "" }
type (
CopyFileResp struct {

drivers/mopan/driver.go

@ -7,6 +7,7 @@ import (
"io"
"net/http"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
@ -117,6 +118,15 @@ func (d *MoPan) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
return nil, err
}
data.DownloadUrl = strings.Replace(strings.ReplaceAll(data.DownloadUrl, "&amp;", "&"), "http://", "https://", 1)
res, err := base.NoRedirectClient.R().SetContext(ctx).Head(data.DownloadUrl)
if err != nil {
return nil, err
}
if res.StatusCode() == 302 {
data.DownloadUrl = res.Header().Get("location")
}
return &model.Link{
URL: data.DownloadUrl,
}, nil
@ -262,7 +272,7 @@ func (d *MoPan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
}
if !initUpdload.FileDataExists {
fmt.Println(d.client.CloudDiskStartBusiness())
utils.Log.Error(d.client.CloudDiskStartBusiness())
threadG, upCtx := errgroup.NewGroupWithContext(ctx, d.uploadThread,
retry.Attempts(3),

drivers/mopan/util.go

@ -4,6 +4,7 @@ import (
"time"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/foxxorcat/mopan-sdk-go"
)
@ -14,6 +15,8 @@ func fileToObj(f mopan.File) model.Obj {
Name: f.Name,
Size: int64(f.Size),
Modified: time.Time(f.LastOpTime),
Ctime: time.Time(f.CreateDate),
HashInfo: utils.NewHashInfo(utils.MD5, f.Md5),
},
Thumbnail: model.Thumbnail{
Thumbnail: f.Icon.SmallURL,
@ -26,6 +29,7 @@ func folderToObj(f mopan.Folder) model.Obj {
ID: string(f.ID),
Name: f.Name,
Modified: time.Time(f.LastOpTime),
Ctime: time.Time(f.CreateDate),
IsFolder: true,
}
}
@ -37,6 +41,7 @@ func CloneObj(o model.Obj, newID, newName string) model.Obj {
Name: newName,
IsFolder: true,
Modified: o.ModTime(),
Ctime: o.CreateTime(),
}
}
@ -50,6 +55,8 @@ func CloneObj(o model.Obj, newID, newName string) model.Obj {
Name: newName,
Size: o.GetSize(),
Modified: o.ModTime(),
Ctime: o.CreateTime(),
HashInfo: o.GetHash(),
},
Thumbnail: model.Thumbnail{
Thumbnail: thumb,

drivers/pikpak/driver.go

@ -3,7 +3,6 @@ package pikpak
import (
"context"
"fmt"
"io"
"net/http"
"strings"
@ -11,6 +10,7 @@ import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/session"
@ -123,22 +123,20 @@ func (d *PikPak) Remove(ctx context.Context, obj model.Obj) error {
}
func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
tempFile, err := stream.CacheFullInTempFile()
if err != nil {
return err
}
defer func() {
_ = tempFile.Close()
}()
// cal gcid
sha1Str, err := getGcid(tempFile, stream.GetSize())
if err != nil {
return err
}
_, err = tempFile.Seek(0, io.SeekStart)
if err != nil {
return err
hi := stream.GetHash()
sha1Str := hi.GetHash(hash_extend.GCID)
if len(sha1Str) < hash_extend.GCID.Width {
tFile, err := stream.CacheFullInTempFile()
if err != nil {
return err
}
sha1Str, err = utils.HashFile(hash_extend.GCID, tFile, stream.GetSize())
if err != nil {
return err
}
}
var resp UploadTaskData
res, err := d.request("https://api-drive.mypikpak.com/drive/v1/files", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
@ -177,7 +175,7 @@ func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
input := &s3manager.UploadInput{
Bucket: &params.Bucket,
Key: &params.Key,
Body: tempFile,
Body: stream,
}
_, err = uploader.UploadWithContext(ctx, input)
return err

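The pikpak and thunder Put changes share one pattern: reuse a GCID that upstream code already attached to the stream, and only hash a cached copy of the file as a fallback. A standalone sketch of that guard; gcidFor and the file path are illustrative, not driver code:

package main

import (
	"fmt"
	"os"

	"github.com/alist-org/alist/v3/pkg/utils"
	hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
)

// gcidFor prefers a hash computed upstream and hashes the file only as a fallback.
func gcidFor(hi utils.HashInfo, f *os.File, size int64) (string, error) {
	if s := hi.GetHash(hash_extend.GCID); len(s) == hash_extend.GCID.Width {
		return s, nil
	}
	// The size parameter selects the GCID block size (see pkg/utils/hash/gcid.go).
	return utils.HashFile(hash_extend.GCID, f, size)
}

func main() {
	f, err := os.Open("example.bin") // illustrative path
	if err != nil {
		panic(err)
	}
	defer f.Close()

	fi, err := f.Stat()
	if err != nil {
		panic(err)
	}
	gcid, err := gcidFor(utils.HashInfo{}, f, fi.Size())
	fmt.Println(gcid, err)
}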
drivers/pikpak/types.go

@ -5,6 +5,8 @@ import (
"time"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
)
type RespErr struct {
@ -21,7 +23,9 @@ type File struct {
Id string `json:"id"`
Kind string `json:"kind"`
Name string `json:"name"`
CreatedTime time.Time `json:"created_time"`
ModifiedTime time.Time `json:"modified_time"`
Hash string `json:"hash"`
Size string `json:"size"`
ThumbnailLink string `json:"thumbnail_link"`
WebContentLink string `json:"web_content_link"`
@ -35,8 +39,10 @@ func fileToObj(f File) *model.ObjThumb {
ID: f.Id,
Name: f.Name,
Size: size,
Ctime: f.CreatedTime,
Modified: f.ModifiedTime,
IsFolder: f.Kind == "drive#folder",
HashInfo: utils.NewHashInfo(hash_extend.GCID, f.Hash),
},
Thumbnail: model.Thumbnail{
Thumbnail: f.ThumbnailLink,

drivers/thunder/driver.go

@ -3,7 +3,6 @@ package thunder
import (
"context"
"fmt"
"io"
"net/http"
"strings"
@ -13,6 +12,7 @@ import (
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/utils"
hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/session"
@ -332,24 +332,22 @@ func (xc *XunLeiCommon) Remove(ctx context.Context, obj model.Obj) error {
}
func (xc *XunLeiCommon) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
tempFile, err := stream.CacheFullInTempFile()
if err != nil {
return err
}
defer func() {
_ = tempFile.Close()
}()
hi := stream.GetHash()
gcid := hi.GetHash(hash_extend.GCID)
if len(gcid) < hash_extend.GCID.Width {
tFile, err := stream.CacheFullInTempFile()
if err != nil {
return err
}
gcid, err := getGcid(tempFile, stream.GetSize())
if err != nil {
return err
}
if _, err := tempFile.Seek(0, io.SeekStart); err != nil {
return err
gcid, err = utils.HashFile(hash_extend.GCID, tFile, stream.GetSize())
if err != nil {
return err
}
}
var resp UploadTaskResponse
_, err = xc.Request(FILE_API_URL, http.MethodPost, func(r *resty.Request) {
_, err := xc.Request(FILE_API_URL, http.MethodPost, func(r *resty.Request) {
r.SetContext(ctx)
r.SetBody(&base.Json{
"kind": FILE,
@ -379,7 +377,7 @@ func (xc *XunLeiCommon) Put(ctx context.Context, dstDir model.Obj, stream model.
Bucket: aws.String(param.Bucket),
Key: aws.String(param.Key),
Expires: aws.Time(param.Expiration),
Body: tempFile,
Body: stream,
})
return err
}

drivers/thunder/types.go

@ -2,10 +2,12 @@ package thunder
import (
"fmt"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"strconv"
"time"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
)
type ErrResp struct {
@ -104,39 +106,39 @@ type Files struct {
ModifiedTime time.Time `json:"modified_time"`
IconLink string `json:"icon_link"`
ThumbnailLink string `json:"thumbnail_link"`
//Md5Checksum string `json:"md5_checksum"`
//Hash string `json:"hash"`
Links map[string]Link `json:"links"`
Phase string `json:"phase"`
Audit struct {
Status string `json:"status"`
Message string `json:"message"`
Title string `json:"title"`
} `json:"audit"`
// Md5Checksum string `json:"md5_checksum"`
Hash string `json:"hash"`
// Links map[string]Link `json:"links"`
// Phase string `json:"phase"`
// Audit struct {
// Status string `json:"status"`
// Message string `json:"message"`
// Title string `json:"title"`
// } `json:"audit"`
Medias []struct {
Category string `json:"category"`
IconLink string `json:"icon_link"`
IsDefault bool `json:"is_default"`
IsOrigin bool `json:"is_origin"`
IsVisible bool `json:"is_visible"`
Link Link `json:"link"`
MediaID string `json:"media_id"`
MediaName string `json:"media_name"`
NeedMoreQuota bool `json:"need_more_quota"`
Priority int `json:"priority"`
RedirectLink string `json:"redirect_link"`
ResolutionName string `json:"resolution_name"`
Video struct {
AudioCodec string `json:"audio_codec"`
BitRate int `json:"bit_rate"`
Duration int `json:"duration"`
FrameRate int `json:"frame_rate"`
Height int `json:"height"`
VideoCodec string `json:"video_codec"`
VideoType string `json:"video_type"`
Width int `json:"width"`
} `json:"video"`
VipTypes []string `json:"vip_types"`
//Category string `json:"category"`
//IconLink string `json:"icon_link"`
//IsDefault bool `json:"is_default"`
//IsOrigin bool `json:"is_origin"`
//IsVisible bool `json:"is_visible"`
Link Link `json:"link"`
//MediaID string `json:"media_id"`
//MediaName string `json:"media_name"`
//NeedMoreQuota bool `json:"need_more_quota"`
//Priority int `json:"priority"`
//RedirectLink string `json:"redirect_link"`
//ResolutionName string `json:"resolution_name"`
// Video struct {
// AudioCodec string `json:"audio_codec"`
// BitRate int `json:"bit_rate"`
// Duration int `json:"duration"`
// FrameRate int `json:"frame_rate"`
// Height int `json:"height"`
// VideoCodec string `json:"video_codec"`
// VideoType string `json:"video_type"`
// Width int `json:"width"`
// } `json:"video"`
// VipTypes []string `json:"vip_types"`
} `json:"medias"`
Trashed bool `json:"trashed"`
DeleteTime string `json:"delete_time"`
@ -150,21 +152,18 @@ type Files struct {
//Collection interface{} `json:"collection"`
}
func (c *Files) CreateTime() time.Time {
return c.CreatedTime
}
func (c *Files) GetHash() utils.HashInfo {
return utils.HashInfo{}
return utils.NewHashInfo(hash_extend.GCID, c.Hash)
}
func (c *Files) GetSize() int64 { size, _ := strconv.ParseInt(c.Size, 10, 64); return size }
func (c *Files) GetName() string { return c.Name }
func (c *Files) ModTime() time.Time { return c.ModifiedTime }
func (c *Files) IsDir() bool { return c.Kind == FOLDER }
func (c *Files) GetID() string { return c.ID }
func (c *Files) GetPath() string { return "" }
func (c *Files) Thumb() string { return c.ThumbnailLink }
func (c *Files) GetSize() int64 { size, _ := strconv.ParseInt(c.Size, 10, 64); return size }
func (c *Files) GetName() string { return c.Name }
func (c *Files) CreateTime() time.Time { return c.CreatedTime }
func (c *Files) ModTime() time.Time { return c.ModifiedTime }
func (c *Files) IsDir() bool { return c.Kind == FOLDER }
func (c *Files) GetID() string { return c.ID }
func (c *Files) GetPath() string { return "" }
func (c *Files) Thumb() string { return c.ThumbnailLink }
/*
*

drivers/weiyun/driver.go

@ -308,14 +308,14 @@ func (d *WeiYun) Remove(ctx context.Context, obj model.Obj) error {
}
func (d *WeiYun) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// NOTE:
// Rapid upload needs the final SHA-1 state, but SHA-1 cannot be run backwards, so the whole file would have to be read (or could it be??)
// The server supports resuming upload progress, so nothing extra needs to be implemented
if folder, ok := dstDir.(*Folder); ok {
file, err := stream.CacheFullInTempFile()
if err != nil {
return nil, err
}
defer func() {
_ = file.Close()
}()
// step 1.
preData, err := d.client.PreUpload(ctx, weiyunsdkgo.UpdloadFileParam{
@ -333,7 +333,7 @@ func (d *WeiYun) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
return nil, err
}
// fast upload
// not fast upload
if !preData.FileExist {
// step 2: add upload channels
if len(preData.ChannelList) < d.uploadThread {

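The NOTE in this file wonders about resuming a SHA-1 mid-file. Go's standard hashes can in fact snapshot and restore their internal state through encoding.BinaryMarshaler, which is also the trick the new gcid.Sum below uses to finalize a partial block without disturbing the running digest. A self-contained sketch using only the standard library:

package main

import (
	"crypto/sha1"
	"encoding"
	"fmt"
)

func main() {
	h := sha1.New()
	h.Write([]byte("first half "))

	// Snapshot the running state (crypto/* hashes implement
	// encoding.BinaryMarshaler since Go 1.8).
	state, _ := h.(encoding.BinaryMarshaler).MarshalBinary()

	h.Write([]byte("second half"))
	full := fmt.Sprintf("%x", h.Sum(nil))

	// Restore the snapshot into a fresh hash and finish from there.
	h2 := sha1.New()
	_ = h2.(encoding.BinaryUnmarshaler).UnmarshalBinary(state)
	h2.Write([]byte("second half"))
	fmt.Println(full == fmt.Sprintf("%x", h2.Sum(nil))) // true
}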
pkg/utils/hash.go

@ -8,10 +8,11 @@ import (
"encoding/hex"
"encoding/json"
"errors"
"github.com/alist-org/alist/v3/internal/errs"
log "github.com/sirupsen/logrus"
"hash"
"io"
"github.com/alist-org/alist/v3/internal/errs"
log "github.com/sirupsen/logrus"
)
func GetMD5EncodeStr(data string) string {
@ -29,7 +30,7 @@ type HashType struct {
Width int
Name string
Alias string
NewFunc func() hash.Hash
NewFunc func(...any) hash.Hash
}
func (ht *HashType) MarshalJSON() ([]byte, error) {
@ -57,7 +58,10 @@ var (
// RegisterHash adds a new Hash to the list and returns its Type
func RegisterHash(name, alias string, width int, newFunc func() hash.Hash) *HashType {
return RegisterHashWithParam(name, alias, width, func(a ...any) hash.Hash { return newFunc() })
}
func RegisterHashWithParam(name, alias string, width int, newFunc func(...any) hash.Hash) *HashType {
newType := &HashType{
Name: name,
Alias: alias,
@ -83,15 +87,15 @@ var (
)
// HashData get hash of one hashType
func HashData(hashType *HashType, data []byte) string {
h := hashType.NewFunc()
func HashData(hashType *HashType, data []byte, params ...any) string {
h := hashType.NewFunc(params...)
h.Write(data)
return hex.EncodeToString(h.Sum(nil))
}
// HashReader get hash of one hashType from a reader
func HashReader(hashType *HashType, reader io.Reader) (string, error) {
h := hashType.NewFunc()
func HashReader(hashType *HashType, reader io.Reader, params ...any) (string, error) {
h := hashType.NewFunc(params...)
_, err := io.Copy(h, reader)
if err != nil {
return "", errs.NewErr(err, "HashReader error")
@ -100,8 +104,8 @@ func HashReader(hashType *HashType, reader io.Reader) (string, error) {
}
// HashFile get hash of one hashType from a model.File
func HashFile(hashType *HashType, file io.ReadSeeker) (string, error) {
str, err := HashReader(hashType, file)
func HashFile(hashType *HashType, file io.ReadSeeker, params ...any) (string, error) {
str, err := HashReader(hashType, file, params...)
if err != nil {
return "", err
}

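The point of RegisterHashWithParam above is that a hash constructor can now receive per-call parameters, which HashData/HashReader/HashFile forward. A toy illustration of the mechanism; the "fnv" registration is hypothetical, not something the repo defines:

package main

import (
	"fmt"
	"hash"
	"hash/fnv"

	"github.com/alist-org/alist/v3/pkg/utils"
)

// Hypothetical: a hash whose variant is selected by the first parameter.
var FNV = utils.RegisterHashWithParam("fnv", "FNV-1a", 16, func(a ...any) hash.Hash {
	if len(a) > 0 && fmt.Sprint(a[0]) == "32" {
		return fnv.New32a() // 8 hex chars
	}
	return fnv.New64a() // 16 hex chars, matching the registered width
})

func main() {
	fmt.Println(utils.HashData(FNV, []byte("hello")))       // 64-bit variant
	fmt.Println(utils.HashData(FNV, []byte("hello"), "32")) // 32-bit variant
}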
pkg/utils/hash/gcid.go (new file, 96 lines)

@ -0,0 +1,96 @@
package hash_extend
import (
"crypto/sha1"
"encoding"
"fmt"
"hash"
"strconv"
"github.com/alist-org/alist/v3/pkg/utils"
)
var GCID = utils.RegisterHashWithParam("gcid", "GCID", 40, func(a ...any) hash.Hash {
var (
size int64
err error
)
if len(a) > 0 {
size, err = strconv.ParseInt(fmt.Sprint(a[0]), 10, 64)
if err != nil {
panic(err)
}
}
return NewGcid(size)
})
func NewGcid(size int64) hash.Hash {
calcBlockSize := func(j int64) int64 {
var psize int64 = 0x40000
for float64(j)/float64(psize) > 0x200 && psize < 0x200000 {
psize = psize << 1
}
return psize
}
return &gcid{
hash: sha1.New(),
hashState: sha1.New(),
blockSize: int(calcBlockSize(size)),
}
}
type gcid struct {
hash hash.Hash
hashState hash.Hash
blockSize int
offset int
}
func (h *gcid) Write(p []byte) (n int, err error) {
n = len(p)
for len(p) > 0 {
if h.offset < h.blockSize {
var lastSize = h.blockSize - h.offset
if lastSize > len(p) {
lastSize = len(p)
}
h.hashState.Write(p[:lastSize])
h.offset += lastSize
p = p[lastSize:]
}
if h.offset >= h.blockSize {
h.hash.Write(h.hashState.Sum(nil))
h.hashState.Reset()
h.offset = 0
}
}
return
}
func (h *gcid) Sum(b []byte) []byte {
if hashm, ok := h.hash.(encoding.BinaryMarshaler); ok {
if hashum, ok := h.hash.(encoding.BinaryUnmarshaler); ok {
tempData, _ := hashm.MarshalBinary()
h.hash.Write(h.hashState.Sum(nil))
defer hashum.UnmarshalBinary(tempData)
}
}
return h.hash.Sum(b)
}
func (h *gcid) Reset() {
h.hash.Reset()
h.hashState.Reset()
}
func (h *gcid) Size() int {
return h.hash.Size()
}
func (h *gcid) BlockSize() int {
return h.blockSize
}
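For completeness, this is roughly how the new type is driven end to end: the size argument is stringified and parsed by the constructor above to pick the block size (256 KiB up to 2 MiB), and the result is a SHA-1 over the per-block SHA-1s, 40 hex characters wide. The sample data below is made up:

package main

import (
	"fmt"
	"strings"

	"github.com/alist-org/alist/v3/pkg/utils"
	hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
)

func main() {
	const size = int64(1 << 20)
	data := strings.NewReader(strings.Repeat("x", int(size))) // 1 MiB of sample data

	// HashReader forwards the size parameter to NewGcid via RegisterHashWithParam.
	gcid, err := utils.HashReader(hash_extend.GCID, data, size)
	if err != nil {
		panic(err)
	}
	fmt.Println(len(gcid), gcid) // 40 hex characters
}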