mirror of https://github.com/Xhofe/alist
commit 4e6a44253c

@@ -7,6 +7,14 @@ body:
value: |
Thanks for taking the time to fill out this bug report, please **confirm that your issue is not a duplicate issue and not because of your operation or version issues**
感谢您花时间填写此错误报告,请**务必确认您的issue不是重复的且不是因为您的操作或版本问题**
- type: checkboxes
attributes:
label: Please make sure of the following things
description: You may select more than one, even select all.
options:
- label: I have read the [documentation](https://alist-doc.nn.ci).
- label: I'm sure there are no duplicate issues or discussions.
- label: I'm sure it's due to `alist` and not something else(such as `Dependencies` or `Operational`).
- type: input
id: version
attributes:

@@ -2,6 +2,15 @@ name: "Feature request"
description: Feature request
labels: ["enhancement: pending triage"]
body:
- type: checkboxes
attributes:
label: Please make sure of the following things
description: You may select more than one, even select all.
options:
- label: I have read the [documentation](https://alist-doc.nn.ci).
- label: I'm sure there are no duplicate issues or discussions.
- label: I'm sure this feature is not implemented.
- label: I'm sure it's a reasonable and popular requirement.
- type: textarea
id: feature-description
attributes:

@@ -11,7 +11,7 @@ jobs:
strategy:
matrix:
platform: [ubuntu-latest]
go-version: [1.17]
go-version: [1.18]
name: Build
runs-on: ${{ matrix.platform }}
steps:

@@ -32,8 +32,8 @@ jobs:
- name: Install upx
run: |
docker pull techknowlogick/xgo:latest
go install src.techknowlogick.com/xgo@latest
docker pull crazymax/xgo:latest
go install github.com/crazy-max/xgo@latest
sudo apt install upx
- name: Build

@@ -10,7 +10,7 @@ jobs:
strategy:
matrix:
platform: [ubuntu-latest]
go-version: [1.17]
go-version: [1.18]
name: Release
runs-on: ${{ matrix.platform }}
steps:

@@ -29,8 +29,8 @@ jobs:
- name: Install upx
run: |
docker pull techknowlogick/xgo:latest
go install src.techknowlogick.com/xgo@latest
docker pull crazymax/xgo:latest
go install github.com/crazy-max/xgo@latest
sudo apt install upx
- name: Build

@@ -30,7 +30,7 @@ English | [中文](./README_cn.md) | [Contributors](./CONTRIBUTORS.md) | [Contri
- [x] [PikPak](https://www.mypikpak.com/)
- [x] [ShandianPan](https://shandianpan.com/)
- [x] [S3](https://aws.amazon.com/s3/)
- [x] WebDav
- [x] WebDav(Support OneDrive/SharePoint without API)
- [x] Teambition([China](https://www.teambition.com/ ),[International](https://us.teambition.com/ ))
- [x] [Mediatrack](https://www.mediatrack.cn/)
- [x] [139yun](https://yun.139.com/) (Personal, Family)

@@ -38,6 +38,7 @@ English | [中文](./README_cn.md) | [Contributors](./CONTRIBUTORS.md) | [Contri
- [x] [Baidu Disk](http://pan.baidu.com/)
- [x] [Quark](https://pan.quark.cn)
- [x] [XunleiCloud](https://pan.xunlei.com/)
- [x] SFTP
- [x] Easy to deploy and out-of-the-box
- [x] File preview (PDF, markdown, code, plain text, ...)
- [x] Image preview in gallery mode

@@ -30,7 +30,7 @@
- [x] [PikPak](https://www.mypikpak.com/)
- [x] [闪电盘](https://shandianpan.com/)
- [x] [S3](https://aws.amazon.com/cn/s3/)
- [x] WebDav
- [x] WebDav(支持无API的OneDrive/SharePoint)
- [x] Teambition([中国](https://www.teambition.com/ ),[国际](https://us.teambition.com/ ))
- [x] [分秒帧](https://www.mediatrack.cn/)
- [x] [和彩云](https://yun.139.com/) (个人云, 家庭云)

@@ -38,6 +38,7 @@
- [x] [百度网盘](http://pan.baidu.com/)
- [x] [夸克网盘](https://pan.quark.cn)
- [x] [迅雷云盘](https://pan.xunlei.com/)
- [x] SFTP
- [x] 部署方便,开箱即用
- [x] 文件预览(PDF、markdown、代码、纯文本……)
- [x] 画廊模式下的图像预览

@@ -7,6 +7,7 @@ import (
log "github.com/sirupsen/logrus"
"io/ioutil"
"os"
"path/filepath"
)

// InitConf init config

@@ -46,7 +47,11 @@ func InitConf() {
if !conf.Conf.Force {
confFromEnv()
}
err := os.MkdirAll(conf.Conf.TempDir, 0700)
err := os.RemoveAll(filepath.Join(conf.Conf.TempDir))
if err != nil {
log.Errorln("failed delete temp file:", err)
}
err = os.MkdirAll(conf.Conf.TempDir, 0700)
if err != nil {
log.Fatalf("create temp dir error: %s", err.Error())
}

@@ -74,9 +74,9 @@ func InitModel() {
log.Infof("auto migrate model...")
if databaseConfig.Type == "mysql" {
err = conf.DB.Set("gorm:table_options", "ENGINE=InnoDB CHARSET=utf8mb4").
AutoMigrate(&model.SettingItem{}, &model.Account{}, &model.Meta{})
AutoMigrate(&model.SettingItem{}, &model.Account{}, &model.Meta{}, &model.SearchFile{})
} else {
err = conf.DB.AutoMigrate(&model.SettingItem{}, &model.Account{}, &model.Meta{})
err = conf.DB.AutoMigrate(&model.SettingItem{}, &model.Account{}, &model.Meta{}, &model.SearchFile{})
}
if err != nil {
log.Fatalf("failed to auto migrate: %s", err.Error())

@@ -118,8 +118,8 @@ func InitSettings() {
Group: model.FRONT,
},
{
Key: "home readme url",
Description: "when have multiple, the readme file to show",
Key: "global readme url",
Description: "Default display when directory has no readme",
Type: "string",
Access: model.PUBLIC,
Group: model.FRONT,

@@ -258,6 +258,14 @@ func InitSettings() {
Access: model.PRIVATE,
Group: model.BACK,
},
{
Key: "enable search",
Value: "false",
Type: "bool",
Access: model.PUBLIC,
Group: model.BACK,
Description: "Experimental function, not recommended as it's still under development",
},
}
for i, _ := range settings {
v := settings[i]

build.sh

@@ -52,6 +52,15 @@ BUILD() {
webTag=$(wget -qO- -t1 -T2 "https://api.github.com/repos/alist-org/alist-web/releases/latest" | grep "tag_name" | head -n 1 | awk -F ":" '{print $2}' | sed 's/\"//g;s/,//g;s/ //g')
echo "build version: $gitTag"
ldflags="\
-w -s --extldflags '-static -fpic' \
-X 'github.com/Xhofe/alist/conf.BuiltAt=$builtAt' \
-X 'github.com/Xhofe/alist/conf.GoVersion=$goVersion' \
-X 'github.com/Xhofe/alist/conf.GitAuthor=$gitAuthor' \
-X 'github.com/Xhofe/alist/conf.GitCommit=$gitCommit' \
-X 'github.com/Xhofe/alist/conf.GitTag=$gitTag' \
-X 'github.com/Xhofe/alist/conf.WebTag=$webTag' \
"
ldflagsDarwin="\
-w -s \
-X 'github.com/Xhofe/alist/conf.BuiltAt=$builtAt' \
-X 'github.com/Xhofe/alist/conf.GoVersion=$goVersion' \

@@ -60,11 +69,12 @@ BUILD() {
-X 'github.com/Xhofe/alist/conf.GitTag=$gitTag' \
-X 'github.com/Xhofe/alist/conf.WebTag=$webTag' \
"

if [ "$1" == "release" ]; then
xgo -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
xgo -targets=linux/*,windows/* -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
xgo -targets=darwin/* -out "$appName" -ldflags="$ldflagsDarwin" -tags=jsoniter .
else
xgo -targets=linux/amd64,windows/amd64,darwin/amd64 -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
xgo -targets=linux/amd64,windows/amd64 -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
xgo -targets=darwin/amd64 -out "$appName" -ldflags="$ldflagsDarwin" -tags=jsoniter .
fi
mkdir -p "build"
mv alist-* build

@@ -96,7 +106,7 @@ BUILD_MUSL() {
gitTag=$(git describe --long --tags --dirty --always)
webTag=$(wget -qO- -t1 -T2 "https://api.github.com/repos/alist-org/alist-web/releases/latest" | grep "tag_name" | head -n 1 | awk -F ":" '{print $2}' | sed 's/\"//g;s/,//g;s/ //g')
ldflags="\
-w -s \
-w -s --extldflags '-static -fpic' \
-X 'github.com/Xhofe/alist/conf.BuiltAt=$builtAt' \
-X 'github.com/Xhofe/alist/conf.GoVersion=$goVersion' \
-X 'github.com/Xhofe/alist/conf.GitAuthor=$gitAuthor' \

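Side note on the `-X` flags used throughout this script: the Go linker overwrites package-level string variables at link time, which is how `conf.GitTag`, `conf.WebTag`, and friends get their values without code changes. A minimal, self-contained sketch of the mechanism (package and variable names below are illustrative, not taken from the repo):

```go
// Illustrative only; alist keeps such variables in its conf package.
package main

import "fmt"

// These defaults are replaced at build time, e.g.:
//   go build -ldflags "-X 'main.GitTag=v2.0.0' -X 'main.BuiltAt=2022-04-03'"
var (
	GitTag  = "dev"
	BuiltAt = "unknown"
)

func main() {
	fmt.Printf("version %s, built at %s\n", GitTag, BuiltAt)
}
```
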
@@ -85,6 +85,7 @@ var (
"Visitor WebDAV username", "Visitor WebDAV password",
"default page size", "load type",
"ocr api", "favicon",
"enable search",
}
)

@@ -12,58 +12,8 @@ import (
log "github.com/sirupsen/logrus"
"path/filepath"
"strconv"
"time"
)

type BaseResp struct {
Code int `json:"code"`
Message string `json:"message"`
}

type Pan123TokenResp struct {
BaseResp
Data struct {
Token string `json:"token"`
} `json:"data"`
}

type Pan123File struct {
FileName string `json:"FileName"`
Size int64 `json:"Size"`
UpdateAt *time.Time `json:"UpdateAt"`
FileId int64 `json:"FileId"`
Type int `json:"Type"`
Etag string `json:"Etag"`
S3KeyFlag string `json:"S3KeyFlag"`
}

type Pan123Files struct {
BaseResp
Data struct {
InfoList []Pan123File `json:"InfoList"`
Next string `json:"Next"`
} `json:"data"`
}

type Pan123DownResp struct {
BaseResp
Data struct {
DownloadUrl string `json:"DownloadUrl"`
} `json:"data"`
}

type UploadResp struct {
BaseResp
Data struct {
AccessKeyId string `json:"AccessKeyId"`
Bucket string `json:"Bucket"`
Key string `json:"Key"`
SecretAccessKey string `json:"SecretAccessKey"`
SessionToken string `json:"SessionToken"`
FileId int64 `json:"FileId"`
} `json:"data"`
}

func (driver Pan123) Login(account *model.Account) error {
url := "https://www.123pan.com/api/user/sign_in"
if account.APIProxyUrl != "" {

@@ -98,11 +48,7 @@ func (driver Pan123) FormatFile(file *Pan123File) *model.File {
Driver: driver.Config().Name,
UpdatedAt: file.UpdateAt,
}
if file.Type == 1 {
f.Type = conf.FOLDER
} else {
f.Type = utils.GetFileType(filepath.Ext(file.FileName))
}
f.Type = file.GetType()
return f
}

@@ -0,0 +1,72 @@
package _23

import (
"github.com/Xhofe/alist/conf"
"github.com/Xhofe/alist/utils"
"path"
"time"
)

type Pan123File struct {
FileName string `json:"FileName"`
Size int64 `json:"Size"`
UpdateAt *time.Time `json:"UpdateAt"`
FileId int64 `json:"FileId"`
Type int `json:"Type"`
Etag string `json:"Etag"`
S3KeyFlag string `json:"S3KeyFlag"`
}

func (f Pan123File) GetSize() uint64 {
return uint64(f.Size)
}

func (f Pan123File) GetName() string {
return f.FileName
}

func (f Pan123File) GetType() int {
if f.Type == 1 {
return conf.FOLDER
}
return utils.GetFileType(path.Ext(f.FileName))
}

type BaseResp struct {
Code int `json:"code"`
Message string `json:"message"`
}

type Pan123TokenResp struct {
BaseResp
Data struct {
Token string `json:"token"`
} `json:"data"`
}

type Pan123Files struct {
BaseResp
Data struct {
InfoList []Pan123File `json:"InfoList"`
Next string `json:"Next"`
} `json:"data"`
}

type Pan123DownResp struct {
BaseResp
Data struct {
DownloadUrl string `json:"DownloadUrl"`
} `json:"data"`
}

type UploadResp struct {
BaseResp
Data struct {
AccessKeyId string `json:"AccessKeyId"`
Bucket string `json:"Bucket"`
Key string `json:"Key"`
SecretAccessKey string `json:"SecretAccessKey"`
SessionToken string `json:"SessionToken"`
FileId int64 `json:"FileId"`
} `json:"data"`
}

@@ -18,7 +18,6 @@ import (
"math"
"net/http"
"net/http/cookiejar"
"path/filepath"
"regexp"
"strconv"
"strings"

@@ -60,11 +59,9 @@ func (driver Cloud189) FormatFile(file *Cloud189File) *model.File {
f.UpdatedAt = &lastOpTime
}
if file.Size == -1 {
f.Type = conf.FOLDER
f.Size = 0
} else {
f.Type = utils.GetFileType(filepath.Ext(file.Name))
}
f.Type = file.GetType()
return f
}

@@ -132,7 +129,7 @@ func (driver Cloud189) Login(account *model.Account) error {
}
}
if lt == "" {
return fmt.Errorf("get empty login page")
return errors.New("get page: " + b)
}
captchaToken := regexp.MustCompile(`captchaToken' value='(.+?)'`).FindStringSubmatch(b)[1]
returnUrl := regexp.MustCompile(`returnUrl = '(.+?)'`).FindStringSubmatch(b)[1]

@@ -187,7 +187,7 @@ func (driver Cloud189) Link(args base.Args, account *model.Account) (*base.Link,
link := base.Link{
Headers: []base.Header{
{Name: "User-Agent", Value: base.UserAgent},
{Name: "Authorization", Value: ""},
//{Name: "Authorization", Value: ""},
},
}
if res.StatusCode() == 302 {

@@ -1,5 +1,11 @@
package _89

import (
"github.com/Xhofe/alist/conf"
"github.com/Xhofe/alist/utils"
"path"
)

type Cloud189Error struct {
ErrorCode string `json:"errorCode"`
ErrorMsg string `json:"errorMsg"`

@@ -17,6 +23,24 @@ type Cloud189File struct {
Url string `json:"url"`
}

func (f Cloud189File) GetSize() uint64 {
if f.Size == -1 {
return 0
}
return uint64(f.Size)
}

func (f Cloud189File) GetName() string {
return f.Name
}

func (f Cloud189File) GetType() int {
if f.Size == -1 {
return conf.FOLDER
}
return utils.GetFileType(path.Ext(f.Name))
}

type Cloud189Folder struct {
Id int64 `json:"id"`
LastOpTime string `json:"lastOpTime"`

@@ -34,7 +34,7 @@ func GetState(account *model.Account) *State {
SetHeaders(map[string]string{
"Accept": "application/json;charset=UTF-8",
"User-Agent": base.UserAgent,
}),
}).SetTimeout(base.DefaultTimeout),
}
userStateCache.States[account.Username] = state
return state

@@ -198,7 +198,7 @@ func (s *State) refreshSession(account *model.Account) error {
"accessToken": s.AccessToken,
}).
SetHeader("X-Request-ID", uuid.NewString()).
Get("https://api.cloud.189.cn/getSessionForPC.action")
Get(API_URL + "/getSessionForPC.action")
if err != nil {
return err
}

@@ -223,10 +223,8 @@ func (s *State) refreshSession(account *model.Account) error {
return nil
}

func (s *State) IsLogin() bool {
_, err := s.Request("GET", API_URL+"/getUserInfo.action", nil, func(r *resty.Request) {
r.SetQueryParams(clientSuffix())
}, nil)
func (s *State) IsLogin(account *model.Account) bool {
_, err := s.Request(http.MethodGet, API_URL+"/getUserInfo.action", nil, func(r *resty.Request) { r.SetQueryParams(clientSuffix()) }, account)
return err == nil
}

@@ -242,12 +240,12 @@ func (s *State) RefreshSession(account *model.Account) error {
return s.refreshSession(account)
}

func (s *State) Request(method string, fullUrl string, params url.Values, callback func(*resty.Request), account *model.Account) (*resty.Response, error) {
func (s *State) Request(method string, fullUrl string, params Params, callback func(*resty.Request), account *model.Account) (*resty.Response, error) {
s.Lock()
dateOfGmt := getHttpDateStr()
sessionKey := s.SessionKey
sessionSecret := s.SessionSecret
if account != nil && isFamily(account) {
if isFamily(account) {
sessionKey = s.FamilySessionKey
sessionSecret = s.FamilySessionSecret
}

@@ -267,25 +265,12 @@ func (s *State) Request(method string, fullUrl string, params url.Values, callba
}
req.SetHeader("Signature", signatureOfHmac(sessionSecret, sessionKey, method, fullUrl, dateOfGmt, paramsData))

callback(req)
if callback != nil {
callback(req)
}
s.Unlock()

var err error
var res *resty.Response
switch method {
case "GET":
res, err = req.Get(fullUrl)
case "POST":
res, err = req.Post(fullUrl)
case "DELETE":
res, err = req.Delete(fullUrl)
case "PATCH":
res, err = req.Patch(fullUrl)
case "PUT":
res, err = req.Put(fullUrl)
default:
return nil, base.ErrNotSupport
}
res, err := req.Execute(method, fullUrl)
if err != nil {
return nil, err
}

@@ -298,6 +283,9 @@ func (s *State) Request(method string, fullUrl string, params url.Values, callba
}
if erron.Code != "" && erron.Code != "SUCCESS" {
if erron.Msg == "" {
if erron.Message == "" {
return nil, fmt.Errorf(res.String())
}
return nil, fmt.Errorf(erron.Message)
}
return nil, fmt.Errorf(erron.Msg)

@@ -306,25 +294,18 @@ func (s *State) Request(method string, fullUrl string, params url.Values, callba
return nil, fmt.Errorf(erron.ErrorMsg)
}

if account != nil {
switch utils.Json.Get(res.Body(), "res_code").ToInt64() {
case 11, 18:
if err := s.RefreshSession(account); err != nil {
return nil, err
}
return s.Request(method, fullUrl, params, callback, account)
case 0:
if res.StatusCode() == http.StatusOK {
return res, nil
}
fallthrough
default:
return nil, fmt.Errorf(res.String())
switch utils.Json.Get(res.Body(), "res_code").ToInt64() {
case 11, 18:
if err := s.RefreshSession(account); err != nil {
return nil, err
}
return s.Request(method, fullUrl, params, callback, account)
case 0:
if res.StatusCode() == http.StatusOK {
return res, nil
}
return nil, fmt.Errorf(res.String())
default:
return nil, fmt.Errorf(utils.Json.Get(res.Body(), "res_message").ToString())
}

if utils.Json.Get(res.Body(), "res_code").ToInt64() != 0 {
return res, fmt.Errorf(utils.Json.Get(res.Body(), "res_message").ToString())
}
return res, nil
}

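The `req.Execute(method, fullUrl)` call introduced above replaces the old per-verb switch; resty's generic `Execute` dispatches any HTTP method from a single code path. A minimal, self-contained sketch of that pattern (the URL is a placeholder):

```go
package main

import (
	"fmt"

	"github.com/go-resty/resty/v2"
)

func do(method, url string) error {
	// Execute handles GET/POST/PUT/... from one code path,
	// so callers no longer switch over method names.
	res, err := resty.New().R().Execute(method, url)
	if err != nil {
		return err
	}
	fmt.Println(res.StatusCode())
	return nil
}

func main() {
	_ = do("GET", "https://example.com") // placeholder URL
}
```
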
@@ -1,16 +1,19 @@
package _189

import (
"bytes"
"crypto/md5"
"encoding/base64"
"encoding/hex"
"fmt"
"io"
"io/ioutil"
"math"
"net/http"
"net/url"
"os"
"path/filepath"
"strings"
"time"

"github.com/Xhofe/alist/conf"
"github.com/Xhofe/alist/drivers/base"

@@ -50,10 +53,9 @@ func (driver Cloud189) Items() []base.Item {
Description: "account password",
},
{
Name: "root_folder",
Label: "root folder file_id",
Type: base.TypeString,
Required: true,
Name: "root_folder",
Label: "root folder file_id",
Type: base.TypeString,
},
{
Name: "internal_type",

@@ -63,10 +65,9 @@ func (driver Cloud189) Items() []base.Item {
Values: "Personal,Family",
},
{
Name: "site_id",
Label: "family id",
Type: base.TypeString,
Required: true,
Name: "site_id",
Label: "family id",
Type: base.TypeString,
},
{
Name: "order_by",

@@ -82,6 +83,11 @@ func (driver Cloud189) Items() []base.Item {
Values: "true,false",
Required: true,
},
{
Name: "bool_1",
Label: "fast upload",
Type: base.TypeBool,
},
}
}

@@ -92,10 +98,14 @@ func (driver Cloud189) Save(account *model.Account, old *model.Account) error {

if !isFamily(account) && account.RootFolder == "" {
account.RootFolder = "-11"
account.SiteId = ""
}
if isFamily(account) && account.RootFolder == "-11" {
account.RootFolder = ""
}

state := GetState(account)
if !state.IsLogin() {
if !state.IsLogin(account) {
if err := state.Login(account); err != nil {
return err
}

@@ -121,7 +131,7 @@ func (driver Cloud189) Save(account *model.Account, old *model.Account) error {

func (driver Cloud189) getFamilyInfoList(account *model.Account) ([]FamilyInfoResp, error) {
var resp FamilyInfoListResp
_, err := GetState(account).Request("GET", API_URL+"/family/manage/getFamilyList.action", nil, func(r *resty.Request) {
_, err := GetState(account).Request(http.MethodGet, API_URL+"/family/manage/getFamilyList.action", nil, func(r *resty.Request) {
r.SetQueryParams(clientSuffix())
r.SetResult(&resp)
}, account)

@@ -179,16 +189,16 @@ func (driver Cloud189) Files(path string, account *model.Account) ([]model.File,
client := GetState(account)
for pageNum := 1; ; pageNum++ {
var resp Cloud189FilesResp
queryparam := map[string]string{
"folderId": file.Id,
"fileType": "0",
"mediaAttr": "0",
"iconOption": "5",
"pageNum": fmt.Sprint(pageNum),
"pageSize": "130",
}
_, err = client.Request("GET", fullUrl, nil, func(r *resty.Request) {
r.SetQueryParams(clientSuffix()).SetQueryParams(queryparam)
_, err = client.Request(http.MethodGet, fullUrl, nil, func(r *resty.Request) {
r.SetQueryParams(clientSuffix()).
SetQueryParams(map[string]string{
"folderId": file.Id,
"fileType": "0",
"mediaAttr": "0",
"iconOption": "5",
"pageNum": fmt.Sprint(pageNum),
"pageSize": "130",
})
if isFamily(account) {
r.SetQueryParams(map[string]string{
"familyId": account.SiteId,

@@ -212,10 +222,6 @@ func (driver Cloud189) Files(path string, account *model.Account) ([]model.File,
break
}

mustTime := func(str string) *time.Time {
time, _ := http.ParseTime(str)
return &time
}
for _, folder := range resp.FileListAO.FolderList {
files = append(files, model.File{
Id: fmt.Sprint(folder.ID),

@@ -223,7 +229,7 @@ func (driver Cloud189) Files(path string, account *model.Account) ([]model.File,
Size: 0,
Type: conf.FOLDER,
Driver: driver.Config().Name,
UpdatedAt: mustTime(folder.CreateDate),
UpdatedAt: MustParseTime(folder.LastOpTime),
})
}
for _, file := range resp.FileListAO.FileList {

@@ -233,7 +239,7 @@ func (driver Cloud189) Files(path string, account *model.Account) ([]model.File,
Size: file.Size,
Type: utils.GetFileType(filepath.Ext(file.Name)),
Driver: driver.Config().Name,
UpdatedAt: mustTime(file.CreateDate),
UpdatedAt: MustParseTime(file.LastOpTime),
Thumbnail: file.Icon.SmallUrl,
})
}

@@ -279,7 +285,7 @@ func (driver Cloud189) Link(args base.Args, account *model.Account) (*base.Link,
var downloadUrl struct {
URL string `json:"fileDownloadUrl"`
}
_, err = GetState(account).Request("GET", fullUrl, nil, func(r *resty.Request) {
_, err = GetState(account).Request(http.MethodGet, fullUrl, nil, func(r *resty.Request) {
r.SetQueryParams(clientSuffix()).SetQueryParam("fileId", file.Id)
if isFamily(account) {
r.SetQueryParams(map[string]string{

@@ -324,7 +330,7 @@ func (driver Cloud189) MakeDir(path string, account *model.Account) error {
}
fullUrl += "/createFolder.action"

_, err = GetState(account).Request("POST", fullUrl, nil, func(r *resty.Request) {
_, err = GetState(account).Request(http.MethodPost, fullUrl, nil, func(r *resty.Request) {
r.SetQueryParams(clientSuffix()).SetQueryParams(map[string]string{
"folderName": name,
"relativePath": "",

@@ -354,7 +360,7 @@ func (driver Cloud189) Move(src string, dst string, account *model.Account) erro
return err
}

_, err = GetState(account).Request("POST", API_URL+"/batch/createBatchTask.action", nil, func(r *resty.Request) {
_, err = GetState(account).Request(http.MethodPost, API_URL+"/batch/createBatchTask.action", nil, func(r *resty.Request) {
r.SetFormData(clientSuffix()).SetFormData(map[string]string{
"type": "MOVE",
"taskInfos": string(MustToBytes(utils.Json.Marshal(

@@ -390,10 +396,10 @@ func (driver Cloud189) Move(src string, dst string, account *model.Account) erro

var queryParam map[string]string
fullUrl := API_URL
method := "POST"
method := http.MethodPost
if isFamily(account) {
fullUrl += "/family/file"
method = "GET"
method = http.MethodGet
}
if srcFile.IsDir() {
fullUrl += "/moveFolder.action"

@@ -431,10 +437,10 @@ func (driver Cloud189) Rename(src string, dst string, account *model.Account) er

var queryParam map[string]string
fullUrl := API_URL
method := "POST"
method := http.MethodPost
if isFamily(account) {
fullUrl += "/family/file"
method = "GET"
method = http.MethodGet
}
if srcFile.IsDir() {
fullUrl += "/renameFolder.action"

@@ -470,7 +476,7 @@ func (driver Cloud189) Copy(src string, dst string, account *model.Account) erro
return err
}

_, err = GetState(account).Request("POST", API_URL+"/batch/createBatchTask.action", nil, func(r *resty.Request) {
_, err = GetState(account).Request(http.MethodPost, API_URL+"/batch/createBatchTask.action", nil, func(r *resty.Request) {
r.SetFormData(clientSuffix()).SetFormData(map[string]string{
"type": "COPY",
"taskInfos": string(MustToBytes(utils.Json.Marshal(

@@ -500,7 +506,7 @@ func (driver Cloud189) Delete(path string, account *model.Account) error {
return err
}

_, err = GetState(account).Request("POST", API_URL+"/batch/createBatchTask.action", nil, func(r *resty.Request) {
_, err = GetState(account).Request(http.MethodPost, API_URL+"/batch/createBatchTask.action", nil, func(r *resty.Request) {
r.SetFormData(clientSuffix()).SetFormData(map[string]string{
"type": "DELETE",
"taskInfos": string(MustToBytes(utils.Json.Marshal(

@@ -535,12 +541,209 @@ func (driver Cloud189) Upload(file *model.FileStream, account *model.Account) er
return base.ErrNotFolder
}

if isFamily(account) {
return driver.uploadFamily(file, parentFile, account)
if account.Bool1 {
return driver.FastUpload(file, parentFile, account)
}
return driver.uploadPerson(file, parentFile, account)
return driver.CommonUpload(file, parentFile, account)
/*
if isFamily(account) {
return driver.uploadFamily(file, parentFile, account)
}
return driver.uploadPerson(file, parentFile, account)
*/
}

func (driver Cloud189) CommonUpload(file *model.FileStream, parentFile *model.File, account *model.Account) error {
// 初始化上传
state := GetState(account)
const DEFAULT int64 = 10485760
count := int(math.Ceil(float64(file.Size) / float64(DEFAULT)))

params := Params{
"parentFolderId": parentFile.Id,
"fileName": url.PathEscape(file.Name),
"fileSize": fmt.Sprint(file.Size),
"sliceSize": fmt.Sprint(DEFAULT),
"lazyCheck": "1",
}

fullUrl := UPLOAD_URL
if isFamily(account) {
params.Set("familyId", account.SiteId)
fullUrl += "/family"
} else {
//params.Set("extend", `{"opScene":"1","relativepath":"","rootfolderid":""}`)
fullUrl += "/person"
}

var initMultiUpload InitMultiUploadResp
_, err := state.Request(http.MethodGet, fullUrl+"/initMultiUpload", params, func(r *resty.Request) { r.SetQueryParams(clientSuffix()).SetResult(&initMultiUpload) }, account)
if err != nil {
return err
}

fileMd5 := md5.New()
silceMd5 := md5.New()
silceMd5Hexs := make([]string, 0, count)
byteData := bytes.NewBuffer(make([]byte, DEFAULT))
for i := 1; i <= count; i++ {
byteData.Reset()
silceMd5.Reset()
if n, err := io.CopyN(io.MultiWriter(fileMd5, silceMd5, byteData), file, DEFAULT); err != io.EOF && n == 0 {
return err
}
md5Bytes := silceMd5.Sum(nil)
silceMd5Hexs = append(silceMd5Hexs, strings.ToUpper(hex.EncodeToString(md5Bytes)))
silceMd5Base64 := base64.StdEncoding.EncodeToString(md5Bytes)

var uploadUrl UploadUrlsResp
_, err = state.Request(http.MethodGet, fullUrl+"/getMultiUploadUrls",
Params{"partInfo": fmt.Sprintf("%d-%s", i, silceMd5Base64), "uploadFileId": initMultiUpload.Data.UploadFileID},
func(r *resty.Request) { r.SetQueryParams(clientSuffix()).SetResult(&uploadUrl) },
account)
if err != nil {
return err
}

uploadData := uploadUrl.UploadUrls[fmt.Sprint("partNumber_", i)]
req, _ := http.NewRequest(http.MethodPut, uploadData.RequestURL, byteData)
for k, v := range ParseHttpHeader(uploadData.RequestHeader) {
req.Header.Set(k, v)
}
for k, v := range clientSuffix() {
req.URL.RawQuery += fmt.Sprintf("&%s=%s", k, v)
}
r, err := base.HttpClient.Do(req)
if err != nil {
return err
}
if r.StatusCode != http.StatusOK {
data, _ := io.ReadAll(r.Body)
return fmt.Errorf(string(data))
}
}

fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
sliceMd5Hex := fileMd5Hex
if int64(file.Size) > DEFAULT {
sliceMd5Hex = strings.ToUpper(utils.GetMD5Encode(strings.Join(silceMd5Hexs, "\n")))
}

_, err = state.Request(http.MethodGet, fullUrl+"/commitMultiUploadFile",
Params{
"uploadFileId": initMultiUpload.Data.UploadFileID,
"fileMd5": fileMd5Hex,
"sliceMd5": sliceMd5Hex,
"lazyCheck": "1",
"isLog": "0",
"opertype": "3",
},
func(r *resty.Request) { r.SetQueryParams(clientSuffix()) }, account)
return err
}

func (driver Cloud189) FastUpload(file *model.FileStream, parentFile *model.File, account *model.Account) error {
tempFile, err := ioutil.TempFile(conf.Conf.TempDir, "file-*")
if err != nil {
return err
}
defer tempFile.Close()
defer os.Remove(tempFile.Name())

// 初始化上传
state := GetState(account)

const DEFAULT int64 = 10485760
count := int(math.Ceil(float64(file.Size) / float64(DEFAULT)))

// 优先计算所需信息
fileMd5 := md5.New()
silceMd5 := md5.New()
silceMd5Hexs := make([]string, 0, count)
silceMd5Base64s := make([]string, 0, count)
for i := 1; i <= count; i++ {
silceMd5.Reset()
if n, err := io.CopyN(io.MultiWriter(fileMd5, silceMd5, tempFile), file, DEFAULT); err != nil && n == 0 {
return err
}
md5Byte := silceMd5.Sum(nil)
silceMd5Hexs = append(silceMd5Hexs, strings.ToUpper(hex.EncodeToString(md5Byte)))
silceMd5Base64s = append(silceMd5Base64s, fmt.Sprint(i, "-", base64.StdEncoding.EncodeToString(md5Byte)))
}
fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
sliceMd5Hex := fileMd5Hex
if int64(file.Size) > DEFAULT {
sliceMd5Hex = strings.ToUpper(utils.GetMD5Encode(strings.Join(silceMd5Hexs, "\n")))
}

params := Params{
"parentFolderId": parentFile.Id,
"fileName": url.PathEscape(file.Name),
"fileSize": fmt.Sprint(file.Size),
"fileMd5": fileMd5Hex,
"sliceSize": fmt.Sprint(DEFAULT),
"sliceMd5": sliceMd5Hex,
}

fullUrl := UPLOAD_URL
if isFamily(account) {
params.Set("familyId", account.SiteId)
fullUrl += "/family"
} else {
//params.Set("extend", `{"opScene":"1","relativepath":"","rootfolderid":""}`)
fullUrl += "/person"
}

var uploadInfo InitMultiUploadResp
_, err = state.Request(http.MethodGet, fullUrl+"/initMultiUpload", params, func(r *resty.Request) { r.SetQueryParams(clientSuffix()).SetResult(&uploadInfo) }, account)
if err != nil {
return err
}

if uploadInfo.Data.FileDataExists != 1 {
var uploadUrls UploadUrlsResp
_, err := state.Request(http.MethodGet, fullUrl+"/getMultiUploadUrls",
Params{
"uploadFileId": uploadInfo.Data.UploadFileID,
"partInfo": strings.Join(silceMd5Base64s, ","),
},
func(r *resty.Request) { r.SetQueryParams(clientSuffix()).SetResult(&uploadUrls) },
account)
if err != nil {
return err
}
for i := 1; i <= count; i++ {
uploadData := uploadUrls.UploadUrls[fmt.Sprint("partNumber_", i)]
req, _ := http.NewRequest(http.MethodPut, uploadData.RequestURL, io.NewSectionReader(tempFile, int64(i-1)*DEFAULT, DEFAULT))
for k, v := range ParseHttpHeader(uploadData.RequestHeader) {
req.Header.Set(k, v)
}
for k, v := range clientSuffix() {
req.URL.RawQuery += fmt.Sprintf("&%s=%s", k, v)
}
r, err := base.HttpClient.Do(req)
if err != nil {
return err
}
if r.StatusCode != http.StatusOK {
data, _ := io.ReadAll(r.Body)
return fmt.Errorf(string(data))
}
}
}

_, err = state.Request(http.MethodGet, fullUrl+"/commitMultiUploadFile",
Params{
"uploadFileId": uploadInfo.Data.UploadFileID,
"isLog": "0",
"opertype": "3",
},
func(r *resty.Request) { r.SetQueryParams(clientSuffix()) },
account)
return err
}

/*
func (driver Cloud189) uploadFamily(file *model.FileStream, parentFile *model.File, account *model.Account) error {
tempFile, err := ioutil.TempFile(conf.Conf.TempDir, "file-*")
if err != nil {

@@ -557,7 +760,7 @@ func (driver Cloud189) uploadFamily(file *model.FileStream, parentFile *model.Fi

client := GetState(account)
var createUpload CreateUploadFileResult
_, err = client.Request("GET", API_URL+"/family/file/createFamilyFile.action", nil, func(r *resty.Request) {
_, err = client.Request(http.MethodGet, API_URL+"/family/file/createFamilyFile.action", nil, func(r *resty.Request) {
r.SetQueryParams(map[string]string{
"fileMd5": hex.EncodeToString(fileMd5.Sum(nil)),
"fileName": file.Name,

@@ -579,7 +782,7 @@ func (driver Cloud189) uploadFamily(file *model.FileStream, parentFile *model.Fi
}
}

_, err = client.Request("GET", createUpload.FileCommitUrl, nil, func(r *resty.Request) {
_, err = client.Request(http.MethodGet, createUpload.FileCommitUrl, nil, func(r *resty.Request) {
r.SetQueryParams(clientSuffix())
r.SetHeaders(map[string]string{
"FamilyId": account.SiteId,

@@ -606,7 +809,7 @@ func (driver Cloud189) uploadPerson(file *model.FileStream, parentFile *model.Fi

client := GetState(account)
var createUpload CreateUploadFileResult
_, err = client.Request("POST", API_URL+"/createUploadFile.action", nil, func(r *resty.Request) {
_, err = client.Request(http.MethodPost, API_URL+"/createUploadFile.action", nil, func(r *resty.Request) {
r.SetQueryParams(clientSuffix())
r.SetFormData(clientSuffix()).SetFormData(map[string]string{
"parentFolderId": parentFile.Id,

@@ -634,7 +837,7 @@ func (driver Cloud189) uploadPerson(file *model.FileStream, parentFile *model.Fi
}
}

_, err = client.Request("POST", createUpload.FileCommitUrl, nil, func(r *resty.Request) {
_, err = client.Request(http.MethodPost, createUpload.FileCommitUrl, nil, func(r *resty.Request) {
r.SetQueryParams(clientSuffix())
r.SetFormData(map[string]string{
"uploadFileId": fmt.Sprint(createUpload.UploadFileId),

@@ -689,7 +892,7 @@ func (driver Cloud189) getUploadFileState(uploadFileId int64, account *model.Acc
fullUrl += "/getUploadFileStatus.action"
}
var uploadFileState UploadFileStatusResult
_, err := GetState(account).Request("GET", fullUrl, nil, func(r *resty.Request) {
_, err := GetState(account).Request(http.MethodGet, fullUrl, nil, func(r *resty.Request) {
r.SetQueryParams(clientSuffix())
r.SetQueryParams(map[string]string{
"uploadFileId": fmt.Sprint(uploadFileId),

@@ -704,128 +907,6 @@ func (driver Cloud189) getUploadFileState(uploadFileId int64, account *model.Acc
return nil, err
}
return &uploadFileState, nil
}
}*/

/*
暂时未解决
func (driver Cloud189) Upload(file *model.FileStream, account *model.Account) error {
if file == nil {
return base.ErrEmptyFile
}

parentFile, err := driver.File(file.ParentPath, account)
if err != nil {
return err
}
if !parentFile.IsDir() {
return base.ErrNotFolder
}

fullUrl := UPLOAD_URL
if isFamily(account) {
fullUrl += "/family"
} else {
fullUrl += "/person"
}

tempFile, err := ioutil.TempFile(conf.Conf.TempDir, "file-*")
if err != nil {
return err
}

defer tempFile.Close()
defer os.Remove(tempFile.Name())

// 初始化上传
const DEFAULT int64 = 10485760
count := int64(math.Ceil(float64(file.Size) / float64(DEFAULT)))
fileMd5 := md5.New()
silceMd5 := md5.New()
silceMd5Hexs := make([]string, 0, count)
silceMd5Base64s := make([]string, 0, count)
for i := int64(1); i <= count; i++ {
if _, err := io.CopyN(io.MultiWriter(fileMd5, silceMd5, tempFile), file, DEFAULT); err != io.EOF {
return err
}
md5Byte := silceMd5.Sum(nil)
silceMd5Hexs = append(silceMd5Hexs, strings.ToUpper(hex.EncodeToString(md5Byte)))
silceMd5Base64s = append(silceMd5Base64s, fmt.Sprint(i, "-", base64.StdEncoding.EncodeToString(md5Byte)))
}
fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
sliceMd5Hex := fileMd5Hex
if int64(file.Size) > DEFAULT {
sliceMd5Hex = strings.ToUpper(utils.GetMD5Encode(strings.Join(silceMd5Hexs, "\n")))
}

qID := uuid.NewString()
client := GetState(account)
param := MapToUrlValues(map[string]interface{}{
"parentFolderId": parentFile.Id,
"fileName": url.QueryEscape(file.Name),
"fileMd5": fileMd5Hex,
"fileSize": fmt.Sprint(file.Size),
"sliceMd5": sliceMd5Hex,
"sliceSize": fmt.Sprint(DEFAULT),
})
if isFamily(account) {
param.Set("familyId", account.SiteId)
}

var uploadInfo InitMultiUploadResp
_, err = client.Request("GET", fullUrl+"/initMultiUpload", param, func(r *resty.Request) {
r.SetQueryParams(clientSuffix())
r.SetHeader("X-Request-ID", qID)
r.SetResult(&uploadInfo)
}, account)
if err != nil {
return err
}

if uploadInfo.Data.FileDataExists != 1 {
param = MapToUrlValues(map[string]interface{}{
"uploadFileId": uploadInfo.Data.UploadFileID,
"partInfo": strings.Join(silceMd5Base64s, ","),
})
if isFamily(account) {
param.Set("familyId", account.SiteId)
}
var uploadUrls UploadUrlsResp
_, err := client.Request("GET", fullUrl+"/getMultiUploadUrls", param, func(r *resty.Request) {
r.SetQueryParams(clientSuffix())
r.SetHeader("X-Request-ID", qID).SetHeader("content-type", "application/x-www-form-urlencoded")
r.SetResult(&uploadUrls)

}, account)
if err != nil {
return err
}
var i int64
for _, uploadurl := range uploadUrls.UploadUrls {
req := resty.New().SetTLSClientConfig(&tls.Config{InsecureSkipVerify: true}).SetProxy("http://192.168.0.30:8888").R()
for _, header := range strings.Split(decodeURIComponent(uploadurl.RequestHeader), "&") {
i := strings.Index(header, "=")
req.SetHeader(header[0:i], header[i+1:])
}
_, err := req.SetBody(io.NewSectionReader(tempFile, i*DEFAULT, DEFAULT)).Put(uploadurl.RequestURL)
if err != nil {
return err
}
}
}

param = MapToUrlValues(map[string]interface{}{
"uploadFileId": uploadInfo.Data.UploadFileID,
"isLog": "0",
"opertype": "1",
})
if isFamily(account) {
param.Set("familyId", account.SiteId)
}
_, err = client.Request("GET", fullUrl+"/commitMultiUploadFile", param, func(r *resty.Request) {
r.SetHeader("X-Request-ID", qID)
r.SetQueryParams(clientSuffix())
}, account)
return err
}
*/
var _ base.Driver = (*Cloud189)(nil)

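The `CommonUpload`/`FastUpload` hunks above stream each 10 MiB slice through both a whole-file MD5 and a per-slice MD5 via `io.MultiWriter`, so the data is only read once. A self-contained sketch of just that hashing pattern, with a tiny in-memory input and slice size chosen purely for illustration:

```go
package main

import (
	"bytes"
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"io"
	"strings"
)

func main() {
	const sliceSize = 4 // illustrative; the driver uses 10485760 (10 MiB)
	src := bytes.NewReader([]byte("0123456789abcdef"))

	fileMd5 := md5.New()
	sliceMd5 := md5.New()
	var sliceHexs []string
	for {
		sliceMd5.Reset()
		// One read feeds both hashes at once.
		n, err := io.CopyN(io.MultiWriter(fileMd5, sliceMd5), src, sliceSize)
		if n > 0 {
			sliceHexs = append(sliceHexs, strings.ToUpper(hex.EncodeToString(sliceMd5.Sum(nil))))
		}
		if err != nil { // io.EOF ends the loop
			break
		}
	}
	fmt.Println("fileMd5:", strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil))))
	fmt.Println("slices:", sliceHexs)
}
```
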
@@ -137,6 +137,7 @@ type BatchTaskInfo struct {
//SrcParentId string `json:"srcParentId"`
}

/*
type CreateUploadFileResult struct {
// UploadFileId 上传文件请求ID
UploadFileId int64 `json:"uploadFileId"`

@@ -157,8 +158,8 @@ type UploadFileStatusResult struct {
FileCommitUrl string `json:"fileCommitUrl"`
FileDataExists int `json:"fileDataExists"`
}
*/

/*
type InitMultiUploadResp struct {
//Code string `json:"code"`
Data struct {

@@ -177,4 +178,3 @@ type Part struct {
RequestURL string `json:"requestURL"`
RequestHeader string `json:"requestHeader"`
}
*/

@@ -15,6 +15,7 @@ import (
rand2 "math/rand"
"net/http"
"net/url"
"sort"
"strings"
"time"

@@ -118,19 +119,17 @@ func toFamilyOrderBy(o string) string {
}
}

func MapToUrlValues(m map[string]interface{}) url.Values {
url := make(url.Values, len(m))
for k, v := range m {
url.Add(k, fmt.Sprint(v))
func ParseHttpHeader(str string) map[string]string {
header := make(map[string]string)
for _, value := range strings.Split(str, "&") {
i := strings.Index(value, "=")
header[strings.TrimSpace(value[0:i])] = strings.TrimSpace(value[i+1:])
}
return url
return header
}

func decodeURIComponent(str string) string {
r, _ := url.QueryUnescape(str)
//r, _ := url.PathUnescape(str)
//r = strings.ReplaceAll(r, " ", "+")
return r
func MustString(str string, err error) string {
return str
}

func MustToBytes(b []byte, err error) []byte {

@@ -143,3 +142,36 @@ func BoolToNumber(b bool) int {
}
return 0
}

func MustParseTime(str string) *time.Time {
loc, _ := time.LoadLocation("Local")
lastOpTime, _ := time.ParseInLocation("2006-01-02 15:04:05", str, loc)
return &lastOpTime
}

type Params map[string]string

func (p Params) Set(k, v string) {
p[k] = v
}

func (p Params) Encode() string {
if p == nil {
return ""
}
var buf strings.Builder
keys := make([]string, 0, len(p))
for k := range p {
keys = append(keys, k)
}
sort.Strings(keys)
for _, k := range keys {
if buf.Len() > 0 {
buf.WriteByte('&')
}
buf.WriteString(k)
buf.WriteByte('=')
buf.WriteString(p[k])
}
return buf.String()
}

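A short usage sketch of the helpers added above: `Params.Encode` emits keys in sorted order (so the encoded string, and presumably the HMAC signature built from it, is deterministic), and `MustParseTime` expects the API's `2006-01-02 15:04:05` layout in local time. The snippet re-declares a minimal `Params` so it runs standalone; the values are made up:

```go
package main

import (
	"fmt"
	"sort"
	"strings"
	"time"
)

// Params mirrors the helper above: a string map with sorted URL encoding.
type Params map[string]string

func (p Params) Encode() string {
	keys := make([]string, 0, len(p))
	for k := range p {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	var b strings.Builder
	for _, k := range keys {
		if b.Len() > 0 {
			b.WriteByte('&')
		}
		b.WriteString(k + "=" + p[k])
	}
	return b.String()
}

func main() {
	p := Params{"pageSize": "130", "folderId": "-11", "pageNum": "1"}
	fmt.Println(p.Encode()) // folderId=-11&pageNum=1&pageSize=130, always in the same order

	// MustParseTime above uses this layout in local time.
	t, _ := time.ParseInLocation("2006-01-02 15:04:05", "2022-04-03 12:30:00", time.Local)
	fmt.Println(t.Unix())
}
```
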
@@ -3,7 +3,6 @@ package alidrive
import (
"errors"
"fmt"
"github.com/Xhofe/alist/conf"
"github.com/Xhofe/alist/drivers/base"
"github.com/Xhofe/alist/model"
"github.com/Xhofe/alist/utils"

@@ -11,36 +10,10 @@ import (
jsoniter "github.com/json-iterator/go"
log "github.com/sirupsen/logrus"
"path/filepath"
"time"
)

var aliClient = resty.New()

type AliRespError struct {
Code string `json:"code"`
Message string `json:"message"`
}

type AliFiles struct {
Items []AliFile `json:"items"`
NextMarker string `json:"next_marker"`
}

type AliFile struct {
DriveId string `json:"drive_id"`
CreatedAt *time.Time `json:"created_at"`
FileExtension string `json:"file_extension"`
FileId string `json:"file_id"`
Type string `json:"type"`
Name string `json:"name"`
Category string `json:"category"`
ParentFileId string `json:"parent_file_id"`
UpdatedAt *time.Time `json:"updated_at"`
Size int64 `json:"size"`
Thumbnail string `json:"thumbnail"`
Url string `json:"url"`
}

func (driver AliDrive) FormatFile(file *AliFile) *model.File {
f := &model.File{
Id: file.FileId,

@@ -51,17 +24,7 @@ func (driver AliDrive) FormatFile(file *AliFile) *model.File {
Driver: driver.Config().Name,
Url: file.Url,
}
if file.Type == "folder" {
f.Type = conf.FOLDER
} else {
f.Type = utils.GetFileType(file.FileExtension)
}
if file.Category == "video" {
f.Type = conf.VIDEO
}
if file.Category == "image" {
f.Type = conf.IMAGE
}
f.Type = file.GetType()
return f
}

@@ -66,6 +66,11 @@ func (driver AliDrive) Items() []base.Item {
Required: false,
Description: ">0 and <=200",
},
{
Name: "bool_1",
Label: "fast upload",
Type: base.TypeBool,
},
}
}

@@ -371,7 +376,7 @@ func (driver AliDrive) Delete(path string, account *model.Account) error {
}
return fmt.Errorf("%s", e.Message)
}
if res.StatusCode() == 204 {
if res.StatusCode() < 400 {
return nil
}
return errors.New(res.String())

@@ -391,8 +396,7 @@ func (driver AliDrive) Upload(file *model.FileStream, account *model.Account) er
if file == nil {
return base.ErrEmptyFile
}
const DEFAULT int64 = 10485760
var count = int64(math.Ceil(float64(file.GetSize()) / float64(DEFAULT)))

parentFile, err := driver.File(file.ParentPath, account)
if err != nil {
return err

@@ -401,16 +405,14 @@ func (driver AliDrive) Upload(file *model.FileStream, account *model.Account) er
return base.ErrNotFolder
}

const DEFAULT int64 = 10485760
var count = int(math.Ceil(float64(file.GetSize()) / float64(DEFAULT)))

partInfoList := make([]base.Json, 0, count)
var i int64
for i = 0; i < count; i++ {
partInfoList = append(partInfoList, base.Json{
"part_number": i + 1,
})
for i := 1; i <= count; i++ {
partInfoList = append(partInfoList, base.Json{"part_number": i})
}

buf := make([]byte, 1024)
n, _ := file.Read(buf[:])
reqBody := base.Json{
"check_name_mode": "overwrite",
"drive_id": account.DriveId,

@@ -419,9 +421,17 @@ func (driver AliDrive) Upload(file *model.FileStream, account *model.Account) er
"part_info_list": partInfoList,
"size": file.GetSize(),
"type": "file",
"pre_hash": utils.GetSHA1Encode(string(buf[:n])),
}
fileReader := io.MultiReader(bytes.NewReader(buf[:n]), file.File)

if account.Bool1 {
buf := make([]byte, 1024)
n, _ := file.Read(buf[:])
reqBody["pre_hash"] = utils.GetSHA1Encode(string(buf[:n]))
file.File = io.NopCloser(io.MultiReader(bytes.NewReader(buf[:n]), file.File))
} else {
reqBody["content_hash_name"] = "none"
reqBody["proof_version"] = "v1"
}

var resp UploadResp
var e AliRespError

@@ -444,7 +454,7 @@ func (driver AliDrive) Upload(file *model.FileStream, account *model.Account) er
return fmt.Errorf("%s", e.Message)
}

if e.Code == "PreHashMatched" {
if e.Code == "PreHashMatched" && account.Bool1 {
tempFile, err := ioutil.TempFile(conf.Conf.TempDir, "file-*")
if err != nil {
return err

@@ -455,7 +465,7 @@ func (driver AliDrive) Upload(file *model.FileStream, account *model.Account) er

delete(reqBody, "pre_hash")
h := sha1.New()
if _, err = io.Copy(tempFile, io.TeeReader(fileReader, h)); err != nil {
if _, err = io.Copy(io.MultiWriter(tempFile, h), file.File); err != nil {
return err
}
reqBody["content_hash"] = hex.EncodeToString(h.Sum(nil))

@@ -470,10 +480,11 @@ func (driver AliDrive) Upload(file *model.FileStream, account *model.Account) er
o = i ? r.mod(i) : new gt.BigNumber(0);
(t.file.slice(o.toNumber(), Math.min(o.plus(8).toNumber(), t.file.size)))
*/
buf := make([]byte, 8)
r, _ := new(big.Int).SetString(utils.GetMD5Encode(account.AccessToken)[:16], 16)
i := new(big.Int).SetUint64(file.Size)
o := r.Mod(r, i)
n, _ = io.NewSectionReader(tempFile, o.Int64(), 8).Read(buf[:8])
n, _ := io.NewSectionReader(tempFile, o.Int64(), 8).Read(buf[:8])
reqBody["proof_code"] = base64.StdEncoding.EncodeToString(buf[:n])

_, err = client.Post("https://api.aliyundrive.com/adrive/v2/file/createWithFolders")

@@ -491,11 +502,11 @@ func (driver AliDrive) Upload(file *model.FileStream, account *model.Account) er
if _, err = tempFile.Seek(0, io.SeekStart); err != nil {
return err
}
fileReader = tempFile
file.File = tempFile
}

for i = 0; i < count; i++ {
req, err := http.NewRequest("PUT", resp.PartInfoList[i].UploadUrl, io.LimitReader(fileReader, DEFAULT))
for _, partInfo := range resp.PartInfoList {
req, err := http.NewRequest("PUT", partInfo.UploadUrl, io.LimitReader(file.File, DEFAULT))
if err != nil {
return err
}

@@ -523,7 +534,7 @@ func (driver AliDrive) Upload(file *model.FileStream, account *model.Account) er
if err != nil {
return err
}
if e.Code != "" {
if e.Code != "" && e.Code != "PreHashMatched" {
//if e.Code == "AccessTokenInvalid" {
// err = driver.RefreshToken(account)
// if err != nil {

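For context on the `proof_code` block above: it samples 8 bytes of the file at an offset derived from the MD5 of the access token, mirroring the quoted JavaScript. A self-contained sketch of the offset arithmetic only, with made-up inputs:

```go
package main

import (
	"crypto/md5"
	"encoding/base64"
	"encoding/hex"
	"fmt"
	"math/big"
	"strings"
)

func main() {
	accessToken := "example-token" // made-up value
	data := []byte(strings.Repeat("x", 1000))
	fileSize := uint64(len(data))

	// offset = (first 16 hex chars of md5(accessToken), read as a big integer) mod fileSize
	sum := md5.Sum([]byte(accessToken))
	r, _ := new(big.Int).SetString(hex.EncodeToString(sum[:])[:16], 16)
	o := r.Mod(r, new(big.Int).SetUint64(fileSize)).Int64()

	end := o + 8
	if end > int64(fileSize) {
		end = int64(fileSize)
	}
	proof := base64.StdEncoding.EncodeToString(data[o:end])
	fmt.Println("offset:", o, "proof_code:", proof)
}
```
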
@@ -0,0 +1,53 @@
package alidrive

import (
"github.com/Xhofe/alist/conf"
"github.com/Xhofe/alist/utils"
"time"
)

type AliRespError struct {
Code string `json:"code"`
Message string `json:"message"`
}

type AliFiles struct {
Items []AliFile `json:"items"`
NextMarker string `json:"next_marker"`
}

type AliFile struct {
DriveId string `json:"drive_id"`
CreatedAt *time.Time `json:"created_at"`
FileExtension string `json:"file_extension"`
FileId string `json:"file_id"`
Type string `json:"type"`
Name string `json:"name"`
Category string `json:"category"`
ParentFileId string `json:"parent_file_id"`
UpdatedAt *time.Time `json:"updated_at"`
Size int64 `json:"size"`
Thumbnail string `json:"thumbnail"`
Url string `json:"url"`
}

func (f AliFile) GetSize() uint64 {
return uint64(f.Size)
}

func (f AliFile) GetName() string {
return f.Name
}

func (f AliFile) GetType() int {
if f.Type == "folder" {
return conf.FOLDER
}
if f.Category == "video" {
return conf.VIDEO
}
if f.Category == "image" {
return conf.IMAGE
}
return utils.GetFileType(f.FileExtension)
}

@ -18,6 +18,7 @@ import (
|
|||
_ "github.com/Xhofe/alist/drivers/pikpak"
|
||||
_ "github.com/Xhofe/alist/drivers/quark"
|
||||
_ "github.com/Xhofe/alist/drivers/s3"
|
||||
_ "github.com/Xhofe/alist/drivers/sftp"
|
||||
_ "github.com/Xhofe/alist/drivers/shandian"
|
||||
_ "github.com/Xhofe/alist/drivers/teambition"
|
||||
_ "github.com/Xhofe/alist/drivers/uss"
|
||||
|
|
|
@@ -48,8 +48,7 @@ func (driver Baidu) refreshToken(account *model.Account) error {
return nil
}

func (driver Baidu) Request(pathname string, method int, headers, query, form map[string]string, data interface{}, resp interface{}, account *model.Account) ([]byte, error) {
u := "https://pan.baidu.com/rest/2.0" + pathname
func (driver Baidu) Request(fullurl string, method int, headers, query, form map[string]string, data interface{}, resp interface{}, account *model.Account) ([]byte, error) {
req := base.RestyClient.R()
req.SetQueryParam("access_token", account.AccessToken)
if headers != nil {

@@ -71,15 +70,15 @@ func (driver Baidu) Request(pathname string, method int, headers, query, form ma
var err error
switch method {
case base.Get:
res, err = req.Get(u)
res, err = req.Get(fullurl)
case base.Post:
res, err = req.Post(u)
res, err = req.Post(fullurl)
case base.Patch:
res, err = req.Patch(u)
res, err = req.Patch(fullurl)
case base.Delete:
res, err = req.Delete(u)
res, err = req.Delete(fullurl)
case base.Put:
res, err = req.Put(u)
res, err = req.Put(fullurl)
default:
return nil, base.ErrNotSupport
}

@@ -94,7 +93,7 @@ func (driver Baidu) Request(pathname string, method int, headers, query, form ma
if err != nil {
return nil, err
}
return driver.Request(pathname, method, headers, query, form, data, resp, account)
return driver.Request(fullurl, method, headers, query, form, data, resp, account)
}
return nil, fmt.Errorf("errno: %d, refer to https://pan.baidu.com/union/doc/", errno)
}

@@ -102,11 +101,11 @@ func (driver Baidu) Request(pathname string, method int, headers, query, form ma
}

func (driver Baidu) Get(pathname string, params map[string]string, resp interface{}, account *model.Account) ([]byte, error) {
return driver.Request(pathname, base.Get, nil, params, nil, nil, resp, account)
return driver.Request("https://pan.baidu.com/rest/2.0"+pathname, base.Get, nil, params, nil, nil, resp, account)
}

func (driver Baidu) Post(pathname string, params map[string]string, data interface{}, resp interface{}, account *model.Account) ([]byte, error) {
return driver.Request(pathname, base.Post, nil, params, nil, data, resp, account)
return driver.Request("https://pan.baidu.com/rest/2.0"+pathname, base.Post, nil, params, nil, data, resp, account)
}

func (driver Baidu) manage(opera string, filelist interface{}, account *model.Account) ([]byte, error) {

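The refactor above moves the `https://pan.baidu.com/rest/2.0` prefix out of `Request` and into the `Get`/`Post` wrappers, so callers can also pass full URLs outside `/rest/2.0` (as `LinkCrack` later does with `/api/filemetas`) while reusing the same errno and token-refresh handling. A schematic sketch of the pattern (functions simplified and hypothetical):

```go
package main

import "fmt"

// request is the low-level call; it now takes a full URL.
func request(fullURL string) {
	fmt.Println("calling", fullURL)
}

// get keeps the ergonomic path-based API for the common /rest/2.0 case.
func get(pathname string) {
	request("https://pan.baidu.com/rest/2.0" + pathname)
}

func main() {
	get("/xpan/file")                              // -> https://pan.baidu.com/rest/2.0/xpan/file
	request("https://pan.baidu.com/api/filemetas") // endpoints outside /rest/2.0 go straight through
}
```
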
@@ -58,6 +58,14 @@ func (driver Baidu) Items() []base.Item {
Default: "asc",
Required: false,
},
{
Name: "internal_type",
Label: "download api",
Type: base.TypeSelect,
Required: true,
Values: "official,crack",
Default: "official",
},
{
Name: "client_id",
Label: "client id",

@@ -125,6 +133,13 @@ func (driver Baidu) Files(path string, account *model.Account) ([]model.File, er
}

func (driver Baidu) Link(args base.Args, account *model.Account) (*base.Link, error) {
if account.InternalType == "crack" {
return driver.LinkCrack(args, account)
}
return driver.LinkOfficial(args, account)
}

func (driver Baidu) LinkOfficial(args base.Args, account *model.Account) (*base.Link, error) {
file, err := driver.File(args.Path, account)
if err != nil {
return nil, err

@@ -157,6 +172,32 @@ func (driver Baidu) Link(args base.Args, account *model.Account) (*base.Link, er
}}, nil
}

func (driver Baidu) LinkCrack(args base.Args, account *model.Account) (*base.Link, error) {
file, err := driver.File(args.Path, account)
if err != nil {
return nil, err
}
if file.IsDir() {
return nil, base.ErrNotFile
}
var resp DownloadResp2
param := map[string]string{
"target": fmt.Sprintf("[\"%s\"]", utils.Join(account.RootFolder, args.Path)),
"dlink": "1",
"web": "5",
"origin": "dlna",
}
_, err = driver.Request("https://pan.baidu.com/api/filemetas", base.Get, nil, param, nil, nil, &resp, account)
if err != nil {
return nil, err
}
return &base.Link{
Url: resp.Info[0].Dlink,
Headers: []base.Header{
{Name: "User-Agent", Value: "pan.baidu.com"},
}}, nil
}

func (driver Baidu) Path(path string, account *model.Account) (*model.File, []model.File, error) {
file, err := driver.File(path, account)
if err != nil {

@ -74,6 +74,65 @@ type DownloadResp struct {
|
|||
RequestId string `json:"request_id"`
|
||||
}
|
||||
|
||||
type DownloadResp2 struct {
|
||||
Errno int `json:"errno"`
|
||||
Info []struct {
|
||||
//ExtentTinyint4 int `json:"extent_tinyint4"`
|
||||
//ExtentTinyint1 int `json:"extent_tinyint1"`
|
||||
//Bitmap string `json:"bitmap"`
|
||||
//Category int `json:"category"`
|
||||
//Isdir int `json:"isdir"`
|
||||
//Videotag int `json:"videotag"`
|
||||
Dlink string `json:"dlink"`
|
||||
//OperID int64 `json:"oper_id"`
|
||||
//PathMd5 int `json:"path_md5"`
|
||||
//Wpfile int `json:"wpfile"`
|
||||
//LocalMtime int `json:"local_mtime"`
|
||||
/*Thumbs struct {
|
||||
Icon string `json:"icon"`
|
||||
URL3 string `json:"url3"`
|
||||
URL2 string `json:"url2"`
|
||||
URL1 string `json:"url1"`
|
||||
} `json:"thumbs"`*/
|
||||
//PlaySource int `json:"play_source"`
|
||||
//Share int `json:"share"`
|
||||
//FileKey string `json:"file_key"`
|
||||
//Errno int `json:"errno"`
|
||||
//LocalCtime int `json:"local_ctime"`
|
||||
//Rotate int `json:"rotate"`
|
||||
//Metadata time.Time `json:"metadata"`
|
||||
//Height int `json:"height"`
|
||||
//SampleRate int `json:"sample_rate"`
|
||||
//Width int `json:"width"`
|
||||
//OwnerType int `json:"owner_type"`
|
||||
//Privacy int `json:"privacy"`
|
||||
//ExtentInt3 int64 `json:"extent_int3"`
|
||||
//RealCategory string `json:"real_category"`
|
||||
//SrcLocation string `json:"src_location"`
|
||||
//MetaInfo string `json:"meta_info"`
|
||||
//ID string `json:"id"`
|
||||
//Duration int `json:"duration"`
|
||||
//FileSize string `json:"file_size"`
|
||||
//Channels int `json:"channels"`
|
||||
//UseSegment int `json:"use_segment"`
|
||||
//ServerCtime int `json:"server_ctime"`
|
||||
//Resolution string `json:"resolution"`
|
||||
//OwnerID int `json:"owner_id"`
|
||||
//ExtraInfo string `json:"extra_info"`
|
||||
//Size int `json:"size"`
|
||||
//FsID int64 `json:"fs_id"`
|
||||
//ExtentTinyint3 int `json:"extent_tinyint3"`
|
||||
//Md5 string `json:"md5"`
|
||||
//Path string `json:"path"`
|
||||
//FrameRate int `json:"frame_rate"`
|
||||
//ExtentTinyint2 int `json:"extent_tinyint2"`
|
||||
//ServerFilename string `json:"server_filename"`
|
||||
//ServerMtime int `json:"server_mtime"`
|
||||
//TkbindID int `json:"tkbind_id"`
|
||||
} `json:"info"`
|
||||
RequestID int64 `json:"request_id"`
|
||||
}
|
||||
|
||||
type PrecreateResp struct {
|
||||
Path string `json:"path"`
|
||||
Uploadid string `json:"uploadid"`
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
package base
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/Xhofe/alist/conf"
|
||||
"github.com/Xhofe/alist/model"
|
||||
"github.com/Xhofe/alist/utils"
|
||||
|
@ -9,12 +8,39 @@ import (
|
|||
)
|
||||
|
||||
func KeyCache(path string, account *model.Account) string {
|
||||
path = utils.ParsePath(path)
|
||||
return fmt.Sprintf("%s%s", account.Name, path)
|
||||
//path = utils.ParsePath(path)
|
||||
key := utils.ParsePath(utils.Join(account.Name, path))
|
||||
log.Debugln("cache key: ", key)
|
||||
return key
|
||||
}
|
||||
|
||||
func SetCache(path string, obj interface{}, account *model.Account) error {
|
||||
return conf.Cache.Set(conf.Ctx, KeyCache(path, account), obj, nil)
|
||||
func SaveSearchFiles[T model.ISearchFile](key string, obj []T) {
|
||||
err := model.DeleteSearchFilesByPath(key)
|
||||
if err != nil {
|
||||
log.Errorln("failed create search files", err)
|
||||
return
|
||||
}
|
||||
files := make([]model.SearchFile, len(obj))
|
||||
for i := 0; i < len(obj); i++ {
|
||||
files[i] = model.SearchFile{
|
||||
Path: key,
|
||||
Name: obj[i].GetName(),
|
||||
Size: obj[i].GetSize(),
|
||||
Type: obj[i].GetType(),
|
||||
}
|
||||
}
|
||||
err = model.CreateSearchFiles(files)
|
||||
if err != nil {
|
||||
log.Errorln("failed create search files", err)
|
||||
}
|
||||
}
|
||||
|
||||
func SetCache[T model.ISearchFile](path string, obj []T, account *model.Account) error {
|
||||
key := KeyCache(path, account)
|
||||
if conf.GetBool("enable search") {
|
||||
go SaveSearchFiles(key, obj)
|
||||
}
|
||||
return conf.Cache.Set(conf.Ctx, key, obj, nil)
|
||||
}
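This type-parameterized SetCache is what pushes the module to Go 1.18: any slice of driver file values satisfying model.ISearchFile can now be cached and, when the "enable search" setting is on, mirrored into the search table in the same call. A minimal sketch under those assumptions, using a hypothetical MyFile type that is not part of the codebase:

package example // illustrative only

import (
	"path/filepath"

	"github.com/Xhofe/alist/drivers/base"
	"github.com/Xhofe/alist/model"
	"github.com/Xhofe/alist/utils"
)

// MyFile is a hypothetical driver file type; the three methods below are
// exactly what model.ISearchFile requires.
type MyFile struct {
	Name string
	Size uint64
}

func (f MyFile) GetName() string { return f.Name }
func (f MyFile) GetSize() uint64 { return f.Size }
func (f MyFile) GetType() int    { return utils.GetFileType(filepath.Ext(f.Name)) }

// cacheListing caches a directory listing; with search enabled, SetCache also
// writes the entries to the search index in a background goroutine.
func cacheListing(path string, files []MyFile, account *model.Account) error {
	return base.SetCache(path, files, account) // T is inferred as MyFile
}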
|
||||
|
||||
func GetCache(path string, account *model.Account) (interface{}, error) {
|
||||
|
|
|
@ -2,15 +2,10 @@ package google
|
|||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/Xhofe/alist/conf"
|
||||
"github.com/Xhofe/alist/drivers/base"
|
||||
"github.com/Xhofe/alist/model"
|
||||
"github.com/Xhofe/alist/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
type TokenError struct {
|
||||
|
@ -44,19 +39,6 @@ func (driver GoogleDrive) RefreshToken(account *model.Account) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
type File struct {
|
||||
Id string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
MimeType string `json:"mimeType"`
|
||||
ModifiedTime *time.Time `json:"modifiedTime"`
|
||||
Size string `json:"size"`
|
||||
ThumbnailLink string `json:"thumbnailLink"`
|
||||
}
|
||||
|
||||
func (driver GoogleDrive) IsDir(mimeType string) bool {
|
||||
return mimeType == "application/vnd.google-apps.folder" || mimeType == "application/vnd.google-apps.shortcut"
|
||||
}
|
||||
|
||||
func (driver GoogleDrive) FormatFile(file *File, account *model.Account) *model.File {
|
||||
f := &model.File{
|
||||
Id: file.Id,
|
||||
|
@ -65,13 +47,8 @@ func (driver GoogleDrive) FormatFile(file *File, account *model.Account) *model.
|
|||
UpdatedAt: file.ModifiedTime,
|
||||
Url: "",
|
||||
}
|
||||
if driver.IsDir(file.MimeType) {
|
||||
f.Type = conf.FOLDER
|
||||
} else {
|
||||
size, _ := strconv.ParseInt(file.Size, 10, 64)
|
||||
f.Size = size
|
||||
f.Type = utils.GetFileType(filepath.Ext(file.Name))
|
||||
}
|
||||
f.Size = int64(file.GetSize())
|
||||
f.Type = file.GetType()
|
||||
if file.ThumbnailLink != "" {
|
||||
if account.APIProxyUrl != "" {
|
||||
f.Thumbnail = fmt.Sprintf("%s/%s", account.APIProxyUrl, file.ThumbnailLink)
|
||||
|
|
|
@ -0,0 +1,38 @@
|
|||
package google
|
||||
|
||||
import (
|
||||
"github.com/Xhofe/alist/conf"
|
||||
"github.com/Xhofe/alist/utils"
|
||||
"path"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
type File struct {
|
||||
Id string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
MimeType string `json:"mimeType"`
|
||||
ModifiedTime *time.Time `json:"modifiedTime"`
|
||||
Size string `json:"size"`
|
||||
ThumbnailLink string `json:"thumbnailLink"`
|
||||
}
|
||||
|
||||
func (f File) GetSize() uint64 {
|
||||
if f.GetType() == conf.FOLDER {
|
||||
return 0
|
||||
}
|
||||
size, _ := strconv.ParseUint(f.Size, 10, 64)
|
||||
return size
|
||||
}
|
||||
|
||||
func (f File) GetName() string {
|
||||
return f.Name
|
||||
}
|
||||
|
||||
func (f File) GetType() int {
|
||||
mimeType := f.MimeType
|
||||
if mimeType == "application/vnd.google-apps.folder" || mimeType == "application/vnd.google-apps.shortcut" {
|
||||
return conf.FOLDER
|
||||
}
|
||||
return utils.GetFileType(path.Ext(f.Name))
|
||||
}
|
|
@ -2,28 +2,15 @@ package lanzou
|
|||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/Xhofe/alist/conf"
|
||||
"github.com/Xhofe/alist/drivers/base"
|
||||
"github.com/Xhofe/alist/model"
|
||||
"github.com/Xhofe/alist/utils"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"net/url"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
type LanZouFile struct {
|
||||
Name string `json:"name"`
|
||||
NameAll string `json:"name_all"`
|
||||
Id string `json:"id"`
|
||||
FolId string `json:"fol_id"`
|
||||
Size string `json:"size"`
|
||||
Time string `json:"time"`
|
||||
Folder bool
|
||||
}
|
||||
|
||||
func (driver *Lanzou) FormatFile(file *LanZouFile) *model.File {
|
||||
now := time.Now()
|
||||
f := &model.File{
|
||||
|
@ -35,12 +22,11 @@ func (driver *Lanzou) FormatFile(file *LanZouFile) *model.File {
|
|||
UpdatedAt: &now,
|
||||
}
|
||||
if file.Folder {
|
||||
f.Type = conf.FOLDER
|
||||
f.Id = file.FolId
|
||||
} else {
|
||||
f.Name = file.NameAll
|
||||
f.Type = utils.GetFileType(filepath.Ext(file.NameAll))
|
||||
}
|
||||
f.Type = file.GetType()
|
||||
return f
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,39 @@
|
|||
package lanzou
|
||||
|
||||
import (
|
||||
"github.com/Xhofe/alist/conf"
|
||||
"github.com/Xhofe/alist/utils"
|
||||
"path"
|
||||
)
|
||||
|
||||
type LanZouFile struct {
|
||||
Name string `json:"name"`
|
||||
NameAll string `json:"name_all"`
|
||||
Id string `json:"id"`
|
||||
FolId string `json:"fol_id"`
|
||||
Size string `json:"size"`
|
||||
Time string `json:"time"`
|
||||
Folder bool
|
||||
}
|
||||
|
||||
func (f LanZouFile) GetSize() uint64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (f LanZouFile) GetName() string {
|
||||
if f.Folder {
|
||||
return f.Name
|
||||
}
|
||||
return f.NameAll
|
||||
}
|
||||
|
||||
func (f LanZouFile) GetType() int {
|
||||
if f.Folder {
|
||||
return conf.FOLDER
|
||||
}
|
||||
return utils.GetFileType(path.Ext(f.NameAll))
|
||||
}
|
||||
|
||||
type DownPageResp struct {
|
||||
Zt int `json:"zt"`
|
||||
Info struct {
|
||||
|
|
|
@ -71,7 +71,6 @@ func (driver Native) File(path string, account *model.Account) (*model.File, err
|
|||
time := f.ModTime()
|
||||
file := &model.File{
|
||||
Name: f.Name(),
|
||||
Size: f.Size(),
|
||||
UpdatedAt: &time,
|
||||
Driver: driver.Config().Name,
|
||||
}
|
||||
|
@ -79,6 +78,7 @@ func (driver Native) File(path string, account *model.Account) (*model.File, err
|
|||
file.Type = conf.FOLDER
|
||||
} else {
|
||||
file.Type = utils.GetFileType(filepath.Ext(f.Name()))
|
||||
file.Size = f.Size()
|
||||
}
|
||||
return file, nil
|
||||
}
|
||||
|
@ -115,6 +115,10 @@ func (driver Native) Files(path string, account *model.Account) ([]model.File, e
|
|||
}
|
||||
files = append(files, file)
|
||||
}
|
||||
_, err = base.GetCache(path, account)
|
||||
if len(files) != 0 && err != nil {
|
||||
_ = base.SetCache(path, files, account)
|
||||
}
|
||||
return files, nil
|
||||
}
|
||||
|
||||
|
|
|
@ -138,6 +138,7 @@ func (driver Quark) Link(args base.Args, account *model.Account) (*base.Link, er
|
|||
Url: resp.Data[0].DownloadUrl,
|
||||
Headers: []base.Header{
|
||||
{Name: "Cookie", Value: account.AccessToken},
|
||||
{Name: "Referer", Value: "https://pan.quark.cn"},
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
|
|
@ -103,7 +103,7 @@ func (driver Quark) GetFiles(parent string, account *model.Account) ([]model.Fil
|
|||
for _, f := range resp.Data.List {
|
||||
files = append(files, *driver.formatFile(&f))
|
||||
}
|
||||
if page*size >= resp.Metadata.Count {
|
||||
if page*size >= resp.Metadata.Total {
|
||||
break
|
||||
}
|
||||
page++
|
||||
|
|
|
@ -0,0 +1,220 @@
|
|||
package template
|
||||
|
||||
import (
|
||||
"github.com/Xhofe/alist/conf"
|
||||
"github.com/Xhofe/alist/drivers/base"
|
||||
"github.com/Xhofe/alist/model"
|
||||
"github.com/Xhofe/alist/utils"
|
||||
"io"
|
||||
"path"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
type SFTP struct {
|
||||
}
|
||||
|
||||
func (driver SFTP) Config() base.DriverConfig {
|
||||
return base.DriverConfig{
|
||||
Name: "SFTP",
|
||||
OnlyProxy: true,
|
||||
OnlyLocal: true,
|
||||
LocalSort: true,
|
||||
}
|
||||
}
|
||||
|
||||
func (driver SFTP) Items() []base.Item {
|
||||
// TODO: fill in the needed info
|
||||
return []base.Item{
|
||||
{
|
||||
Name: "site_url",
|
||||
Label: "ip/host",
|
||||
Type: base.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
{
|
||||
Name: "limit",
|
||||
Label: "port",
|
||||
Type: base.TypeNumber,
|
||||
Required: true,
|
||||
Default: "22",
|
||||
},
|
||||
{
|
||||
Name: "username",
|
||||
Label: "username",
|
||||
Type: base.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
{
|
||||
Name: "password",
|
||||
Label: "password",
|
||||
Type: base.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
{
|
||||
Name: "root_folder",
|
||||
Label: "root folder path",
|
||||
Type: base.TypeString,
|
||||
Default: "/",
|
||||
Required: true,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (driver SFTP) Save(account *model.Account, old *model.Account) error {
|
||||
if old != nil {
|
||||
clientsMap.Lock()
|
||||
defer clientsMap.Unlock()
|
||||
delete(clientsMap.clients, old.Name)
|
||||
}
|
||||
if account == nil {
|
||||
return nil
|
||||
}
|
||||
_, err := GetClient(account)
|
||||
if err != nil {
|
||||
account.Status = err.Error()
|
||||
} else {
|
||||
account.Status = "work"
|
||||
}
|
||||
_ = model.SaveAccount(account)
|
||||
return err
|
||||
}
|
||||
|
||||
func (driver SFTP) File(path string, account *model.Account) (*model.File, error) {
|
||||
path = utils.ParsePath(path)
|
||||
if path == "/" {
|
||||
return &model.File{
|
||||
Id: account.RootFolder,
|
||||
Name: account.Name,
|
||||
Size: 0,
|
||||
Type: conf.FOLDER,
|
||||
Driver: driver.Config().Name,
|
||||
UpdatedAt: account.UpdatedAt,
|
||||
}, nil
|
||||
}
|
||||
dir, name := filepath.Split(path)
|
||||
files, err := driver.Files(dir, account)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, file := range files {
|
||||
if file.Name == name {
|
||||
return &file, nil
|
||||
}
|
||||
}
|
||||
return nil, base.ErrPathNotFound
|
||||
}
|
||||
|
||||
func (driver SFTP) Files(path string, account *model.Account) ([]model.File, error) {
|
||||
path = utils.ParsePath(path)
|
||||
remotePath := utils.Join(account.RootFolder, path)
|
||||
cache, err := base.GetCache(path, account)
|
||||
if err == nil {
|
||||
files, _ := cache.([]model.File)
|
||||
return files, nil
|
||||
}
|
||||
client, err := GetClient(account)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var files []model.File
|
||||
rawFiles, err := client.Files(remotePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for i := 0; i < len(rawFiles); i++ {
|
||||
files = append(files, driver.formatFile(rawFiles[i]))
|
||||
}
|
||||
if len(files) > 0 {
|
||||
_ = base.SetCache(path, files, account)
|
||||
}
|
||||
return files, nil
|
||||
}
|
||||
|
||||
func (driver SFTP) Link(args base.Args, account *model.Account) (*base.Link, error) {
|
||||
client, err := GetClient(account)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
remoteFileName := utils.Join(account.RootFolder, args.Path)
|
||||
remoteFile, err := client.Open(remoteFileName)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &base.Link{
|
||||
Data: remoteFile,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (driver SFTP) Path(path string, account *model.Account) (*model.File, []model.File, error) {
|
||||
path = utils.ParsePath(path)
|
||||
file, err := driver.File(path, account)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
if !file.IsDir() {
|
||||
return file, nil, nil
|
||||
}
|
||||
files, err := driver.Files(path, account)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
return nil, files, nil
|
||||
}
|
||||
|
||||
func (driver SFTP) Preview(path string, account *model.Account) (interface{}, error) {
|
||||
// TODO: implement the preview interface if the driver supports it
|
||||
return nil, base.ErrNotImplement
|
||||
}
|
||||
|
||||
func (driver SFTP) MakeDir(path string, account *model.Account) error {
|
||||
client, err := GetClient(account)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return client.MkdirAll(utils.Join(account.RootFolder, path))
|
||||
}
|
||||
|
||||
func (driver SFTP) Move(src string, dst string, account *model.Account) error {
|
||||
return driver.Rename(src, dst, account)
|
||||
}
|
||||
|
||||
func (driver SFTP) Rename(src string, dst string, account *model.Account) error {
|
||||
client, err := GetClient(account)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return client.Rename(utils.Join(account.RootFolder, src), utils.Join(account.RootFolder, dst))
|
||||
}
|
||||
|
||||
func (driver SFTP) Copy(src string, dst string, account *model.Account) error {
|
||||
return base.ErrNotSupport
|
||||
}
|
||||
|
||||
func (driver SFTP) Delete(path string, account *model.Account) error {
|
||||
client, err := GetClient(account)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return client.Remove(utils.Join(account.RootFolder, path))
|
||||
}
|
||||
|
||||
func (driver SFTP) Upload(file *model.FileStream, account *model.Account) error {
|
||||
if file == nil {
|
||||
return base.ErrEmptyFile
|
||||
}
|
||||
client, err := GetClient(account)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
dstFile, err := client.Create(path.Join(account.RootFolder, file.ParentPath, file.Name))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
_ = dstFile.Close()
|
||||
}()
|
||||
_, err = io.Copy(dstFile, file)
|
||||
return err
|
||||
}
|
||||
|
||||
var _ base.Driver = (*SFTP)(nil)
|
|
@ -0,0 +1,110 @@
|
|||
package template
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/Xhofe/alist/conf"
|
||||
"github.com/Xhofe/alist/drivers/base"
|
||||
"github.com/Xhofe/alist/model"
|
||||
"github.com/Xhofe/alist/utils"
|
||||
"github.com/pkg/sftp"
|
||||
"golang.org/x/crypto/ssh"
|
||||
"os"
|
||||
"path"
|
||||
"sync"
|
||||
)
|
||||
|
||||
var clientsMap = struct {
|
||||
sync.Mutex
|
||||
clients map[string]*Client
|
||||
}{clients: make(map[string]*Client)}
|
||||
|
||||
func GetClient(account *model.Account) (*Client, error) {
|
||||
clientsMap.Lock()
|
||||
defer clientsMap.Unlock()
|
||||
if v, ok := clientsMap.clients[account.Name]; ok {
|
||||
return v, nil
|
||||
}
|
||||
conn, err := ssh.Dial("tcp", fmt.Sprintf("%s:%d", account.SiteUrl, account.Limit), &ssh.ClientConfig{
|
||||
User: account.Username,
|
||||
Auth: []ssh.AuthMethod{ssh.Password(account.Password)},
|
||||
HostKeyCallback: ssh.InsecureIgnoreHostKey(),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
client, err := sftp.NewClient(conn)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
c := &Client{client}
|
||||
clientsMap.clients[account.Name] = c
|
||||
return c, nil
|
||||
}
|
||||
|
||||
type Client struct {
|
||||
*sftp.Client
|
||||
}
|
||||
|
||||
func (client *Client) Files(remotePath string) ([]os.FileInfo, error) {
|
||||
return client.ReadDir(remotePath)
|
||||
}
|
||||
|
||||
func (client *Client) Remove(remotePath string) error {
|
||||
f, err := client.Stat(remotePath)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
if f.IsDir() {
|
||||
return client.removeDirectory(remotePath)
|
||||
} else {
|
||||
return client.removeFile(remotePath)
|
||||
}
|
||||
}
|
||||
|
||||
func (client *Client) removeDirectory(remotePath string) error {
|
||||
// If the directory cannot be opened, either the remote path is wrong or this is the first deployment
|
||||
remoteFiles, err := client.ReadDir(remotePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, backupDir := range remoteFiles {
|
||||
remoteFilePath := path.Join(remotePath, backupDir.Name())
|
||||
if backupDir.IsDir() {
|
||||
err := client.removeDirectory(remoteFilePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
err := client.Remove(path.Join(remoteFilePath))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
return client.RemoveDirectory(remotePath)
|
||||
}
|
||||
|
||||
func (client *Client) removeFile(remotePath string) error {
|
||||
return client.Remove(utils.Join(remotePath))
|
||||
}
|
||||
|
||||
func (driver SFTP) formatFile(f os.FileInfo) model.File {
|
||||
t := f.ModTime()
|
||||
file := model.File{
|
||||
//Id: f.Id,
|
||||
Name: f.Name(),
|
||||
Size: f.Size(),
|
||||
Driver: driver.Config().Name,
|
||||
UpdatedAt: &t,
|
||||
}
|
||||
if f.IsDir() {
|
||||
file.Type = conf.FOLDER
|
||||
} else {
|
||||
file.Type = utils.GetFileType(path.Ext(f.Name()))
|
||||
}
|
||||
return file
|
||||
}
|
||||
|
||||
func init() {
|
||||
base.RegisterDriver(&SFTP{})
|
||||
}
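The client helper above is a thin wrapper around github.com/pkg/sftp: listing and downloading reduce to ReadDir and Open on an SFTP session opened over password-authenticated SSH, exactly as GetClient does. A standalone sketch of that underlying pattern, with placeholder host and credentials (not alist code):

package main

import (
	"fmt"
	"io"
	"log"
	"os"

	"github.com/pkg/sftp"
	"golang.org/x/crypto/ssh"
)

func main() {
	// Placeholder endpoint and credentials.
	conn, err := ssh.Dial("tcp", "example.com:22", &ssh.ClientConfig{
		User:            "demo",
		Auth:            []ssh.AuthMethod{ssh.Password("secret")},
		HostKeyCallback: ssh.InsecureIgnoreHostKey(), // same trade-off the driver makes
	})
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client, err := sftp.NewClient(conn)
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()

	// Listing: what driver.Files ultimately relies on.
	entries, err := client.ReadDir("/")
	if err != nil {
		log.Fatal(err)
	}
	for _, e := range entries {
		fmt.Println(e.Name(), e.Size(), e.IsDir())
	}

	// Download: the open remote file is what driver.Link hands back as base.Link.Data.
	f, err := client.Open("/some/file.txt")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	_, _ = io.Copy(os.Stdout, f)
}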
|
|
@ -0,0 +1,18 @@
package template

import "time"

// write all structs here

type Resp struct {
	Code    int    `json:"code"`
	Message string `json:"message"`
}

type File struct {
	Id        string     `json:"id"`
	FileName  string     `json:"file_name"`
	Size      int64      `json:"size"`
	File      bool       `json:"file"`
	UpdatedAt *time.Time `json:"updated_at"`
}
|
|
@ -0,0 +1,3 @@
package template

// write utility functions here, e.g. calculating a request signature
|
|
@ -3,6 +3,7 @@ package webdav
|
|||
import (
|
||||
"github.com/Xhofe/alist/conf"
|
||||
"github.com/Xhofe/alist/drivers/base"
|
||||
"github.com/Xhofe/alist/drivers/webdav/odrvcookie"
|
||||
"github.com/Xhofe/alist/model"
|
||||
"github.com/Xhofe/alist/utils"
|
||||
"path/filepath"
|
||||
|
@ -40,6 +41,15 @@ func (driver WebDav) Items() []base.Item {
|
|||
Type: base.TypeString,
|
||||
Required: true,
|
||||
},
|
||||
{
|
||||
Name: "internal_type",
|
||||
Label: "vendor",
|
||||
Type: base.TypeSelect,
|
||||
Required: true,
|
||||
Default: "other",
|
||||
Values: "sharepoint,other",
|
||||
Description: "webdav vendor",
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -47,9 +57,17 @@ func (driver WebDav) Save(account *model.Account, old *model.Account) error {
|
|||
if account == nil {
|
||||
return nil
|
||||
}
|
||||
account.Status = "work"
|
||||
var err error
|
||||
if isSharePoint(account) {
|
||||
_, err = odrvcookie.GetCookie(account.Username, account.Password, account.SiteUrl)
|
||||
}
|
||||
if err != nil {
|
||||
account.Status = err.Error()
|
||||
} else {
|
||||
account.Status = "work"
|
||||
}
|
||||
_ = model.SaveAccount(account)
|
||||
return nil
|
||||
return err
|
||||
}
|
||||
|
||||
func (driver WebDav) File(path string, account *model.Account) (*model.File, error) {
|
||||
|
|
|
@ -0,0 +1,44 @@
|
|||
package odrvcookie
|
||||
|
||||
import (
|
||||
"github.com/Xhofe/alist/utils/cookie"
|
||||
"net/http"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
type SpCookie struct {
|
||||
Cookie string
|
||||
expire time.Time
|
||||
}
|
||||
|
||||
func (sp SpCookie) IsExpire() bool {
|
||||
return time.Now().After(sp.expire)
|
||||
}
|
||||
|
||||
var cookiesMap = struct {
|
||||
sync.Mutex
|
||||
m map[string]*SpCookie
|
||||
}{m: make(map[string]*SpCookie)}
|
||||
|
||||
func GetCookie(username, password, siteUrl string) (string, error) {
|
||||
cookiesMap.Lock()
|
||||
defer cookiesMap.Unlock()
|
||||
spCookie, ok := cookiesMap.m[username]
|
||||
if ok {
|
||||
if !spCookie.IsExpire() {
|
||||
return spCookie.Cookie, nil
|
||||
}
|
||||
}
|
||||
ca := New(username, password, siteUrl)
|
||||
tokenConf, err := ca.Cookies()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
spCookie = &SpCookie{
|
||||
Cookie: cookie.ToString([]*http.Cookie{&tokenConf.RtFa, &tokenConf.FedAuth}),
|
||||
expire: time.Now().Add(time.Hour * 12),
|
||||
}
|
||||
cookiesMap.m[username] = spCookie
|
||||
return spCookie.Cookie, nil
|
||||
}
|
|
@ -0,0 +1,206 @@
|
|||
// Package odrvcookie can fetch authentication cookies for a sharepoint webdav endpoint
|
||||
package odrvcookie
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/xml"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"net/http"
|
||||
"net/http/cookiejar"
|
||||
"net/url"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"golang.org/x/net/publicsuffix"
|
||||
)
|
||||
|
||||
// CookieAuth hold the authentication information
|
||||
// These are username and password as well as the authentication endpoint
|
||||
type CookieAuth struct {
|
||||
user string
|
||||
pass string
|
||||
endpoint string
|
||||
}
|
||||
|
||||
// CookieResponse contains the requested cookies
|
||||
type CookieResponse struct {
|
||||
RtFa http.Cookie
|
||||
FedAuth http.Cookie
|
||||
}
|
||||
|
||||
// SuccessResponse hold a response from the sharepoint webdav
|
||||
type SuccessResponse struct {
|
||||
XMLName xml.Name `xml:"Envelope"`
|
||||
Succ SuccessResponseBody `xml:"Body"`
|
||||
}
|
||||
|
||||
// SuccessResponseBody is the body of a success response, it holds the token
|
||||
type SuccessResponseBody struct {
|
||||
XMLName xml.Name
|
||||
Type string `xml:"RequestSecurityTokenResponse>TokenType"`
|
||||
Created time.Time `xml:"RequestSecurityTokenResponse>Lifetime>Created"`
|
||||
Expires time.Time `xml:"RequestSecurityTokenResponse>Lifetime>Expires"`
|
||||
Token string `xml:"RequestSecurityTokenResponse>RequestedSecurityToken>BinarySecurityToken"`
|
||||
}
|
||||
|
||||
// reqString is a template that gets populated with the user data in order to retrieve a "BinarySecurityToken"
|
||||
const reqString = `<s:Envelope xmlns:s="http://www.w3.org/2003/05/soap-envelope"
|
||||
xmlns:a="http://www.w3.org/2005/08/addressing"
|
||||
xmlns:u="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd">
|
||||
<s:Header>
|
||||
<a:Action s:mustUnderstand="1">http://schemas.xmlsoap.org/ws/2005/02/trust/RST/Issue</a:Action>
|
||||
<a:ReplyTo>
|
||||
<a:Address>http://www.w3.org/2005/08/addressing/anonymous</a:Address>
|
||||
</a:ReplyTo>
|
||||
<a:To s:mustUnderstand="1">{{ .LoginUrl }}</a:To>
|
||||
<o:Security s:mustUnderstand="1"
|
||||
xmlns:o="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd">
|
||||
<o:UsernameToken>
|
||||
<o:Username>{{ .Username }}</o:Username>
|
||||
<o:Password>{{ .Password }}</o:Password>
|
||||
</o:UsernameToken>
|
||||
</o:Security>
|
||||
</s:Header>
|
||||
<s:Body>
|
||||
<t:RequestSecurityToken xmlns:t="http://schemas.xmlsoap.org/ws/2005/02/trust">
|
||||
<wsp:AppliesTo xmlns:wsp="http://schemas.xmlsoap.org/ws/2004/09/policy">
|
||||
<a:EndpointReference>
|
||||
<a:Address>{{ .Address }}</a:Address>
|
||||
</a:EndpointReference>
|
||||
</wsp:AppliesTo>
|
||||
<t:KeyType>http://schemas.xmlsoap.org/ws/2005/05/identity/NoProofKey</t:KeyType>
|
||||
<t:RequestType>http://schemas.xmlsoap.org/ws/2005/02/trust/Issue</t:RequestType>
|
||||
<t:TokenType>urn:oasis:names:tc:SAML:1.0:assertion</t:TokenType>
|
||||
</t:RequestSecurityToken>
|
||||
</s:Body>
|
||||
</s:Envelope>`
|
||||
|
||||
// New creates a new CookieAuth struct
|
||||
func New(pUser, pPass, pEndpoint string) CookieAuth {
|
||||
retStruct := CookieAuth{
|
||||
user: pUser,
|
||||
pass: pPass,
|
||||
endpoint: pEndpoint,
|
||||
}
|
||||
|
||||
return retStruct
|
||||
}
|
||||
|
||||
// Cookies creates a CookieResponse. It fetches the auth token and then
|
||||
// retrieves the Cookies
|
||||
func (ca *CookieAuth) Cookies() (CookieResponse, error) {
|
||||
spToken, err := ca.getSPToken()
|
||||
if err != nil {
|
||||
return CookieResponse{}, err
|
||||
}
|
||||
return ca.getSPCookie(spToken)
|
||||
}
|
||||
|
||||
func (ca *CookieAuth) getSPCookie(conf *SuccessResponse) (CookieResponse, error) {
|
||||
spRoot, err := url.Parse(ca.endpoint)
|
||||
if err != nil {
|
||||
return CookieResponse{}, err
|
||||
}
|
||||
|
||||
u, err := url.Parse("https://" + spRoot.Host + "/_forms/default.aspx?wa=wsignin1.0")
|
||||
if err != nil {
|
||||
return CookieResponse{}, err
|
||||
}
|
||||
|
||||
// To authenticate with davfs or anything else we need two cookies (rtFa and FedAuth)
|
||||
// In order to get them we use the token we got earlier and a cookieJar
|
||||
jar, err := cookiejar.New(&cookiejar.Options{PublicSuffixList: publicsuffix.List})
|
||||
if err != nil {
|
||||
return CookieResponse{}, err
|
||||
}
|
||||
|
||||
client := &http.Client{
|
||||
Jar: jar,
|
||||
}
|
||||
|
||||
// Send the previously acquired token as a POST parameter
|
||||
if _, err = client.Post(u.String(), "text/xml", strings.NewReader(conf.Succ.Token)); err != nil {
|
||||
return CookieResponse{}, err
|
||||
}
|
||||
|
||||
cookieResponse := CookieResponse{}
|
||||
for _, cookie := range jar.Cookies(u) {
|
||||
if (cookie.Name == "rtFa") || (cookie.Name == "FedAuth") {
|
||||
switch cookie.Name {
|
||||
case "rtFa":
|
||||
cookieResponse.RtFa = *cookie
|
||||
case "FedAuth":
|
||||
cookieResponse.FedAuth = *cookie
|
||||
}
|
||||
}
|
||||
}
|
||||
return cookieResponse, err
|
||||
}
|
||||
|
||||
var loginUrlsMap = map[string]string{
|
||||
"com": "https://login.microsoftonline.com",
|
||||
"cn": "https://login.chinacloudapi.cn",
|
||||
"us": "https://login.microsoftonline.us",
|
||||
"de": "https://login.microsoftonline.de",
|
||||
}
|
||||
|
||||
func getLoginUrl(endpoint string) (string, error) {
|
||||
spRoot, err := url.Parse(endpoint)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
domains := strings.Split(spRoot.Host, ".")
|
||||
tld := domains[len(domains)-1]
|
||||
loginUrl, ok := loginUrlsMap[tld]
|
||||
if !ok {
|
||||
return "", fmt.Errorf("tld %s is not supported", tld)
|
||||
}
|
||||
return loginUrl + "/extSTS.srf", nil
|
||||
}
|
||||
|
||||
func (ca *CookieAuth) getSPToken() (*SuccessResponse, error) {
|
||||
loginUrl, err := getLoginUrl(ca.endpoint)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
reqData := map[string]string{
|
||||
"Username": ca.user,
|
||||
"Password": ca.pass,
|
||||
"Address": ca.endpoint,
|
||||
"LoginUrl": loginUrl,
|
||||
}
|
||||
|
||||
t := template.Must(template.New("authXML").Parse(reqString))
|
||||
|
||||
buf := &bytes.Buffer{}
|
||||
if err := t.Execute(buf, reqData); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Execute the first request which gives us an auth token for the sharepoint service
|
||||
// With this token we can authenticate on the login page and save the returned cookies
|
||||
req, err := http.NewRequest("POST", loginUrl, buf)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
client := &http.Client{}
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
respBuf := bytes.Buffer{}
|
||||
respBuf.ReadFrom(resp.Body)
|
||||
s := respBuf.Bytes()
|
||||
|
||||
var conf SuccessResponse
|
||||
err = xml.Unmarshal(s, &conf)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &conf, err
|
||||
}
|
|
@ -0,0 +1,7 @@
package webdav

import "github.com/Xhofe/alist/model"

func isSharePoint(account *model.Account) bool {
	return account.InternalType == "sharepoint"
}
|
|
@ -2,14 +2,30 @@ package webdav
|
|||
|
||||
import (
|
||||
"github.com/Xhofe/alist/drivers/base"
|
||||
"github.com/Xhofe/alist/drivers/webdav/odrvcookie"
|
||||
"github.com/Xhofe/alist/model"
|
||||
"github.com/Xhofe/alist/utils"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/studio-b12/gowebdav"
|
||||
"net/http"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func (driver WebDav) NewClient(account *model.Account) *gowebdav.Client {
|
||||
return gowebdav.NewClient(account.SiteUrl, account.Username, account.Password)
|
||||
c := gowebdav.NewClient(account.SiteUrl, account.Username, account.Password)
|
||||
if isSharePoint(account) {
|
||||
cookie, err := odrvcookie.GetCookie(account.Username, account.Password, account.SiteUrl)
|
||||
log.Debugln(cookie, err)
|
||||
if err == nil {
|
||||
log.Debugln("set interceptor")
|
||||
c.SetInterceptor(func(method string, rq *http.Request) {
|
||||
rq.Header.Del("Authorization")
|
||||
rq.Header.Set("Cookie", cookie)
|
||||
log.Debugf("sp webdav req: %+v", rq)
|
||||
})
|
||||
}
|
||||
}
|
||||
return c
|
||||
}
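The SharePoint branch works by dropping Basic auth and replaying the rtFa/FedAuth cookies on every request; gowebdav exposes that hook as SetInterceptor. For readers using a plain net/http client, the same idea is a small RoundTripper, sketched here assuming the cookie string has already been fetched via odrvcookie.GetCookie:

package webdavexample // illustrative only, not part of the driver

import "net/http"

// spCookieTransport injects the SharePoint auth cookies and strips any
// Authorization header, mirroring the interceptor installed above.
type spCookieTransport struct {
	cookie string
	next   http.RoundTripper
}

func (t spCookieTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	r := req.Clone(req.Context())
	r.Header.Del("Authorization")
	r.Header.Set("Cookie", t.cookie)
	return t.next.RoundTrip(r)
}

func newSharePointClient(cookie string) *http.Client {
	return &http.Client{Transport: spCookieTransport{cookie: cookie, next: http.DefaultTransport}}
}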
|
||||
|
||||
func (driver WebDav) WebDavPath(path string) string {
|
||||
|
|
go.mod
|
@ -1,6 +1,6 @@
|
|||
module github.com/Xhofe/alist
|
||||
|
||||
go 1.17
|
||||
go 1.18
|
||||
|
||||
require (
|
||||
github.com/aws/aws-sdk-go v1.27.0
|
||||
|
@ -13,6 +13,7 @@ require (
|
|||
github.com/jlaffaye/ftp v0.0.0-20211117213618-11820403398b
|
||||
github.com/json-iterator/go v1.1.12
|
||||
github.com/patrickmn/go-cache v2.1.0+incompatible
|
||||
github.com/pkg/sftp v1.13.4
|
||||
github.com/robfig/cron/v3 v3.0.0
|
||||
github.com/sirupsen/logrus v1.8.1
|
||||
github.com/studio-b12/gowebdav v0.0.0-20211109083228-3f8721cd4b6f
|
||||
|
@ -24,6 +25,8 @@ require (
|
|||
gorm.io/gorm v1.23.1
|
||||
)
|
||||
|
||||
require github.com/kr/fs v0.1.0 // indirect
|
||||
|
||||
require (
|
||||
github.com/baiyubin/aliyun-sts-go-sdk v0.0.0-20180326062324-cfa1a18b161f // indirect
|
||||
github.com/fatih/color v1.13.0
|
||||
|
@ -73,9 +76,9 @@ require (
|
|||
go.opentelemetry.io/otel v0.20.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v0.20.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v0.20.0 // indirect
|
||||
golang.org/x/crypto v0.0.0-20220214200702-86341886e292 // indirect
|
||||
golang.org/x/crypto v0.0.0-20220331220935-ae2d96664a29
|
||||
golang.org/x/net v0.0.0-20211209124913-491a49abca63 // indirect
|
||||
golang.org/x/sys v0.0.0-20211023085530-d6a326fbbf70 // indirect
|
||||
golang.org/x/sys v0.0.0-20220330033206-e17cdc41300f // indirect
|
||||
google.golang.org/protobuf v1.27.1 // indirect
|
||||
gopkg.in/natefinch/lumberjack.v2 v2.0.0 // indirect
|
||||
gopkg.in/tomb.v2 v2.0.0-20161208151619-d5d1b5820637 // indirect
|
||||
|
|
go.sum
|
@ -307,6 +307,8 @@ github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+o
|
|||
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8=
|
||||
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
|
||||
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
|
||||
|
@ -420,6 +422,8 @@ github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE
|
|||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA=
|
||||
github.com/pkg/sftp v1.13.4 h1:Lb0RYJCmgUcBgZosfoi9Y9sbl6+LJgOIgk/2Y4YjMFg=
|
||||
github.com/pkg/sftp v1.13.4/go.mod h1:LzqnAvaD5TWeNBsZpfKxSYn1MbjWwOsCIAFFJbpIsK8=
|
||||
github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
|
@ -556,11 +560,12 @@ golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8U
|
|||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
|
||||
golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
|
||||
golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.0.0-20220214200702-86341886e292 h1:f+lwQ+GtmgoY+A2YaQxlSOnDjXcQ7ZRLWOHbC6HtRqE=
|
||||
golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
golang.org/x/crypto v0.0.0-20220331220935-ae2d96664a29 h1:tkVvjkPTB7pnW3jnid7kNyAMPVWllTNOf/qKDze4p9o=
|
||||
golang.org/x/crypto v0.0.0-20220331220935-ae2d96664a29/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
|
||||
|
@ -645,12 +650,14 @@ golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7w
|
|||
golang.org/x/sys v0.0.0-20210309074719-68d13333faf2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211023085530-d6a326fbbf70 h1:SeSEfdIxyvwGJliREIJhRPPXvW6sDlLT+UQ3B0hD0NA=
|
||||
golang.org/x/sys v0.0.0-20211023085530-d6a326fbbf70/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220330033206-e17cdc41300f h1:rlezHXNlxYWvBCzNses9Dlc7nGFaNMJeqLolcmQSSZY=
|
||||
golang.org/x/sys v0.0.0-20220330033206-e17cdc41300f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
|
|
|
@ -56,7 +56,7 @@ func ExtractFolder(files []File, account *Account) {
|
|||
return
|
||||
}
|
||||
front := account.ExtractFolder == "front"
|
||||
sort.Slice(files, func(i, j int) bool {
|
||||
sort.SliceStable(files, func(i, j int) bool {
|
||||
if files[i].IsDir() || files[j].IsDir() {
|
||||
if !files[i].IsDir() {
|
||||
return !front
|
||||
|
@ -84,3 +84,7 @@ func (f File) ModTime() time.Time {
|
|||
func (f File) IsDir() bool {
|
||||
return f.Type == conf.FOLDER
|
||||
}
|
||||
|
||||
func (f File) GetType() int {
|
||||
return f.Type
|
||||
}
|
||||
|
|
|
@ -12,6 +12,7 @@ type Meta struct {
|
|||
Hide string `json:"hide"`
|
||||
Upload bool `json:"upload"`
|
||||
OnlyShows string `json:"only_shows"`
|
||||
Readme string `json:"readme"`
|
||||
}
|
||||
|
||||
func GetMetaByPath(path string) (*Meta, error) {
|
||||
|
|
|
@ -0,0 +1,35 @@
|
|||
package model
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/Xhofe/alist/conf"
|
||||
)
|
||||
|
||||
type ISearchFile interface {
|
||||
GetName() string
|
||||
GetSize() uint64
|
||||
GetType() int
|
||||
}
|
||||
|
||||
type SearchFile struct {
|
||||
Path string `json:"path" gorm:"index"`
|
||||
Name string `json:"name"`
|
||||
Size uint64 `json:"size"`
|
||||
Type int `json:"type"`
|
||||
}
|
||||
|
||||
func CreateSearchFiles(files []SearchFile) error {
|
||||
return conf.DB.Create(files).Error
|
||||
}
|
||||
|
||||
func DeleteSearchFilesByPath(path string) error {
|
||||
return conf.DB.Where(fmt.Sprintf("%s = ?", columnName("path")), path).Delete(&SearchFile{}).Error
|
||||
}
|
||||
|
||||
func SearchByNameAndPath(path, keyword string) ([]SearchFile, error) {
|
||||
var files []SearchFile
|
||||
if err := conf.DB.Where(fmt.Sprintf("%s LIKE ? AND %s LIKE ?", columnName("path"), columnName("name")), fmt.Sprintf("%s%%", path), fmt.Sprintf("%%%s%%", keyword)).Find(&files).Error; err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return files, nil
|
||||
}
|
|
@ -9,9 +9,9 @@ import (
|
|||
|
||||
func Path(rawPath string) (*model.File, []model.File, *model.Account, base.Driver, string, error) {
|
||||
account, path, driver, err := ParsePath(rawPath)
|
||||
accountFiles := model.GetAccountFilesByPath(rawPath)
|
||||
if err != nil {
|
||||
if err.Error() == "path not found" {
|
||||
accountFiles := model.GetAccountFilesByPath(rawPath)
|
||||
if len(accountFiles) != 0 {
|
||||
return nil, accountFiles, nil, nil, path, nil
|
||||
}
|
||||
|
@ -21,13 +21,31 @@ func Path(rawPath string) (*model.File, []model.File, *model.Account, base.Drive
|
|||
log.Debugln("use account: ", account.Name)
|
||||
file, files, err := operate.Path(driver, account, path)
|
||||
if err != nil {
|
||||
if err.Error() == "path not found" {
|
||||
if len(accountFiles) != 0 {
|
||||
return nil, accountFiles, nil, nil, path, nil
|
||||
}
|
||||
}
|
||||
return nil, nil, nil, nil, "", err
|
||||
}
|
||||
if file != nil {
|
||||
return file, nil, account, driver, path, nil
|
||||
} else {
|
||||
accountFiles := model.GetAccountFilesByPath(rawPath)
|
||||
files = append(files, accountFiles...)
|
||||
for _, accountFile := range accountFiles {
|
||||
if !containsByName(files, accountFile) {
|
||||
files = append(files, accountFile)
|
||||
}
|
||||
}
|
||||
return nil, files, account, driver, path, nil
|
||||
}
|
||||
}
|
||||
|
||||
func containsByName(files []model.File, file model.File) bool {
|
||||
for _, f := range files {
|
||||
if f.Name == file.Name {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
|
|
@ -12,6 +12,7 @@ import (
|
|||
"net/url"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var HttpClient = &http.Client{}
|
||||
|
@ -56,8 +57,12 @@ func Proxy(w http.ResponseWriter, r *http.Request, link *base.Link, file *model.
|
|||
return err
|
||||
}
|
||||
for h, val := range r.Header {
|
||||
if strings.ToLower(h) == "authorization" {
|
||||
continue
|
||||
}
|
||||
req.Header[h] = val
|
||||
}
|
||||
log.Debugf("req headers: %+v", r.Header)
|
||||
for _, header := range link.Headers {
|
||||
req.Header.Set(header.Name, header.Value)
|
||||
}
|
||||
|
|
|
@ -60,6 +60,7 @@ type Meta struct {
|
|||
Driver string `json:"driver"`
|
||||
Upload bool `json:"upload"`
|
||||
Total int `json:"total"`
|
||||
Readme string `json:"readme"`
|
||||
//Pages int `json:"pages"`
|
||||
}
|
||||
|
||||
|
@ -75,8 +76,10 @@ func Path(c *gin.Context) {
|
|||
_, ok := c.Get("admin")
|
||||
meta, _ := model.GetMetaByPath(req.Path)
|
||||
upload := false
|
||||
if meta != nil && meta.Upload {
|
||||
upload = true
|
||||
readme := ""
|
||||
if meta != nil {
|
||||
upload = meta.Upload
|
||||
readme = meta.Readme
|
||||
}
|
||||
err := CheckPagination(&req)
|
||||
if err != nil {
|
||||
|
@ -137,6 +140,7 @@ func Path(c *gin.Context) {
|
|||
Driver: driverName,
|
||||
Upload: upload,
|
||||
Total: total,
|
||||
Readme: readme,
|
||||
},
|
||||
Files: files,
|
||||
},
|
||||
|
|
|
@ -61,6 +61,7 @@ func Proxy(c *gin.Context) {
|
|||
return
|
||||
}
|
||||
err = common.Proxy(c.Writer, c.Request, link, file)
|
||||
log.Debugln("web proxy error:", err)
|
||||
if err != nil {
|
||||
common.ErrorResp(c, err, 500)
|
||||
}
|
||||
|
|
|
@ -0,0 +1,31 @@
|
|||
package controllers
|
||||
|
||||
import (
|
||||
"github.com/Xhofe/alist/conf"
|
||||
"github.com/Xhofe/alist/model"
|
||||
"github.com/Xhofe/alist/server/common"
|
||||
"github.com/gin-gonic/gin"
|
||||
)
|
||||
|
||||
type SearchReq struct {
|
||||
Path string `json:"path"`
|
||||
Keyword string `json:"keyword"`
|
||||
}
|
||||
|
||||
func Search(c *gin.Context) {
|
||||
if !conf.GetBool("enable search") {
|
||||
common.ErrorStrResp(c, "Not allowed search", 403)
|
||||
return
|
||||
}
|
||||
var req SearchReq
|
||||
if err := c.ShouldBind(&req); err != nil {
|
||||
common.ErrorResp(c, err, 400)
|
||||
return
|
||||
}
|
||||
files, err := model.SearchByNameAndPath(req.Path, req.Keyword)
|
||||
if err != nil {
|
||||
common.ErrorResp(c, err, 500)
|
||||
return
|
||||
}
|
||||
common.SuccessResp(c, files)
|
||||
}
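The handler gates on the "enable search" setting, binds a JSON body with path and keyword, and returns the matching SearchFile rows wrapped by common.SuccessResp. A client-side sketch follows; the listen address, port, and the /api/public/search prefix (assumed from the router change below) may differ per deployment:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	// Assumed address and route prefix; adjust to the actual deployment.
	body, _ := json.Marshal(map[string]string{
		"path":    "/",
		"keyword": "report",
	})
	resp, err := http.Post("http://localhost:5244/api/public/search", "application/json", bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// The exact response envelope comes from common.SuccessResp; decode
	// generically rather than assuming field names.
	var out map[string]interface{}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%v\n", out)
}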
|
|
@ -25,6 +25,8 @@ func InitApiRouter(r *gin.Engine) {
|
|||
path.POST("/path", controllers.Path)
|
||||
path.POST("/preview", controllers.Preview)
|
||||
|
||||
public.POST("/search", controllers.Search)
|
||||
|
||||
//path.POST("/link",middlewares.Auth, controllers.Link)
|
||||
public.POST("/upload", file.UploadFiles)
|
||||
|
||||
|
|
|
@ -48,7 +48,20 @@ func (fs *FileSystem) File(rawPath string) (*model.File, error) {
|
|||
}
|
||||
return nil, err
|
||||
}
|
||||
return operate.File(driver, account, path_)
|
||||
file, err := operate.File(driver, account, path_)
|
||||
if err != nil && err.Error() == "path not found" {
|
||||
accountFiles := model.GetAccountFilesByPath(rawPath)
|
||||
if len(accountFiles) != 0 {
|
||||
now := time.Now()
|
||||
return &model.File{
|
||||
Name: "root",
|
||||
Size: 0,
|
||||
Type: conf.FOLDER,
|
||||
UpdatedAt: &now,
|
||||
}, nil
|
||||
}
|
||||
}
|
||||
return file, err
|
||||
}
|
||||
|
||||
func (fs *FileSystem) Files(ctx context.Context, rawPath string) ([]model.File, error) {