package service

import (
	"bufio"
	"context"
	"encoding/base64"
	"encoding/json"
	"fmt"
	"os"
	"path"
	"strings"
	"time"

	"github.com/1Panel-dev/1Panel/backend/app/dto"
	"github.com/1Panel-dev/1Panel/backend/app/model"
	"github.com/1Panel-dev/1Panel/backend/buserr"
	"github.com/1Panel-dev/1Panel/backend/constant"
	"github.com/1Panel-dev/1Panel/backend/global"
	"github.com/1Panel-dev/1Panel/backend/utils/cloud_storage"
	"github.com/1Panel-dev/1Panel/backend/utils/cloud_storage/client"
	fileUtils "github.com/1Panel-dev/1Panel/backend/utils/files"

	"github.com/jinzhu/copier"
	"github.com/pkg/errors"
)

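// BackupService manages backup accounts (local directory, S3-compatible object
// storage, SFTP, WebDAV, OneDrive, ...) and the backup records they produce.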
type BackupService struct{}

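// IBackupService exposes CRUD operations for backup accounts, record search
// and download helpers, and per-module backup/recover entry points.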
type IBackupService interface {
	List() ([]dto.BackupInfo, error)
	SearchRecordsWithPage(search dto.RecordSearch) (int64, []dto.BackupRecords, error)
	SearchRecordsByCronjobWithPage(search dto.RecordSearchByCronjob) (int64, []dto.BackupRecords, error)
	LoadOneDriveInfo() (dto.OneDriveInfo, error)
	DownloadRecord(info dto.DownloadRecord) (string, error)
	Create(backupDto dto.BackupOperate) error
	GetBuckets(backupDto dto.ForBuckets) ([]interface{}, error)
	Update(ireq dto.BackupOperate) error
	Delete(id uint) error
	BatchDeleteRecord(ids []uint) error
	NewClient(backup *model.BackupAccount) (cloud_storage.CloudStorageClient, error)

	ListFiles(req dto.BackupSearchFile) ([]string, error)

	MysqlBackup(db dto.CommonBackup) error
	PostgresqlBackup(db dto.CommonBackup) error
	MysqlRecover(db dto.CommonRecover) error
	PostgresqlRecover(db dto.CommonRecover) error
	MysqlRecoverByUpload(req dto.CommonRecover) error
	PostgresqlRecoverByUpload(req dto.CommonRecover) error

	RedisBackup() error
	RedisRecover(db dto.CommonRecover) error

	WebsiteBackup(db dto.CommonBackup) error
	WebsiteRecover(req dto.CommonRecover) error

	AppBackup(db dto.CommonBackup) error
	AppRecover(req dto.CommonRecover) error

	Run()
}

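// NewIBackupService returns the default IBackupService implementation.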
func NewIBackupService() IBackupService {
	return &BackupService{}
}

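// List returns one dto.BackupInfo per supported backup type, filled from the
// configured account when one exists and left empty otherwise.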
func (u *BackupService) List() ([]dto.BackupInfo, error) {
	ops, err := backupRepo.List(commonRepo.WithOrderBy("created_at desc"))
	var dtobas []dto.BackupInfo
	dtobas = append(dtobas, u.loadByType("LOCAL", ops))
	dtobas = append(dtobas, u.loadByType("OSS", ops))
	dtobas = append(dtobas, u.loadByType("S3", ops))
	dtobas = append(dtobas, u.loadByType("SFTP", ops))
	dtobas = append(dtobas, u.loadByType("MINIO", ops))
	dtobas = append(dtobas, u.loadByType("COS", ops))
	dtobas = append(dtobas, u.loadByType("KODO", ops))
	dtobas = append(dtobas, u.loadByType("OneDrive", ops))
	dtobas = append(dtobas, u.loadByType("WebDAV", ops))
	return dtobas, err
}

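// SearchRecordsWithPage pages backup records filtered by name, type and detail
// name, resolving the file size of each record through a per-source storage
// client that is cached in clientMap for the duration of the call.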
func (u *BackupService) SearchRecordsWithPage(search dto.RecordSearch) (int64, []dto.BackupRecords, error) {
	total, records, err := backupRepo.PageRecord(
		search.Page, search.PageSize,
		commonRepo.WithOrderBy("created_at desc"),
		commonRepo.WithByName(search.Name),
		commonRepo.WithByType(search.Type),
		backupRepo.WithByDetailName(search.DetailName),
	)

	var datas []dto.BackupRecords
	clientMap := make(map[string]loadSizeHelper)
	for i := 0; i < len(records); i++ {
		var item dto.BackupRecords
		if err := copier.Copy(&item, &records[i]); err != nil {
			return 0, nil, errors.WithMessage(constant.ErrStructTransform, err.Error())
		}
		itemPath := path.Join(records[i].FileDir, records[i].FileName)
		if _, ok := clientMap[records[i].Source]; !ok {
			backup, err := backupRepo.Get(commonRepo.WithByType(records[i].Source))
			if err != nil {
				global.LOG.Errorf("load backup model %s from db failed, err: %v", records[i].Source, err)
				return total, datas, err
			}
			client, err := u.NewClient(&backup)
			if err != nil {
				global.LOG.Errorf("load backup client %s from db failed, err: %v", records[i].Source, err)
				return total, datas, err
			}
			item.Size, _ = client.Size(path.Join(strings.TrimLeft(backup.BackupPath, "/"), itemPath))
			datas = append(datas, item)
			clientMap[records[i].Source] = loadSizeHelper{backupPath: strings.TrimLeft(backup.BackupPath, "/"), client: client}
			continue
		}
		item.Size, _ = clientMap[records[i].Source].client.Size(path.Join(clientMap[records[i].Source].backupPath, itemPath))
		datas = append(datas, item)
	}
	return total, datas, err
}

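// SearchRecordsByCronjobWithPage works like SearchRecordsWithPage but filters
// the records by the cronjob that created them.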
func (u *BackupService) SearchRecordsByCronjobWithPage(search dto.RecordSearchByCronjob) (int64, []dto.BackupRecords, error) {
	total, records, err := backupRepo.PageRecord(
		search.Page, search.PageSize,
		commonRepo.WithOrderBy("created_at desc"),
		backupRepo.WithByCronID(search.CronjobID),
	)

	var datas []dto.BackupRecords
	clientMap := make(map[string]loadSizeHelper)
	for i := 0; i < len(records); i++ {
		var item dto.BackupRecords
		if err := copier.Copy(&item, &records[i]); err != nil {
			return 0, nil, errors.WithMessage(constant.ErrStructTransform, err.Error())
		}
		itemPath := path.Join(records[i].FileDir, records[i].FileName)
		if _, ok := clientMap[records[i].Source]; !ok {
			backup, err := backupRepo.Get(commonRepo.WithByType(records[i].Source))
			if err != nil {
				global.LOG.Errorf("load backup model %s from db failed, err: %v", records[i].Source, err)
				return total, datas, err
			}
			client, err := u.NewClient(&backup)
			if err != nil {
				global.LOG.Errorf("load backup client %s from db failed, err: %v", records[i].Source, err)
				return total, datas, err
			}
			item.Size, _ = client.Size(path.Join(strings.TrimLeft(backup.BackupPath, "/"), itemPath))
			datas = append(datas, item)
			clientMap[records[i].Source] = loadSizeHelper{backupPath: strings.TrimLeft(backup.BackupPath, "/"), client: client}
			continue
		}
		item.Size, _ = clientMap[records[i].Source].client.Size(path.Join(clientMap[records[i].Source].backupPath, itemPath))
		datas = append(datas, item)
	}
	return total, datas, err
}

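// loadSizeHelper caches a storage client together with its normalized backup
// path so record sizes can be resolved without re-reading the account.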
type loadSizeHelper struct {
	backupPath string
	client     cloud_storage.CloudStorageClient
}

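// LoadOneDriveInfo returns the OneDrive OAuth client ID and secret stored
// (base64-encoded) in the settings table, plus the fixed redirect URI.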
func (u *BackupService) LoadOneDriveInfo() (dto.OneDriveInfo, error) {
	var data dto.OneDriveInfo
	data.RedirectUri = constant.OneDriveRedirectURI
	clientID, err := settingRepo.Get(settingRepo.WithByKey("OneDriveID"))
	if err != nil {
		return data, err
	}
	idItem, err := base64.StdEncoding.DecodeString(clientID.Value)
	if err != nil {
		return data, err
	}
	data.ClientID = string(idItem)
	clientSecret, err := settingRepo.Get(settingRepo.WithByKey("OneDriveSc"))
	if err != nil {
		return data, err
	}
	secretItem, err := base64.StdEncoding.DecodeString(clientSecret.Value)
	if err != nil {
		return data, err
	}
	data.ClientSecret = string(secretItem)

	return data, err
}

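// DownloadRecord returns a local filesystem path for the given record: for
// LOCAL backups the path is built directly from the local backup directory,
// otherwise the file is pulled from the remote account into the download
// directory first.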
func (u *BackupService) DownloadRecord(info dto.DownloadRecord) (string, error) {
	if info.Source == "LOCAL" {
		localDir, err := loadLocalDir()
		if err != nil {
			return "", err
		}
		return path.Join(localDir, info.FileDir, info.FileName), nil
	}
	backup, _ := backupRepo.Get(commonRepo.WithByType(info.Source))
	if backup.ID == 0 {
		return "", constant.ErrRecordNotFound
	}
	varMap := make(map[string]interface{})
	if err := json.Unmarshal([]byte(backup.Vars), &varMap); err != nil {
		return "", err
	}
	varMap["bucket"] = backup.Bucket
	switch backup.Type {
	case constant.Sftp, constant.WebDAV:
		varMap["username"] = backup.AccessKey
		varMap["password"] = backup.Credential
	case constant.OSS, constant.S3, constant.MinIo, constant.Cos, constant.Kodo:
		varMap["accessKey"] = backup.AccessKey
		varMap["secretKey"] = backup.Credential
	case constant.OneDrive:
		varMap["accessToken"] = backup.Credential
	}
	backClient, err := cloud_storage.NewCloudStorageClient(backup.Type, varMap)
	if err != nil {
		return "", fmt.Errorf("new cloud storage client failed, err: %v", err)
	}
	targetPath := fmt.Sprintf("%s/download/%s/%s", constant.DataDir, info.FileDir, info.FileName)
	if _, err := os.Stat(path.Dir(targetPath)); err != nil && os.IsNotExist(err) {
		if err = os.MkdirAll(path.Dir(targetPath), os.ModePerm); err != nil {
			global.LOG.Errorf("mkdir %s failed, err: %v", path.Dir(targetPath), err)
		}
	}
	srcPath := fmt.Sprintf("%s/%s", info.FileDir, info.FileName)
	if len(backup.BackupPath) != 0 {
		srcPath = path.Join(strings.TrimPrefix(backup.BackupPath, "/"), srcPath)
	}
	if exist, _ := backClient.Exist(srcPath); exist {
		isOK, err := backClient.Download(srcPath, targetPath)
		if !isOK {
			return "", fmt.Errorf("cloud storage download failed, err: %v", err)
		}
	}
	return targetPath, nil
}

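// Create registers a new backup account. OneDrive accounts first exchange
// their authorization code for tokens, non-LOCAL accounts are connectivity
// checked, and OneDrive accounts also start the hourly token-refresh job
// before the account is persisted.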
func (u *BackupService) Create(req dto.BackupOperate) error {
	backup, _ := backupRepo.Get(commonRepo.WithByType(req.Type))
	if backup.ID != 0 {
		return constant.ErrRecordExist
	}
	if err := copier.Copy(&backup, &req); err != nil {
		return errors.WithMessage(constant.ErrStructTransform, err.Error())
	}

	if req.Type == constant.OneDrive {
		if err := u.loadAccessToken(&backup); err != nil {
			return err
		}
	}
	if req.Type != "LOCAL" {
		if _, err := u.checkBackupConn(&backup); err != nil {
			return buserr.WithMap("ErrBackupCheck", map[string]interface{}{"err": err.Error()}, err)
		}
	}
	if backup.Type == constant.OneDrive {
		StartRefreshOneDriveToken()
	}
	if err := backupRepo.Create(&backup); err != nil {
		return err
	}
	return nil
}

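// GetBuckets builds a storage client from the submitted credentials and lists
// the buckets it can see.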
func (u *BackupService) GetBuckets(backupDto dto.ForBuckets) ([]interface{}, error) {
	varMap := make(map[string]interface{})
	if err := json.Unmarshal([]byte(backupDto.Vars), &varMap); err != nil {
		return nil, err
	}
	switch backupDto.Type {
	case constant.Sftp, constant.WebDAV:
		varMap["username"] = backupDto.AccessKey
		varMap["password"] = backupDto.Credential
	case constant.OSS, constant.S3, constant.MinIo, constant.Cos, constant.Kodo:
		varMap["accessKey"] = backupDto.AccessKey
		varMap["secretKey"] = backupDto.Credential
	}
	client, err := cloud_storage.NewCloudStorageClient(backupDto.Type, varMap)
	if err != nil {
		return nil, err
	}
	return client.ListBuckets()
}

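// Delete removes a backup account by ID. OneDrive accounts also drop the
// token-refresh cron entry, and deletion is refused while any cronjob still
// uses the account as its default download target.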
func (u *BackupService) Delete(id uint) error {
	backup, _ := backupRepo.Get(commonRepo.WithByID(id))
	if backup.ID == 0 {
		return constant.ErrRecordNotFound
	}
	if backup.Type == constant.OneDrive {
		global.Cron.Remove(global.OneDriveCronID)
	}
	cronjobs, _ := cronjobRepo.List(cronjobRepo.WithByDefaultDownload(backup.Type))
	if len(cronjobs) != 0 {
		return buserr.New(constant.ErrBackupInUsed)
	}
	return backupRepo.Delete(commonRepo.WithByID(id))
}

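// BatchDeleteRecord deletes the given backup records, removing the backing
// files from the local disk or the remote storage account before clearing the
// database rows.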
func (u *BackupService) BatchDeleteRecord(ids []uint) error {
	records, err := backupRepo.ListRecord(commonRepo.WithIdsIn(ids))
	if err != nil {
		return err
	}
	for _, record := range records {
		if record.Source == "LOCAL" {
			if err := os.Remove(record.FileDir + "/" + record.FileName); err != nil {
				global.LOG.Errorf("remove file %s failed, err: %v", record.FileDir+"/"+record.FileName, err)
			}
		} else {
			backupAccount, err := backupRepo.Get(commonRepo.WithByType(record.Source))
			if err != nil {
				return err
			}
			client, err := u.NewClient(&backupAccount)
			if err != nil {
				return err
			}
			if _, err = client.Delete(record.FileDir + record.FileName); err != nil {
				global.LOG.Errorf("remove file %s from %s failed, err: %v", record.FileDir+record.FileName, record.Source, err)
			}
		}
	}
	return backupRepo.DeleteRecord(context.Background(), commonRepo.WithIdsIn(ids))
}

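// Update persists changes to a backup account. OneDrive accounts refresh their
// credentials first, non-LOCAL accounts are connectivity checked, and changing
// the LOCAL directory copies the existing backups into the new location,
// rolling the vars back if the copy fails.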
func (u *BackupService) Update(req dto.BackupOperate) error {
	backup, err := backupRepo.Get(commonRepo.WithByID(req.ID))
	if err != nil {
		return constant.ErrRecordNotFound
	}
	varMap := make(map[string]interface{})
	if err := json.Unmarshal([]byte(req.Vars), &varMap); err != nil {
		return err
	}

	oldVars := backup.Vars
	oldDir, err := loadLocalDir()
	if err != nil {
		return err
	}
	upMap := make(map[string]interface{})
	upMap["bucket"] = req.Bucket
	upMap["access_key"] = req.AccessKey
	upMap["credential"] = req.Credential
	upMap["backup_path"] = req.BackupPath
	upMap["vars"] = req.Vars
	backup.Bucket = req.Bucket
	backup.Vars = req.Vars
	backup.Credential = req.Credential
	backup.AccessKey = req.AccessKey
	backup.BackupPath = req.BackupPath

	if req.Type == constant.OneDrive {
		if err := u.loadAccessToken(&backup); err != nil {
			return err
		}
		upMap["credential"] = backup.Credential
		upMap["vars"] = backup.Vars
	}
	if backup.Type != "LOCAL" {
		isOk, err := u.checkBackupConn(&backup)
		if err != nil || !isOk {
			return buserr.WithMap("ErrBackupCheck", map[string]interface{}{"err": err.Error()}, err)
		}
	}

	if err := backupRepo.Update(req.ID, upMap); err != nil {
		return err
	}
	if backup.Type == "LOCAL" {
		if dir, ok := varMap["dir"]; ok {
			if dirStr, isStr := dir.(string); isStr {
				if strings.HasSuffix(dirStr, "/") {
					dirStr = dirStr[:strings.LastIndex(dirStr, "/")]
				}
				if err := copyDir(oldDir, dirStr); err != nil {
					_ = backupRepo.Update(req.ID, map[string]interface{}{"vars": oldVars})
					return err
				}
			}
		}
	}
	return nil
}

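// ListFiles lists the system snapshot archives stored under the account's
// backup path and returns just their base file names.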
func (u *BackupService) ListFiles(req dto.BackupSearchFile) ([]string, error) {
	backup, err := backupRepo.Get(backupRepo.WithByType(req.Type))
	if err != nil {
		return nil, err
	}
	client, err := u.NewClient(&backup)
	if err != nil {
		return nil, err
	}
	prefix := "system_snapshot"
	if len(backup.BackupPath) != 0 {
		prefix = path.Join(strings.TrimPrefix(backup.BackupPath, "/"), prefix)
	}
	files, err := client.ListObjects(prefix)
	if err != nil {
		return nil, err
	}
	var datas []string
	for _, file := range files {
		if len(file) != 0 {
			datas = append(datas, path.Base(file))
		}
	}
	return datas, nil
}

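// NewClient builds a cloud_storage client for the given account, mapping the
// stored access key and credential into the variable names each backend
// expects (username/password, accessKey/secretKey or accessToken).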
func (u *BackupService) NewClient(backup *model.BackupAccount) (cloud_storage.CloudStorageClient, error) {
	varMap := make(map[string]interface{})
	if err := json.Unmarshal([]byte(backup.Vars), &varMap); err != nil {
		return nil, err
	}
	varMap["bucket"] = backup.Bucket
	switch backup.Type {
	case constant.Sftp, constant.WebDAV:
		varMap["username"] = backup.AccessKey
		varMap["password"] = backup.Credential
	case constant.OSS, constant.S3, constant.MinIo, constant.Cos, constant.Kodo:
		varMap["accessKey"] = backup.AccessKey
		varMap["secretKey"] = backup.Credential
	case constant.OneDrive:
		varMap["accessToken"] = backup.Credential
	}

	backClient, err := cloud_storage.NewCloudStorageClient(backup.Type, varMap)
	if err != nil {
		return nil, err
	}

	return backClient, nil
}

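// loadByType picks the account of the requested type out of the loaded list;
// for OneDrive the refresh token is stripped from the vars before the info is
// returned to the caller.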
func (u *BackupService) loadByType(accountType string, accounts []model.BackupAccount) dto.BackupInfo {
	for _, account := range accounts {
		if account.Type == accountType {
			var item dto.BackupInfo
			if err := copier.Copy(&item, &account); err != nil {
				global.LOG.Errorf("copy backup account to dto backup info failed, err: %v", err)
			}
			if account.Type == constant.OneDrive {
				varMap := make(map[string]interface{})
				if err := json.Unmarshal([]byte(item.Vars), &varMap); err != nil {
					return dto.BackupInfo{Type: accountType}
				}
				delete(varMap, "refresh_token")
				itemVars, _ := json.Marshal(varMap)
				item.Vars = string(itemVars)
			}
			return item
		}
	}
	return dto.BackupInfo{Type: accountType}
}

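// loadAccessToken exchanges the OneDrive authorization code stored in the
// account vars for an access/refresh token pair and writes the result back
// into the account's credential and vars.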
func (u *BackupService) loadAccessToken(backup *model.BackupAccount) error {
	varMap := make(map[string]interface{})
	if err := json.Unmarshal([]byte(backup.Vars), &varMap); err != nil {
		return fmt.Errorf("unmarshal backup vars failed, err: %v", err)
	}
	token, refreshToken, err := client.RefreshToken("authorization_code", varMap)
	if err != nil {
		return err
	}
	delete(varMap, "code")
	backup.Credential = token
	varMap["refresh_status"] = constant.StatusSuccess
	varMap["refresh_time"] = time.Now().Format("2006-01-02 15:04:05")
	varMap["refresh_token"] = refreshToken
	itemVars, err := json.Marshal(varMap)
	if err != nil {
		return fmt.Errorf("json marshal var map failed, err: %v", err)
	}
	backup.Vars = string(itemVars)
	return nil
}

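// loadLocalDir reads the "dir" entry from the LOCAL backup account and makes
// sure the directory exists before returning it.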
func loadLocalDir() (string, error) {
	backup, err := backupRepo.Get(commonRepo.WithByType("LOCAL"))
	if err != nil {
		return "", err
	}
	varMap := make(map[string]interface{})
	if err := json.Unmarshal([]byte(backup.Vars), &varMap); err != nil {
		return "", err
	}
	if _, ok := varMap["dir"]; !ok {
		return "", errors.New("load local backup dir failed")
	}
	baseDir, ok := varMap["dir"].(string)
	if ok {
		if _, err := os.Stat(baseDir); err != nil && os.IsNotExist(err) {
			if err = os.MkdirAll(baseDir, os.ModePerm); err != nil {
				return "", fmt.Errorf("mkdir %s failed, err: %v", baseDir, err)
			}
		}
		return baseDir, nil
	}
	return "", fmt.Errorf("error type dir: %T", varMap["dir"])
}

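// copyDir recursively copies src into dst, logging (but not aborting on)
// failures for individual files and subdirectories.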
func copyDir(src, dst string) error {
	srcInfo, err := os.Stat(src)
	if err != nil {
		return err
	}
	if err = os.MkdirAll(dst, srcInfo.Mode()); err != nil {
		return err
	}
	files, err := os.ReadDir(src)
	if err != nil {
		return err
	}

	fileOP := fileUtils.NewFileOp()
	for _, file := range files {
		srcPath := fmt.Sprintf("%s/%s", src, file.Name())
		dstPath := fmt.Sprintf("%s/%s", dst, file.Name())
		if file.IsDir() {
			if err = copyDir(srcPath, dstPath); err != nil {
				global.LOG.Errorf("copy dir %s to %s failed, err: %v", srcPath, dstPath, err)
			}
		} else {
			if err := fileOP.CopyFile(srcPath, dst); err != nil {
				global.LOG.Errorf("copy file %s to %s failed, err: %v", srcPath, dstPath, err)
			}
		}
	}

	return nil
}

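// checkBackupConn verifies a backup account by writing a small multilingual
// test file locally and uploading it to <backup_path>/test/1panel on the
// remote side.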
func (u *BackupService) checkBackupConn(backup *model.BackupAccount) (bool, error) {
	client, err := u.NewClient(backup)
	if err != nil {
		return false, err
	}
	fileItem := path.Join(global.CONF.System.TmpDir, "test", "1panel")
	if _, err := os.Stat(path.Dir(fileItem)); err != nil && os.IsNotExist(err) {
		if err = os.MkdirAll(path.Dir(fileItem), os.ModePerm); err != nil {
			return false, err
		}
	}
	file, err := os.OpenFile(fileItem, os.O_WRONLY|os.O_CREATE, 0666)
	if err != nil {
		return false, err
	}
	defer file.Close()
	write := bufio.NewWriter(file)
	_, _ = write.WriteString("1Panel 备份账号测试文件。\n")
	_, _ = write.WriteString("1Panel 備份賬號測試文件。\n")
	_, _ = write.WriteString("1Panel Backs up account test files.\n")
	_, _ = write.WriteString("1Panelアカウントのテストファイルをバックアップします。\n")
	write.Flush()

	targetPath := strings.TrimPrefix(path.Join(backup.BackupPath, "test/1panel"), "/")
	return client.Upload(fileItem, targetPath)
}

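// StartRefreshOneDriveToken schedules the backup service to run hourly
// ("0 * * * *") so Run can refresh the OneDrive token; the job ID is stored in
// global.OneDriveCronID so Delete can remove it again.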
func StartRefreshOneDriveToken() {
	service := NewIBackupService()
	oneDriveCronID, err := global.Cron.AddJob("0 * * * *", service)
	if err != nil {
		global.LOG.Errorf("cannot add OneDrive cron job: %s", err.Error())
		return
	}
	global.OneDriveCronID = oneDriveCronID
}

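// Run is executed by the scheduled cron job: it refreshes the OneDrive access
// and refresh tokens and stores the new values (plus refresh status and time)
// back on the backup account.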
func (u *BackupService) Run() {
	var backupItem model.BackupAccount
	_ = global.DB.Where("`type` = ?", "OneDrive").First(&backupItem)
	if backupItem.ID == 0 {
		return
	}
	if len(backupItem.Credential) == 0 {
		global.LOG.Error("OneDrive configuration lacks token information, please rebind.")
		return
	}
	global.LOG.Info("start to refresh token of OneDrive ...")
	varMap := make(map[string]interface{})
	if err := json.Unmarshal([]byte(backupItem.Vars), &varMap); err != nil {
		global.LOG.Errorf("Failed to refresh OneDrive token, please retry, err: %v", err)
		return
	}
	token, refreshToken, err := client.RefreshToken("refresh_token", varMap)
	varMap["refresh_status"] = constant.StatusSuccess
	varMap["refresh_time"] = time.Now().Format("2006-01-02 15:04:05")
	if err != nil {
		varMap["refresh_status"] = constant.StatusFailed
		varMap["refresh_msg"] = err.Error()
		global.LOG.Errorf("Failed to refresh OneDrive token, please retry, err: %v", err)
		return
	}
	varMap["refresh_token"] = refreshToken

	varsItem, _ := json.Marshal(varMap)
	_ = global.DB.Model(&model.BackupAccount{}).
		Where("id = ?", backupItem.ID).
		Updates(map[string]interface{}{
			"credential": token,
			"vars":       varsItem,
		}).Error
	global.LOG.Info("Successfully refreshed OneDrive token.")
}