diff --git a/backend/app/api/v1/cronjob.go b/backend/app/api/v1/cronjob.go
index b9b4a3f2f..c216eaa4f 100644
--- a/backend/app/api/v1/cronjob.go
+++ b/backend/app/api/v1/cronjob.go
@@ -152,3 +152,22 @@ func (b *BaseApi) LoadRecordDetail(c *gin.Context) {
     }
     helper.SuccessWithData(c, string(buf))
 }
+
+func (b *BaseApi) TargetDownload(c *gin.Context) {
+    var req dto.CronjobDownload
+    if err := c.ShouldBindJSON(&req); err != nil {
+        helper.ErrorWithDetail(c, constant.CodeErrBadRequest, constant.ErrTypeInvalidParams, err)
+        return
+    }
+    if err := global.VALID.Struct(req); err != nil {
+        helper.ErrorWithDetail(c, constant.CodeErrBadRequest, constant.ErrTypeInvalidParams, err)
+        return
+    }
+
+    dir, err := cronjobService.Download(req)
+    if err != nil {
+        helper.ErrorWithDetail(c, constant.CodeErrInternalServer, constant.ErrTypeInternalServer, err)
+        return
+    }
+    helper.SuccessWithData(c, dir)
+}
diff --git a/backend/app/dto/cronjob.go b/backend/app/dto/cronjob.go
index b0d0f860e..33af95ebb 100644
--- a/backend/app/dto/cronjob.go
+++ b/backend/app/dto/cronjob.go
@@ -44,6 +44,11 @@ type CronjobUpdateStatus struct {
     Status string `json:"status" validate:"required"`
 }
 
+type CronjobDownload struct {
+    RecordID        uint `json:"recordID" validate:"required"`
+    BackupAccountID uint `json:"backupAccountID" validate:"required"`
+}
+
 type DetailFile struct {
     Path string `json:"path" validate:"required"`
 }
diff --git a/backend/app/repo/cronjob.go b/backend/app/repo/cronjob.go
index d6319d536..66a38bc52 100644
--- a/backend/app/repo/cronjob.go
+++ b/backend/app/repo/cronjob.go
@@ -13,6 +13,7 @@ type CronjobRepo struct{}
 
 type ICronjobRepo interface {
     Get(opts ...DBOption) (model.Cronjob, error)
+    GetRecord(opts ...DBOption) (model.JobRecords, error)
     List(opts ...DBOption) ([]model.Cronjob, error)
     Page(limit, offset int, opts ...DBOption) (int64, []model.Cronjob, error)
     Create(cronjob *model.Cronjob) error
@@ -40,6 +41,16 @@ func (u *CronjobRepo) Get(opts ...DBOption) (model.Cronjob, error) {
     return cronjob, err
 }
 
+func (u *CronjobRepo) GetRecord(opts ...DBOption) (model.JobRecords, error) {
+    var record model.JobRecords
+    db := global.DB
+    for _, opt := range opts {
+        db = opt(db)
+    }
+    err := db.First(&record).Error
+    return record, err
+}
+
 func (u *CronjobRepo) List(opts ...DBOption) ([]model.Cronjob, error) {
     var cronjobs []model.Cronjob
     db := global.DB.Model(&model.Cronjob{})
diff --git a/backend/app/service/cornjob.go b/backend/app/service/cornjob.go
index daeb95783..72a7dc365 100644
--- a/backend/app/service/cornjob.go
+++ b/backend/app/service/cornjob.go
@@ -82,6 +82,53 @@ func (u *CronjobService) SearchRecords(search dto.SearchRecord) (int64, interfac
     return total, dtoCronjobs, err
 }
 
+func (u *CronjobService) Download(down dto.CronjobDownload) (string, error) {
+    record, _ := cronjobRepo.GetRecord(commonRepo.WithByID(down.RecordID))
+    if record.ID == 0 {
+        return "", constant.ErrRecordNotFound
+    }
+    cronjob, _ := cronjobRepo.Get(commonRepo.WithByID(record.CronjobID))
+    if cronjob.ID == 0 {
+        return "", constant.ErrRecordNotFound
+    }
+    backup, _ := backupRepo.Get(commonRepo.WithByID(down.BackupAccountID))
+    if backup.ID == 0 {
+        return "", constant.ErrRecordNotFound
+    }
+    varMap := make(map[string]interface{})
+    if err := json.Unmarshal([]byte(backup.Vars), &varMap); err != nil {
+        return "", err
+    }
+    varMap["type"] = backup.Type
+    if backup.Type != "LOCAL" {
+        varMap["bucket"] = backup.Bucket
+        switch backup.Type {
+        case constant.Sftp:
+            varMap["password"] = backup.Credential
+        case constant.OSS, constant.S3, constant.MinIo:
+            varMap["secretKey"] = backup.Credential
+        }
+        backClient, err := cloud_storage.NewCloudStorageClient(varMap)
+        if err != nil {
+            return "", fmt.Errorf("new cloud storage client failed, err: %v", err)
+        }
+        name := fmt.Sprintf("%s/%s/%s.tar.gz", cronjob.Type, cronjob.Name, record.StartTime.Format("20060102150405"))
+        if cronjob.Type == "database" {
+            name = fmt.Sprintf("%s/%s/%s.gz", cronjob.Type, cronjob.Name, record.StartTime.Format("20060102150405"))
+        }
+        isOK, err := backClient.Download(name, constant.DownloadDir)
+        if !isOK {
+            return "", fmt.Errorf("cloud storage download failed, err: %v", err)
+        }
+        return constant.DownloadDir, nil
+    }
+    if _, ok := varMap["dir"]; !ok {
+        return "", errors.New("load local backup dir failed")
+    }
+    return fmt.Sprintf("%v/%s/%s", varMap["dir"], cronjob.Type, cronjob.Name), nil
+
+}
+
 func (u *CronjobService) Create(cronjobDto dto.CronjobCreate) error {
     cronjob, _ := cronjobRepo.Get(commonRepo.WithByName(cronjobDto.Name))
     if cronjob.ID != 0 {
@@ -387,6 +434,14 @@ func tarWithExclude(cronjob *model.Cronjob, startTime time.Time) ([]byte, error)
     if !isOK {
         return nil, fmt.Errorf("cloud storage upload failed, err: %v", err)
     }
+    currentObjs, _ := backClient.ListObjects(fmt.Sprintf("%s/%s/", cronjob.Type, cronjob.Name))
+    if len(currentObjs) > int(cronjob.RetainCopies) {
+        for i := 0; i < len(currentObjs)-int(cronjob.RetainCopies); i++ {
+            if path, ok := currentObjs[i].(string); ok {
+                _, _ = backClient.Delete(path)
+            }
+        }
+    }
     if err := os.RemoveAll(fmt.Sprintf("%s/%s/%s-%v", constant.TmpDir, cronjob.Type, cronjob.Name, cronjob.ID)); err != nil {
         global.LOG.Errorf("rm file %s/%s/%s-%v failed, err: %v", constant.TaskDir, cronjob.Type, cronjob.Name, cronjob.ID, err)
     }
diff --git a/backend/constant/cronjob.go b/backend/constant/cronjob.go
index 75f853946..ab6ef7b45 100644
--- a/backend/constant/cronjob.go
+++ b/backend/constant/cronjob.go
@@ -1,6 +1,7 @@
 package constant
 
 const (
-    TmpDir  = "/opt/1Panel/task/tmp/"
-    TaskDir = "/opt/1Panel/task/"
+    TmpDir      = "/opt/1Panel/task/tmp/"
+    TaskDir     = "/opt/1Panel/task/"
+    DownloadDir = "/opt/1Panel/download"
 )
diff --git a/backend/router/ro_cronjob.go b/backend/router/ro_cronjob.go
index bc711bb34..11b89c855 100644
--- a/backend/router/ro_cronjob.go
+++ b/backend/router/ro_cronjob.go
@@ -18,6 +18,7 @@ func (s *CronjobRouter) InitCronjobRouter(Router *gin.RouterGroup) {
         withRecordRouter.POST("/del", baseApi.DeleteCronjob)
         withRecordRouter.PUT(":id", baseApi.UpdateCronjob)
         withRecordRouter.POST("/status", baseApi.UpdateCronjobStatus)
+        withRecordRouter.POST("/download", baseApi.TargetDownload)
         cmdRouter.POST("/search", baseApi.SearchCronjob)
         cmdRouter.POST("/search/records", baseApi.SearchJobRecords)
         cmdRouter.POST("/search/detail", baseApi.LoadRecordDetail)
diff --git a/backend/utils/cloud_storage/client/minio.go b/backend/utils/cloud_storage/client/minio.go
index 1a9aefdf6..11b8db7c2 100644
--- a/backend/utils/cloud_storage/client/minio.go
+++ b/backend/utils/cloud_storage/client/minio.go
@@ -156,3 +156,7 @@ func (minIo minIoClient) Download(src, target string) (bool, error) {
         return false, constant.ErrInvalidParams
     }
 }
+
+func (minIo minIoClient) ListObjects(prefix string) ([]interface{}, error) {
+    return nil, nil
+}
diff --git a/backend/utils/cloud_storage/client/oss.go b/backend/utils/cloud_storage/client/oss.go
index df456cbed..2908dd209 100644
--- a/backend/utils/cloud_storage/client/oss.go
+++ b/backend/utils/cloud_storage/client/oss.go
@@ -2,12 +2,12 @@ package client
 
 import (
     "github.com/1Panel-dev/1Panel/constant"
-    "github.com/aliyun/aliyun-oss-go-sdk/oss"
+    osssdk "github.com/aliyun/aliyun-oss-go-sdk/oss"
 )
 
 type ossClient struct {
     Vars   map[string]interface{}
-    client oss.Client
+    client osssdk.Client
 }
 
 func NewOssClient(vars map[string]interface{}) (*ossClient, error) {
@@ -29,7 +29,7 @@ func NewOssClient(vars map[string]interface{}) (*ossClient, error) {
     } else {
         return nil, constant.ErrInvalidParams
     }
-    client, err := oss.New(endpoint, accessKey, secretKey)
+    client, err := osssdk.New(endpoint, accessKey, secretKey)
     if err != nil {
         return nil, err
     }
@@ -96,7 +96,7 @@ func (oss ossClient) Download(src, target string) (bool, error) {
     return true, nil
 }
 
-func (oss *ossClient) GetBucket() (*oss.Bucket, error) {
+func (oss *ossClient) GetBucket() (*osssdk.Bucket, error) {
     if _, ok := oss.Vars["bucket"]; ok {
         bucket, err := oss.client.Bucket(oss.Vars["bucket"].(string))
         if err != nil {
@@ -107,3 +107,23 @@ func (oss *ossClient) GetBucket() (*oss.Bucket, error) {
         return nil, constant.ErrInvalidParams
     }
 }
+
+func (oss *ossClient) ListObjects(prefix string) ([]interface{}, error) {
+    if _, ok := oss.Vars["bucket"]; ok {
+        bucket, err := oss.client.Bucket(oss.Vars["bucket"].(string))
+        if err != nil {
+            return nil, err
+        }
+        lor, err := bucket.ListObjects(osssdk.Prefix(prefix))
+        if err != nil {
+            return nil, err
+        }
+        var result []interface{}
+        for _, obj := range lor.Objects {
+            result = append(result, obj.Key)
+        }
+        return result, nil
+    } else {
+        return nil, constant.ErrInvalidParams
+    }
+}
"github.com/1Panel-dev/1Panel/constant" - "github.com/aliyun/aliyun-oss-go-sdk/oss" + osssdk "github.com/aliyun/aliyun-oss-go-sdk/oss" ) type ossClient struct { Vars map[string]interface{} - client oss.Client + client osssdk.Client } func NewOssClient(vars map[string]interface{}) (*ossClient, error) { @@ -29,7 +29,7 @@ func NewOssClient(vars map[string]interface{}) (*ossClient, error) { } else { return nil, constant.ErrInvalidParams } - client, err := oss.New(endpoint, accessKey, secretKey) + client, err := osssdk.New(endpoint, accessKey, secretKey) if err != nil { return nil, err } @@ -96,7 +96,7 @@ func (oss ossClient) Download(src, target string) (bool, error) { return true, nil } -func (oss *ossClient) GetBucket() (*oss.Bucket, error) { +func (oss *ossClient) GetBucket() (*osssdk.Bucket, error) { if _, ok := oss.Vars["bucket"]; ok { bucket, err := oss.client.Bucket(oss.Vars["bucket"].(string)) if err != nil { @@ -107,3 +107,23 @@ func (oss *ossClient) GetBucket() (*oss.Bucket, error) { return nil, constant.ErrInvalidParams } } + +func (oss *ossClient) ListObjects(prefix string) ([]interface{}, error) { + if _, ok := oss.Vars["bucket"]; ok { + bucket, err := oss.client.Bucket(oss.Vars["bucket"].(string)) + if err != nil { + return nil, err + } + lor, err := bucket.ListObjects(osssdk.Prefix(prefix)) + if err != nil { + return nil, err + } + var result []interface{} + for _, obj := range lor.Objects { + result = append(result, obj.Key) + } + return result, nil + } else { + return nil, constant.ErrInvalidParams + } +} diff --git a/backend/utils/cloud_storage/client/oss_test.go b/backend/utils/cloud_storage/client/oss_test.go new file mode 100644 index 000000000..b44293cc0 --- /dev/null +++ b/backend/utils/cloud_storage/client/oss_test.go @@ -0,0 +1,63 @@ +package service + +import ( + "encoding/json" + "fmt" + "testing" + + "github.com/1Panel-dev/1Panel/app/model" + "github.com/1Panel-dev/1Panel/constant" + "github.com/1Panel-dev/1Panel/global" + "github.com/1Panel-dev/1Panel/init/db" + "github.com/1Panel-dev/1Panel/init/log" + "github.com/1Panel-dev/1Panel/init/viper" + "github.com/aliyun/aliyun-oss-go-sdk/oss" +) + +func TestCron(t *testing.T) { + viper.Init() + log.Init() + db.Init() + + var backup model.BackupAccount + if err := global.DB.Where("id = ?", 2).First(&backup).Error; err != nil { + fmt.Println(err) + } + + varMap := make(map[string]interface{}) + if err := json.Unmarshal([]byte(backup.Vars), &varMap); err != nil { + fmt.Println(err) + } + varMap["type"] = backup.Type + varMap["bucket"] = backup.Bucket + switch backup.Type { + case constant.Sftp: + varMap["password"] = backup.Credential + case constant.OSS, constant.S3, constant.MinIo: + varMap["secretKey"] = backup.Credential + } + endpoint := varMap["endpoint"].(string) + accessKey := varMap["accessKey"].(string) + secretKey := varMap["secretKey"].(string) + client, err := oss.New(endpoint, accessKey, secretKey) + if err != nil { + fmt.Println(err) + } + bucket, err := client.Bucket(backup.Bucket) + if err != nil { + fmt.Println(err) + } + lor, err := bucket.ListObjects(oss.Prefix("directory/directory-test1/")) + if err != nil { + fmt.Println(err) + } + fmt.Println("my objects:", getObjectsFormResponse(lor)) +} + +func getObjectsFormResponse(lor oss.ListObjectsResult) string { + var output string + for _, object := range lor.Objects { + output += object.Key + " " + } + return output +} diff --git a/backend/utils/cloud_storage/client/s3.go b/backend/utils/cloud_storage/client/s3.go index 4592ea679..1348f58b5 100644 --- 
diff --git a/backend/utils/cloud_storage/client/s3.go b/backend/utils/cloud_storage/client/s3.go
index 4592ea679..1348f58b5 100644
--- a/backend/utils/cloud_storage/client/s3.go
+++ b/backend/utils/cloud_storage/client/s3.go
@@ -175,3 +175,7 @@ func (s3C *s3Client) getBucket() (string, error) {
         return "", constant.ErrInvalidParams
     }
 }
+
+func (s3C *s3Client) ListObjects(prefix string) ([]interface{}, error) {
+    return nil, nil
+}
diff --git a/backend/utils/cloud_storage/client/sftp.go b/backend/utils/cloud_storage/client/sftp.go
index ea94acce3..52d470f41 100644
--- a/backend/utils/cloud_storage/client/sftp.go
+++ b/backend/utils/cloud_storage/client/sftp.go
@@ -210,3 +210,7 @@ func (s sftpClient) getBucket() (string, error) {
         return "", constant.ErrInvalidParams
     }
 }
+
+func (s sftpClient) ListObjects(prefix string) ([]interface{}, error) {
+    return nil, nil
+}
diff --git a/backend/utils/cloud_storage/cloud_storage_client.go b/backend/utils/cloud_storage/cloud_storage_client.go
index 4c1cbefbf..0c994fb96 100644
--- a/backend/utils/cloud_storage/cloud_storage_client.go
+++ b/backend/utils/cloud_storage/cloud_storage_client.go
@@ -7,6 +7,7 @@ import (
 
 type CloudStorageClient interface {
     ListBuckets() ([]interface{}, error)
+    ListObjects(prefix string) ([]interface{}, error)
     Exist(path string) (bool, error)
     Delete(path string) (bool, error)
     Upload(src, target string) (bool, error)
diff --git a/frontend/src/api/interface/cronjob.ts b/frontend/src/api/interface/cronjob.ts
index 888fbf169..ad242da27 100644
--- a/frontend/src/api/interface/cronjob.ts
+++ b/frontend/src/api/interface/cronjob.ts
@@ -61,6 +61,10 @@ export namespace Cronjob {
         id: number;
         status: string;
     }
+    export interface Download {
+        recordID: number;
+        backupAccountID: number;
+    }
     export interface SearchRecord extends ReqPage {
         cronjobID: number;
         startTime: Date;
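Note on the `ListObjects` additions: only the OSS client gets a real implementation in this patch; the MinIO, S3, and SFTP clients return `nil, nil`, so the new retain-copies cleanup in `tarWithExclude` currently only takes effect for OSS backups. For reference, a possible S3 implementation might look like the sketch below. This is illustrative only and not part of the patch: the helper name `listObjectsWithPrefix` and the assumption that the client can hand over an aws-sdk-go `*session.Session` are placeholders, not the project's actual wiring.

```go
// Illustrative sketch only — not part of this patch. It shows one way the
// s3Client.ListObjects stub could later be filled in using aws-sdk-go v1.
// The free-function form (explicit session, bucket, prefix) is an assumption;
// a real method would read these values from the client's own fields.
package client

import (
	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"
)

// listObjectsWithPrefix returns all object keys under prefix, following
// continuation tokens so large backup directories are not truncated.
func listObjectsWithPrefix(sess *session.Session, bucket, prefix string) ([]interface{}, error) {
	svc := s3.New(sess)
	input := &s3.ListObjectsV2Input{
		Bucket: aws.String(bucket),
		Prefix: aws.String(prefix),
	}
	var result []interface{}
	for {
		out, err := svc.ListObjectsV2(input)
		if err != nil {
			return nil, err
		}
		for _, obj := range out.Contents {
			result = append(result, aws.StringValue(obj.Key))
		}
		if !aws.BoolValue(out.IsTruncated) {
			break
		}
		input.ContinuationToken = out.NextContinuationToken
	}
	return result, nil
}
```

The sketch pages through continuation tokens; the OSS implementation in this patch reads only the first `ListObjects` page, which is fine as long as the number of retained copies stays below the service's default page size.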