mirror of https://github.com/1Panel-dev/1Panel
feat: complete integration of database backups with scheduled cron jobs
parent 9f1e417c06
commit 0f136570fe
@@ -103,21 +103,6 @@ func (b *BaseApi) UpdateCronjob(c *gin.Context) {
         return
     }
 
-    upMap := make(map[string]interface{})
-    upMap["name"] = req.Name
-    upMap["script"] = req.Script
-    upMap["specType"] = req.SpecType
-    upMap["week"] = req.Week
-    upMap["day"] = req.Day
-    upMap["hour"] = req.Hour
-    upMap["minute"] = req.Minute
-    upMap["website"] = req.Website
-    upMap["exclusionRules"] = req.ExclusionRules
-    upMap["database"] = req.Database
-    upMap["url"] = req.URL
-    upMap["sourceDir"] = req.SourceDir
-    upMap["targetDirID"] = req.TargetDirID
-    upMap["retainCopies"] = req.RetainCopies
     if err := cronjobService.Update(id, req); err != nil {
         helper.ErrorWithDetail(c, constant.CodeErrInternalServer, constant.ErrTypeInternalServer, err)
         return
@@ -80,6 +80,22 @@ func (b *BaseApi) SearchMysql(c *gin.Context) {
     })
 }
 
+func (b *BaseApi) ListDBNameByVersion(c *gin.Context) {
+    version, ok := c.Params.Get("version")
+    if !ok {
+        helper.ErrorWithDetail(c, constant.CodeErrBadRequest, constant.ErrTypeInvalidParams, errors.New("error version in path"))
+        return
+    }
+
+    list, err := mysqlService.ListDBByVersion(version)
+    if err != nil {
+        helper.ErrorWithDetail(c, constant.CodeErrInternalServer, constant.ErrTypeInternalServer, err)
+        return
+    }
+
+    helper.SuccessWithData(c, list)
+}
+
 func (b *BaseApi) SearchDBBackups(c *gin.Context) {
     var req dto.SearchBackupsWithPage
     if err := c.ShouldBindJSON(&req); err != nil {
@@ -87,7 +103,7 @@ func (b *BaseApi) SearchDBBackups(c *gin.Context) {
         return
     }
 
-    total, list, err := mysqlService.SearchBacpupsWithPage(req)
+    total, list, err := mysqlService.SearchBackupsWithPage(req)
     if err != nil {
         helper.ErrorWithDetail(c, constant.CodeErrInternalServer, constant.ErrTypeInternalServer, err)
         return
@@ -15,8 +15,10 @@ type CronjobCreate struct {
     Website string `json:"website"`
     ExclusionRules string `json:"exclusionRules"`
     Database string `json:"database"`
+    DBName string `json:"dbName"`
     URL string `json:"url"`
     SourceDir string `json:"sourceDir"`
+    KeepLocal bool `json:"keepLocal"`
     TargetDirID int `json:"targetDirID"`
     RetainCopies int `json:"retainCopies" validate:"number,min=1"`
 }
@@ -33,8 +35,10 @@ type CronjobUpdate struct {
     Website string `json:"website"`
     ExclusionRules string `json:"exclusionRules"`
     Database string `json:"database"`
+    DBName string `json:"dbName"`
     URL string `json:"url"`
     SourceDir string `json:"sourceDir"`
+    KeepLocal bool `json:"keepLocal"`
     TargetDirID int `json:"targetDirID"`
     RetainCopies int `json:"retainCopies" validate:"number,min=1"`
 }
@@ -63,8 +67,10 @@ type CronjobInfo struct {
     Website string `json:"website"`
     ExclusionRules string `json:"exclusionRules"`
     Database string `json:"database"`
+    DBName string `json:"dbName"`
     URL string `json:"url"`
     SourceDir string `json:"sourceDir"`
+    KeepLocal bool `json:"keepLocal"`
     TargetDir string `json:"targetDir"`
     TargetDirID int `json:"targetDirID"`
     RetainCopies int `json:"retainCopies"`
@@ -17,11 +17,14 @@ type Cronjob struct {
     Script string `gorm:"longtext" json:"script"`
     Website string `gorm:"type:varchar(64)" json:"website"`
     Database string `gorm:"type:varchar(64)" json:"database"`
+    DBName string `gorm:"type:varchar(64)" json:"dbName"`
     URL string `gorm:"type:varchar(256)" json:"url"`
     SourceDir string `gorm:"type:varchar(256)" json:"sourceDir"`
-    TargetDirID uint64 `gorm:"type:decimal" json:"targetDirID"`
     ExclusionRules string `gorm:"longtext" json:"exclusionRules"`
-    RetainCopies uint64 `gorm:"type:decimal" json:"retainCopies"`
+
+    KeepLocal bool `gorm:"type:varchar(64)" json:"keepLocal"`
+    TargetDirID uint64 `gorm:"type:decimal" json:"targetDirID"`
+    RetainCopies uint64 `gorm:"type:decimal" json:"retainCopies"`
 
     Status string `gorm:"type:varchar(64)" json:"status"`
     EntryID uint64 `gorm:"type:decimal" json:"entryID"`
@@ -35,6 +38,8 @@ type JobRecords struct {
     StartTime time.Time `gorm:"type:datetime" json:"startTime"`
     Interval float64 `gorm:"type:float" json:"interval"`
     Records string `gorm:"longtext" json:"records"`
+    FromLocal bool `gorm:"type:varchar(64)" json:"source"`
+    File string `gorm:"type:varchar(256)" json:"file"`
     Status string `gorm:"type:varchar(64)" json:"status"`
     Message string `gorm:"longtext" json:"message"`
 }
@@ -66,7 +66,7 @@ func (u *BackupService) SearchRecordWithPage(search dto.BackupSearch) (int64, []
 
 func (u *BackupService) DownloadRecord(info dto.DownloadRecord) (string, error) {
     if info.Source == "LOCAL" {
-        return info.FileDir + info.FileName, nil
+        return info.FileDir + "/" + info.FileName, nil
     }
     backup, _ := backupRepo.Get(commonRepo.WithByType(info.Source))
     if backup.ID == 0 {
@@ -200,3 +200,25 @@ func (u *BackupService) NewClient(backup *model.BackupAccount) (cloud_storage.Cl
 
     return backClient, nil
 }
+
+func loadLocalDir(backup model.BackupAccount) (string, error) {
+    varMap := make(map[string]interface{})
+    if err := json.Unmarshal([]byte(backup.Vars), &varMap); err != nil {
+        return "", err
+    }
+    if _, ok := varMap["dir"]; !ok {
+        return "", errors.New("load local backup dir failed")
+    }
+    baseDir, ok := varMap["dir"].(string)
+    if ok {
+        if _, err := os.Stat(baseDir); err != nil && os.IsNotExist(err) {
+            if err = os.MkdirAll(baseDir, os.ModePerm); err != nil {
+                if err != nil {
+                    return "", fmt.Errorf("mkdir %s failed, err: %v", baseDir, err)
+                }
+            }
+        }
+        return baseDir, nil
+    }
+    return "", fmt.Errorf("error type dir: %T", varMap["dir"])
+}
@@ -217,10 +217,12 @@ func (u *CronjobService) Update(id uint, req dto.CronjobUpdate) error {
     upMap["website"] = req.Website
     upMap["exclusion_rules"] = req.ExclusionRules
     upMap["database"] = req.Database
+    upMap["db_name"] = req.DBName
     upMap["url"] = req.URL
     upMap["source_dir"] = req.SourceDir
+    upMap["keep_local"] = req.KeepLocal
     upMap["target_dir_id"] = req.TargetDirID
-    upMap["retain_days"] = req.RetainCopies
+    upMap["retain_copies"] = req.RetainCopies
     return cronjobRepo.Update(id, upMap)
 }
 
@@ -252,7 +254,7 @@ func (u *CronjobService) AddCronJob(cronjob *model.Cronjob) (int, error) {
 }
 
 func mkdirAndWriteFile(cronjob *model.Cronjob, startTime time.Time, msg []byte) (string, error) {
-    dir := fmt.Sprintf("%s%s/%s-%v", constant.TaskDir, cronjob.Type, cronjob.Name, cronjob.ID)
+    dir := fmt.Sprintf("%s/%s/%s-%v", constant.TaskDir, cronjob.Type, cronjob.Name, cronjob.ID)
     if _, err := os.Stat(dir); err != nil && os.IsNotExist(err) {
         if err = os.MkdirAll(dir, os.ModePerm); err != nil {
             return "", err
@@ -2,7 +2,6 @@ package service
 
 import (
     "crypto/tls"
-    "encoding/json"
     "fmt"
     "io/ioutil"
     "net/http"
@@ -25,19 +24,23 @@ func (u *CronjobService) HandleJob(cronjob *model.Cronjob) {
         err error
     )
     record := cronjobRepo.StartRecords(cronjob.ID, "")
+    record.FromLocal = cronjob.KeepLocal
     switch cronjob.Type {
     case "shell":
         cmd := exec.Command(cronjob.Script)
-        message, err = cmd.CombinedOutput()
+        stdout, errExec := cmd.CombinedOutput()
+        if errExec != nil {
+            err = errors.New(string(stdout))
+        }
     case "website":
-        message, err = u.HandleBackup(cronjob, record.StartTime)
+        record.File, err = u.HandleBackup(cronjob, record.StartTime)
     case "database":
-        message, err = u.HandleBackup(cronjob, record.StartTime)
+        record.File, err = u.HandleBackup(cronjob, record.StartTime)
     case "directory":
         if len(cronjob.SourceDir) == 0 {
             return
         }
-        message, err = u.HandleBackup(cronjob, record.StartTime)
+        record.File, err = u.HandleBackup(cronjob, record.StartTime)
     case "curl":
         if len(cronjob.URL) == 0 {
             return
@@ -65,56 +68,63 @@ func (u *CronjobService) HandleJob(cronjob *model.Cronjob) {
     cronjobRepo.EndRecords(record, constant.StatusSuccess, "", record.Records)
 }
 
-func (u *CronjobService) HandleBackup(cronjob *model.Cronjob, startTime time.Time) ([]byte, error) {
-    var stdout []byte
+func (u *CronjobService) HandleBackup(cronjob *model.Cronjob, startTime time.Time) (string, error) {
+    var (
+        baseDir string
+        backupDir string
+        fileName string
+    )
     backup, err := backupRepo.Get(commonRepo.WithByID(uint(cronjob.TargetDirID)))
     if err != nil {
-        return nil, err
+        return "", err
     }
-    commonDir := fmt.Sprintf("%s/%s/", cronjob.Type, cronjob.Name)
-    name := fmt.Sprintf("%s.gz", startTime.Format("20060102150405"))
-    if cronjob.Type != "database" {
-        name = fmt.Sprintf("%s.tar.gz", startTime.Format("20060102150405"))
-    }
-    if backup.Type == "LOCAL" {
-        varMap := make(map[string]interface{})
-        if err := json.Unmarshal([]byte(backup.Vars), &varMap); err != nil {
-            return nil, err
-        }
-        if _, ok := varMap["dir"]; !ok {
-            return nil, errors.New("load local backup dir failed")
-        }
-        baseDir := varMap["dir"].(string)
-        if _, err := os.Stat(baseDir); err != nil && os.IsNotExist(err) {
-            if err = os.MkdirAll(baseDir, os.ModePerm); err != nil {
-                if err != nil {
-                    return nil, fmt.Errorf("mkdir %s failed, err: %v", baseDir, err)
-                }
-            }
-        }
-        stdout, err = handleTar(cronjob.SourceDir, fmt.Sprintf("%s/%s", baseDir, commonDir), name, cronjob.ExclusionRules)
+    if cronjob.KeepLocal || cronjob.Type != "LOCAL" {
+        backupLocal, err := backupRepo.Get(commonRepo.WithByType("LOCAL"))
         if err != nil {
-            return stdout, err
+            return "", err
         }
-        u.HandleRmExpired(backup.Type, fmt.Sprintf("%s/%s", baseDir, commonDir), cronjob, nil)
-        return stdout, nil
+        localDir, err := loadLocalDir(backupLocal)
+        if err != nil {
+            return "", err
+        }
+        baseDir = localDir
+    } else {
+        baseDir = constant.TmpDir
     }
+
+    if cronjob.Type == "database" {
+        fileName = fmt.Sprintf("db_%s_%s.sql.gz", cronjob.DBName, time.Now().Format("20060102150405"))
+        backupDir = fmt.Sprintf("database/%s/%s", cronjob.Database, cronjob.DBName)
+        err = backupMysql(backup.Type, baseDir, backupDir, cronjob.Database, cronjob.DBName, fileName)
+        if err != nil {
+            return "", err
+        }
+    } else {
+        fileName = fmt.Sprintf("%s.tar.gz", startTime.Format("20060102150405"))
+        backupDir = fmt.Sprintf("%s/%s", cronjob.Type, cronjob.Name)
+        if err := handleTar(cronjob.SourceDir, baseDir+"/"+backupDir, fileName, cronjob.ExclusionRules); err != nil {
+            return "", err
+        }
+    }
+
+    if backup.Type == "LOCAL" {
+        u.HandleRmExpired(backup.Type, baseDir, backupDir, cronjob, nil)
+        return baseDir + "/" + backupDir + "/" + fileName, nil
+    }
+
+    cloudFile := baseDir + "/" + backupDir + "/" + fileName
+    if !cronjob.KeepLocal {
+        cloudFile = backupDir + "/" + fileName
+    }
-    targetDir := constant.TmpDir + commonDir
     client, err := NewIBackupService().NewClient(&backup)
     if err != nil {
-        return nil, err
+        return cloudFile, err
     }
-    if cronjob.Type != "database" {
-        stdout, err = handleTar(cronjob.SourceDir, targetDir, name, cronjob.ExclusionRules)
-        if err != nil {
-            return stdout, err
-        }
+    if _, err = client.Upload(baseDir+"/"+backupDir+"/"+fileName, backupDir+"/"+fileName); err != nil {
+        return cloudFile, err
     }
-    if _, err = client.Upload(targetDir+name, commonDir+name); err != nil {
-        return nil, err
-    }
-    u.HandleRmExpired(backup.Type, commonDir+name, cronjob, client)
-    return stdout, nil
+    u.HandleRmExpired(backup.Type, baseDir, backupDir, cronjob, client)
+    return cloudFile, nil
 }
 
 func (u *CronjobService) HandleDelete(id uint) error {
@@ -132,26 +142,27 @@ func (u *CronjobService) HandleDelete(id uint) error {
     return nil
 }
 
-func (u *CronjobService) HandleRmExpired(backType, path string, cronjob *model.Cronjob, backClient cloud_storage.CloudStorageClient) {
+func (u *CronjobService) HandleRmExpired(backType, baseDir, backupDir string, cronjob *model.Cronjob, backClient cloud_storage.CloudStorageClient) {
     if backType != "LOCAL" {
-        commonDir := fmt.Sprintf("%s/%s/", cronjob.Type, cronjob.Name)
-        currentObjs, err := backClient.ListObjects(commonDir)
+        currentObjs, err := backClient.ListObjects(backupDir + "/")
         if err != nil {
-            global.LOG.Errorf("list bucket object %s failed, err: %v", commonDir, err)
+            global.LOG.Errorf("list bucket object %s failed, err: %v", backupDir, err)
             return
         }
         for i := 0; i < len(currentObjs)-int(cronjob.RetainCopies); i++ {
            _, _ = backClient.Delete(currentObjs[i].(string))
         }
-        return
+        if !cronjob.KeepLocal {
+            return
+        }
     }
-    files, err := ioutil.ReadDir(path)
+    files, err := ioutil.ReadDir(baseDir + "/" + backupDir)
     if err != nil {
-        global.LOG.Errorf("read dir %s failed, err: %v", path, err)
+        global.LOG.Errorf("read dir %s failed, err: %v", baseDir+"/"+backupDir, err)
         return
     }
     for i := 0; i < len(files)-int(cronjob.RetainCopies); i++ {
-        _ = os.Remove(path + "/" + files[i].Name())
+        _ = os.Remove(baseDir + "/" + backupDir + "/" + files[i].Name())
     }
     records, _ := cronjobRepo.ListRecord(cronjobRepo.WithByJobID(int(cronjob.ID)))
     if len(records) > int(cronjob.RetainCopies) {
@@ -161,15 +172,15 @@ func (u *CronjobService) HandleRmExpired(backType, path string, cronjob *model.C
     }
 }
 
-func handleTar(sourceDir, targetDir, name, exclusionRules string) ([]byte, error) {
+func handleTar(sourceDir, targetDir, name, exclusionRules string) error {
     if _, err := os.Stat(targetDir); err != nil && os.IsNotExist(err) {
         if err = os.MkdirAll(targetDir, os.ModePerm); err != nil {
-            return nil, err
+            return err
         }
     }
     exStr := []string{}
     exStr = append(exStr, "zcvf")
-    exStr = append(exStr, targetDir+name)
+    exStr = append(exStr, targetDir+"/"+name)
     excludes := strings.Split(exclusionRules, ";")
     for _, exclude := range excludes {
         if len(exclude) == 0 {
@@ -188,5 +199,9 @@ func handleTar(sourceDir, targetDir, name, exclusionRules string) ([]byte, error
         exStr = append(exStr, sourceDir)
     }
     cmd := exec.Command("tar", exStr...)
-    return (cmd.CombinedOutput())
+    stdout, err := cmd.CombinedOutput()
+    if err != nil {
+        return errors.New(string(stdout))
+    }
+    return nil
 }
@@ -23,7 +23,8 @@ type MysqlService struct{}
 
 type IMysqlService interface {
     SearchWithPage(search dto.SearchDBWithPage) (int64, interface{}, error)
-    SearchBacpupsWithPage(search dto.SearchBackupsWithPage) (int64, interface{}, error)
+    ListDBByVersion(version string) ([]string, error)
+    SearchBackupsWithPage(search dto.SearchBackupsWithPage) (int64, interface{}, error)
     Create(mysqlDto dto.MysqlDBCreate) error
     ChangeInfo(info dto.ChangeDBInfo) error
     UpdateVariables(variables dto.MysqlVariablesUpdate) error
@@ -55,7 +56,16 @@ func (u *MysqlService) SearchWithPage(search dto.SearchDBWithPage) (int64, inter
     return total, dtoMysqls, err
 }
 
-func (u *MysqlService) SearchBacpupsWithPage(search dto.SearchBackupsWithPage) (int64, interface{}, error) {
+func (u *MysqlService) ListDBByVersion(version string) ([]string, error) {
+    mysqls, err := mysqlRepo.List(mysqlRepo.WithByVersion(version))
+    var dbNames []string
+    for _, mysql := range mysqls {
+        dbNames = append(dbNames, mysql.Name)
+    }
+    return dbNames, err
+}
+
+func (u *MysqlService) SearchBackupsWithPage(search dto.SearchBackupsWithPage) (int64, interface{}, error) {
     app, err := mysqlRepo.LoadBaseInfoByVersion(search.Version)
     if err != nil {
         return 0, nil, err
@@ -111,36 +121,18 @@ func (u *MysqlService) Create(mysqlDto dto.MysqlDBCreate) error {
 }
 
 func (u *MysqlService) Backup(db dto.BackupDB) error {
-    app, err := mysqlRepo.LoadBaseInfoByVersion(db.Version)
+    backupLocal, err := backupRepo.Get(commonRepo.WithByType("LOCAL"))
     if err != nil {
         return err
     }
-
-    backupDir := fmt.Sprintf("%s/%s/%s/", constant.DatabaseDir, app.Name, db.DBName)
-    if _, err := os.Stat(backupDir); err != nil && os.IsNotExist(err) {
-        if err = os.MkdirAll(backupDir, os.ModePerm); err != nil {
-            return err
-        }
+    localDir, err := loadLocalDir(backupLocal)
+    if err != nil {
+        return err
     }
-    backupName := fmt.Sprintf("%s%s_%s.sql.gz", backupDir, db.DBName, time.Now().Format("20060102150405"))
-    outfile, _ := os.OpenFile(backupName, os.O_RDWR|os.O_CREATE, 0755)
-    cmd := exec.Command("docker", "exec", app.ContainerName, "mysqldump", "-uroot", "-p"+app.Password, db.DBName)
-    gzipCmd := exec.Command("gzip", "-cf")
-    gzipCmd.Stdin, _ = cmd.StdoutPipe()
-    gzipCmd.Stdout = outfile
-    _ = gzipCmd.Start()
-    _ = cmd.Run()
-    _ = gzipCmd.Wait()
-
-    if err := backupRepo.CreateRecord(&model.BackupRecord{
-        Type: "database-mysql",
-        Name: app.Name,
-        DetailName: db.DBName,
-        Source: "LOCAL",
-        FileDir: backupDir,
-        FileName: strings.ReplaceAll(backupName, backupDir, ""),
-    }); err != nil {
-        global.LOG.Errorf("save backup record failed, err: %v", err)
+    backupDir := fmt.Sprintf("database/%s/%s", db.Version, db.DBName)
+    fileName := fmt.Sprintf("%s_%s.sql.gz", db.DBName, time.Now().Format("20060102150405"))
+    if err := backupMysql("LOCAL", localDir, backupDir, db.Version, db.DBName, fileName); err != nil {
+        return err
     }
     return nil
 }
@@ -450,3 +442,44 @@ func excuteSql(containerName, password, command string) error {
     }
     return nil
 }
+
+func backupMysql(backupType, baseDir, backupDir, version, dbName, fileName string) error {
+    app, err := mysqlRepo.LoadBaseInfoByVersion(version)
+    if err != nil {
+        return err
+    }
+
+    fullDir := baseDir + "/" + backupDir
+    if _, err := os.Stat(fullDir); err != nil && os.IsNotExist(err) {
+        if err = os.MkdirAll(fullDir, os.ModePerm); err != nil {
+            if err != nil {
+                return fmt.Errorf("mkdir %s failed, err: %v", fullDir, err)
+            }
+        }
+    }
+    outfile, _ := os.OpenFile(fullDir+"/"+fileName, os.O_RDWR|os.O_CREATE, 0755)
+    cmd := exec.Command("docker", "exec", app.ContainerName, "mysqldump", "-uroot", "-p"+app.Password, dbName)
+    gzipCmd := exec.Command("gzip", "-cf")
+    gzipCmd.Stdin, _ = cmd.StdoutPipe()
+    gzipCmd.Stdout = outfile
+    _ = gzipCmd.Start()
+    _ = cmd.Run()
+    _ = gzipCmd.Wait()
+
+    record := &model.BackupRecord{
+        Type: "database-mysql",
+        Name: app.Name,
+        DetailName: dbName,
+        Source: backupType,
+        FileDir: backupDir,
+        FileName: fileName,
+    }
+    if baseDir != constant.TmpDir || backupType == "LOCAL" {
+        record.Source = "LOCAL"
+        record.FileDir = fullDir
+    }
+    if err := backupRepo.CreateRecord(record); err != nil {
+        global.LOG.Errorf("save backup record failed, err: %v", err)
+    }
+    return nil
+}
@@ -9,6 +9,6 @@ const (
     Sftp = "SFTP"
     MinIo = "MINIO"
 
-    DatabaseDir = "/opt/1Panel/data/backup/database"
-    WebsiteDir = "/opt/1Panel/data/backup/website"
+    DatabaseBackupDir = "/opt/1Panel/data/backup/database"
+    WebsiteBackupDir = "/opt/1Panel/data/backup/website"
 )
@@ -1,7 +1,7 @@
 package constant
 
 const (
-    TmpDir = "/opt/1Panel/task/tmp/"
-    TaskDir = "/opt/1Panel/task/"
-    DownloadDir = "/opt/1Panel/download/"
+    TmpDir = "/opt/1Panel/data/tmp"
+    TaskDir = "/opt/1Panel/data/task"
+    DownloadDir = "/opt/1Panel/download"
 )
@@ -33,5 +33,6 @@ func (s *DatabaseRouter) InitDatabaseRouter(Router *gin.RouterGroup) {
         cmdRouter.GET("/status/:version", baseApi.LoadStatus)
         cmdRouter.GET("/baseinfo/:version", baseApi.LoadBaseinfo)
         cmdRouter.GET("/versions", baseApi.LoadVersions)
+        cmdRouter.GET("/dbs/:version", baseApi.ListDBNameByVersion)
     }
 }
@@ -15,8 +15,10 @@ export namespace Cronjob {
         website: string;
         exclusionRules: string;
         database: string;
+        dbName: string;
         url: string;
         sourceDir: string;
+        keepLocal: boolean;
         targetDirID: number;
         targetDir: string;
         retainCopies: number;
@@ -35,8 +37,10 @@ export namespace Cronjob {
         website: string;
         exclusionRules: string;
         database: string;
+        dbName: string;
         url: string;
         sourceDir: string;
+        keepLocal: boolean;
         targetDirID: number;
         retainCopies: number;
     }
@@ -52,8 +56,10 @@ export namespace Cronjob {
         website: string;
         exclusionRules: string;
         database: string;
+        dbName: string;
         url: string;
         sourceDir: string;
+        keepLocal: boolean;
         targetDirID: number;
         retainCopies: number;
     }
@@ -6,6 +6,9 @@ import { Database } from '../interface/database';
 export const searchMysqlDBs = (params: Database.Search) => {
     return http.post<ResPage<Database.MysqlDBInfo>>(`databases/search`, params);
 };
+export const listDBByVersion = (params: string) => {
+    return http.get(`databases/dbs/${params}`);
+};
 
 export const backup = (params: Database.Backup) => {
     return http.post(`/databases/backup`, params);
@@ -136,6 +136,7 @@ const onOpenDialog = async (
         day: 1,
         hour: 2,
         minute: 3,
+        keepLocal: true,
         retainCopies: 7,
     },
 ) => {
@@ -74,13 +74,20 @@
                 />
             </el-select>
         </el-form-item>
-        <el-form-item
-            v-if="dialogData.rowData!.type === 'database'"
-            :label="$t('cronjob.database')"
-            prop="database"
-        >
-            <el-input style="width: 100%" clearable v-model="dialogData.rowData!.database" />
-        </el-form-item>
+
+        <div v-if="dialogData.rowData!.type === 'database'">
+            <el-form-item :label="$t('cronjob.database')" prop="database">
+                <el-radio-group v-model="dialogData.rowData!.database" @change="changeDBVersion" class="ml-4">
+                    <el-radio v-for="item in mysqlVersionOptions" :key="item" :label="item" :value="item" />
+                </el-radio-group>
+            </el-form-item>
+            <el-form-item :label="$t('cronjob.database')" prop="dbName">
+                <el-select style="width: 100%" clearable v-model="dialogData.rowData!.dbName">
+                    <el-option v-for="item in dbOptions" :key="item" :label="item" :value="item" />
+                </el-select>
+            </el-form-item>
+        </div>
+
         <el-form-item
             v-if="dialogData.rowData!.type === 'directory'"
             :label="$t('cronjob.sourceDir')"
@@ -98,19 +105,30 @@
             </el-input>
         </el-form-item>
 
-        <el-form-item v-if="isBackup()" :label="$t('cronjob.target')" prop="targetDirID">
-            <el-select style="width: 100%" v-model="dialogData.rowData!.targetDirID">
-                <el-option
-                    v-for="item in backupOptions"
-                    :key="item.label"
-                    :value="item.value"
-                    :label="item.label"
-                />
-            </el-select>
-        </el-form-item>
-        <el-form-item v-if="isBackup()" :label="$t('cronjob.retainCopies')" prop="retainCopies">
-            <el-input-number :min="1" :max="30" v-model.number="dialogData.rowData!.retainCopies"></el-input-number>
-        </el-form-item>
+        <div v-if="isBackup()">
+            <el-form-item :label="$t('cronjob.target')" prop="targetDirID">
+                <el-select style="width: 100%" v-model="dialogData.rowData!.targetDirID">
+                    <el-option
+                        v-for="item in backupOptions"
+                        :key="item.label"
+                        :value="item.value"
+                        :label="item.label"
+                    />
+                </el-select>
+            </el-form-item>
+            <el-form-item v-if="dialogData.rowData!.targetDirID !== localDirID">
+                <el-checkbox v-model="dialogData.rowData!.keepLocal">
+                    同时保留本地备份(和云存储保留份数一致)
+                </el-checkbox>
+            </el-form-item>
+            <el-form-item :label="$t('cronjob.retainCopies')" prop="retainCopies">
+                <el-input-number
+                    :min="1"
+                    :max="30"
+                    v-model.number="dialogData.rowData!.retainCopies"
+                ></el-input-number>
+            </el-form-item>
+        </div>
 
         <el-form-item v-if="dialogData.rowData!.type === 'curl'" :label="$t('cronjob.url') + 'URL'" prop="url">
             <el-input style="width: 100%" clearable v-model="dialogData.rowData!.url" />
@@ -143,7 +161,7 @@
 </template>
 
 <script lang="ts" setup>
-import { onMounted, reactive, ref } from 'vue';
+import { reactive, ref } from 'vue';
 import { Rules } from '@/global/form-rules';
 import { loadBackupName } from '@/views/setting/helper';
 import FileList from '@/components/file-list/index.vue';
@@ -152,6 +170,7 @@ import i18n from '@/lang';
 import { ElForm, ElMessage } from 'element-plus';
 import { Cronjob } from '@/api/interface/cronjob';
 import { addCronjob, editCronjob } from '@/api/modules/cronjob';
+import { listDBByVersion, loadVersions } from '@/api/modules/database';
 
 interface DialogProps {
     title: string;
@@ -167,8 +186,14 @@ const acceptParams = (params: DialogProps): void => {
     dialogData.value = params;
     title.value = i18n.global.t('commons.button.' + dialogData.value.title);
     cronjobVisiable.value = true;
+    loadRunningOptions();
+    loadBackups();
 };
 
+const mysqlVersionOptions = ref();
+const dbOptions = ref();
+const localDirID = ref();
+
 const websiteOptions = ref([
     { label: '所有', value: 'all' },
     { label: '网站1', value: 'web1' },
@@ -263,6 +288,7 @@ const rules = reactive({
     script: [Rules.requiredInput],
     website: [Rules.requiredSelect],
     database: [Rules.requiredSelect],
+    dbName: [Rules.requiredSelect],
     url: [Rules.requiredInput],
     sourceDir: [Rules.requiredSelect],
     targetDirID: [Rules.requiredSelect, Rules.number],
@@ -280,9 +306,30 @@ const loadBackups = async () => {
     const res = await getBackupList();
     backupOptions.value = [];
     for (const item of res.data) {
+        if (item.type === 'LOCAL') {
+            localDirID.value = item.id;
+        }
         backupOptions.value.push({ label: loadBackupName(item.type), value: item.id });
     }
 };
+
+const loadRunningOptions = async () => {
+    const res = await loadVersions();
+    mysqlVersionOptions.value = res.data;
+    if (mysqlVersionOptions.value.length != 0) {
+        dialogData.value.rowData!.database = mysqlVersionOptions.value[0];
+        changeDBVersion();
+    }
+};
+const changeDBVersion = async () => {
+    dialogData.value.rowData!.dbName = '';
+    const res = await listDBByVersion(dialogData.value.rowData!.database);
+    dbOptions.value = res.data;
+    if (dbOptions.value.length != 0) {
+        dialogData.value.rowData!.dbName = dbOptions.value[0];
+    }
+};
+
 function isBackup() {
     return (
         dialogData.value.rowData!.type === 'website' ||
@@ -328,9 +375,6 @@ const onSubmit = async (formEl: FormInstance | undefined) => {
     });
 };
 
-onMounted(() => {
-    loadBackups();
-});
 defineExpose({
     acceptParams,
 });
@@ -126,7 +126,7 @@
             <el-form-item :label="$t('cronjob.target')">
                 {{ loadBackupName(dialogData.rowData!.targetDir) }}
                 <el-button
-                    v-if="currentRecord?.records! !== 'errHandle'"
+                    v-if="currentRecord?.status! !== 'Failed'"
                     type="primary"
                     style="margin-left: 10px"
                    link