feat(explorer): preview archive file content and extract selected files (#2852)

pull/2870/head
Aaron Liu 2025-09-02 11:54:04 +08:00
parent 4acf9401b8
commit 9f1cb52cfb
14 changed files with 329 additions and 56 deletions

View File

@@ -3,7 +3,7 @@ package constants
// These values will be injected at build time, DO NOT EDIT.
// BackendVersion is the current backend version number
var BackendVersion = "4.1.0"
var BackendVersion = "4.7.0"
// IsPro indicates whether this is the Pro edition
var IsPro = "false"

assets

@@ -1 +1 @@
Subproject commit 35961604a187a49591fa57a50de8c0dad4bb5b78
Subproject commit 463794a71e6e19b9d4ee35248f00ff64f9485f30

View File

@@ -279,6 +279,53 @@ type (
)
var patches = []Patch{
{
Name: "apply_default_archive_viewer",
EndVersion: "4.7.0",
Func: func(l logging.Logger, client *ent.Client, ctx context.Context) error {
fileViewersSetting, err := client.Setting.Query().Where(setting.Name("file_viewers")).First(ctx)
if err != nil {
return fmt.Errorf("failed to query file_viewers setting: %w", err)
}
var fileViewers []types.ViewerGroup
if err := json.Unmarshal([]byte(fileViewersSetting.Value), &fileViewers); err != nil {
return fmt.Errorf("failed to unmarshal file_viewers setting: %w", err)
}
fileViewerExisted := false
for _, viewer := range fileViewers[0].Viewers {
if viewer.ID == "archive" {
fileViewerExisted = true
break
}
}
// If the archive viewer is not present yet, add it
if !fileViewerExisted {
// Find the archive viewer in the default viewer settings
var defaultArchiveViewer types.Viewer
for _, viewer := range defaultFileViewers[0].Viewers {
if viewer.ID == "archive" {
defaultArchiveViewer = viewer
break
}
}
fileViewers[0].Viewers = append(fileViewers[0].Viewers, defaultArchiveViewer)
newFileViewersSetting, err := json.Marshal(fileViewers)
if err != nil {
return fmt.Errorf("failed to marshal file_viewers setting: %w", err)
}
if _, err := client.Setting.UpdateOne(fileViewersSetting).SetValue(string(newFileViewersSetting)).Save(ctx); err != nil {
return fmt.Errorf("failed to update file_viewers setting: %w", err)
}
}
return nil
},
},
{
Name: "apply_default_excalidraw_viewer",
EndVersion: "4.1.0",

View File

@@ -321,6 +321,15 @@ var (
},
},
},
{
ID: "archive",
Type: types.ViewerTypeBuiltin,
DisplayName: "fileManager.archivePreview",
Exts: []string{"zip", "7z"},
RequiredGroupPermission: []types.GroupPermission{
types.GroupPermissionArchiveTask,
},
},
},
},
}

View File

@@ -305,6 +305,7 @@ type (
Disabled bool `json:"disabled,omitempty"`
Templates []NewFileTemplate `json:"templates,omitempty"`
Platform string `json:"platform,omitempty"`
RequiredGroupPermission []GroupPermission `json:"required_group_permission,omitempty"`
}
ViewerGroup struct {
Viewers []Viewer `json:"viewers"`

View File

@@ -699,6 +699,8 @@ func LockSessionToContext(ctx context.Context, session LockSession) context.Cont
return context.WithValue(ctx, LockSessionCtxKey{}, session)
}
// FindDesiredEntity finds the desired entity from the file.
// entityType is optional; if it is non-nil, only an entity with the given type is returned.
func FindDesiredEntity(file File, version string, hasher hashid.Encoder, entityType *types.EntityType) (bool, Entity) {
if version == "" {
return true, file.PrimaryEntity()

View File

@@ -3,19 +3,95 @@ package manager
import (
"archive/zip"
"context"
"encoding/gob"
"fmt"
"io"
"path"
"path/filepath"
"strings"
"time"
"github.com/bodgit/sevenzip"
"github.com/cloudreve/Cloudreve/v4/inventory/types"
"github.com/cloudreve/Cloudreve/v4/pkg/filemanager/fs"
"github.com/cloudreve/Cloudreve/v4/pkg/filemanager/fs/dbfs"
"github.com/cloudreve/Cloudreve/v4/pkg/filemanager/manager/entitysource"
"github.com/cloudreve/Cloudreve/v4/pkg/util"
"golang.org/x/tools/container/intsets"
)
type (
ArchivedFile struct {
Name string `json:"name"`
Size int64 `json:"size"`
UpdatedAt *time.Time `json:"updated_at"`
IsDirectory bool `json:"is_directory"`
}
)
const (
ArchiveListCacheTTL = 3600 // 1 hour
)
func init() {
gob.Register([]ArchivedFile{})
}
func (m *manager) ListArchiveFiles(ctx context.Context, uri *fs.URI, entity string) ([]ArchivedFile, error) {
file, err := m.fs.Get(ctx, uri, dbfs.WithFileEntities(), dbfs.WithRequiredCapabilities(dbfs.NavigatorCapabilityDownloadFile))
if err != nil {
return nil, fmt.Errorf("failed to get file: %w", err)
}
if file.Type() != types.FileTypeFile {
return nil, fs.ErrNotSupportedAction.WithError(fmt.Errorf("path %s is not a file", uri))
}
// Validate file size
if m.user.Edges.Group.Settings.DecompressSize > 0 && file.Size() > m.user.Edges.Group.Settings.DecompressSize {
return nil, fs.ErrFileSizeTooBig.WithError(fmt.Errorf("file size %d exceeds the limit %d", file.Size(), m.user.Edges.Group.Settings.DecompressSize))
}
found, targetEntity := fs.FindDesiredEntity(file, entity, m.hasher, nil)
if !found {
return nil, fs.ErrEntityNotExist
}
cacheKey := getArchiveListCacheKey(targetEntity.ID())
kv := m.kv
res, found := kv.Get(cacheKey)
if found {
return res.([]ArchivedFile), nil
}
es, err := m.GetEntitySource(ctx, 0, fs.WithEntity(targetEntity))
if err != nil {
return nil, fmt.Errorf("failed to get entity source: %w", err)
}
es.Apply(entitysource.WithContext(ctx))
defer es.Close()
var readerFunc func(ctx context.Context, file io.ReaderAt, size int64) ([]ArchivedFile, error)
switch file.Ext() {
case "zip":
readerFunc = getZipFileList
case "7z":
readerFunc = get7zFileList
default:
return nil, fs.ErrNotSupportedAction.WithError(fmt.Errorf("not supported archive format: %s", file.Ext()))
}
sr := io.NewSectionReader(es, 0, targetEntity.Size())
fileList, err := readerFunc(ctx, sr, targetEntity.Size())
if err != nil {
return nil, fmt.Errorf("failed to read file list: %w", err)
}
kv.Set(cacheKey, fileList, ArchiveListCacheTTL)
return fileList, nil
}
func (m *manager) CreateArchive(ctx context.Context, uris []*fs.URI, writer io.Writer, opts ...fs.Option) (int, error) {
o := newOption()
for _, opt := range opts {
@@ -122,3 +198,47 @@ func (m *manager) compressFileToArchive(ctx context.Context, parent string, file
return err
}
func getZipFileList(ctx context.Context, file io.ReaderAt, size int64) ([]ArchivedFile, error) {
zr, err := zip.NewReader(file, size)
if err != nil {
return nil, fmt.Errorf("failed to create zip reader: %w", err)
}
fileList := make([]ArchivedFile, 0, len(zr.File))
for _, f := range zr.File {
info := f.FileInfo()
modTime := info.ModTime()
fileList = append(fileList, ArchivedFile{
Name: util.FormSlash(f.Name),
Size: info.Size(),
UpdatedAt: &modTime,
IsDirectory: info.IsDir(),
})
}
return fileList, nil
}
func get7zFileList(ctx context.Context, file io.ReaderAt, size int64) ([]ArchivedFile, error) {
zr, err := sevenzip.NewReader(file, size)
if err != nil {
return nil, fmt.Errorf("failed to create 7z reader: %w", err)
}
fileList := make([]ArchivedFile, 0, len(zr.File))
for _, f := range zr.File {
info := f.FileInfo()
modTime := info.ModTime()
fileList = append(fileList, ArchivedFile{
Name: util.FormSlash(f.Name),
Size: info.Size(),
UpdatedAt: &modTime,
IsDirectory: info.IsDir(),
})
}
return fileList, nil
}
func getArchiveListCacheKey(entity int) string {
return fmt.Sprintf("archive_list_%d", entity)
}

View File

@@ -85,7 +85,10 @@ type (
}
Archiver interface {
// CreateArchive creates an archive
CreateArchive(ctx context.Context, uris []*fs.URI, writer io.Writer, opts ...fs.Option) (int, error)
// ListArchiveFiles lists files in an archive
ListArchiveFiles(ctx context.Context, uri *fs.URI, entity string) ([]ArchivedFile, error)
}
FileManager interface {

View File

@@ -55,6 +55,7 @@ type (
ProcessedCursor string `json:"processed_cursor,omitempty"`
SlaveTaskID int `json:"slave_task_id,omitempty"`
Password string `json:"password,omitempty"`
FileMask []string `json:"file_mask,omitempty"`
NodeState `json:",inline"`
Phase ExtractArchiveTaskPhase `json:"phase,omitempty"`
}
@@ -119,13 +120,14 @@ var encodings = map[string]encoding.Encoding{
}
// NewExtractArchiveTask creates a new ExtractArchiveTask
func NewExtractArchiveTask(ctx context.Context, src, dst, encoding, password string) (queue.Task, error) {
func NewExtractArchiveTask(ctx context.Context, src, dst, encoding, password string, mask []string) (queue.Task, error) {
state := &ExtractArchiveTaskState{
Uri: src,
Dst: dst,
Encoding: encoding,
NodeState: NodeState{},
Password: password,
FileMask: mask,
}
stateBytes, err := json.Marshal(state)
if err != nil {
@@ -247,6 +249,7 @@ func (m *ExtractArchiveTask) createSlaveExtractTask(ctx context.Context, dep dep
Dst: m.state.Dst,
UserID: user.ID,
Password: m.state.Password,
FileMask: m.state.FileMask,
}
payloadStr, err := json.Marshal(payload)
@@ -416,6 +419,14 @@ func (m *ExtractArchiveTask) masterExtractArchive(ctx context.Context, dep depen
rawPath := util.FormSlash(f.NameInArchive)
savePath := dst.JoinRaw(rawPath)
// If file mask is not empty, check if the path is in the mask
if len(m.state.FileMask) > 0 && !isFileInMask(rawPath, m.state.FileMask) {
m.l.Warning("File %q is not in the mask, skipping...", f.NameInArchive)
atomic.AddInt64(&m.progress[ProgressTypeExtractCount].Current, 1)
atomic.AddInt64(&m.progress[ProgressTypeExtractSize].Current, f.Size())
return nil
}
// Check if path is legit
if !strings.HasPrefix(savePath.Path(), util.FillSlash(path.Clean(dst.Path()))) {
m.l.Warning("Path %q is not legit, skipping...", f.NameInArchive)
@@ -599,6 +610,7 @@ type (
TempZipFilePath string `json:"temp_zip_file_path,omitempty"`
ProcessedCursor string `json:"processed_cursor,omitempty"`
Password string `json:"password,omitempty"`
FileMask []string `json:"file_mask,omitempty"`
}
)
@@ -779,6 +791,12 @@ func (m *SlaveExtractArchiveTask) Do(ctx context.Context) (task.Status, error) {
rawPath := util.FormSlash(f.NameInArchive)
savePath := dst.JoinRaw(rawPath)
// If file mask is not empty, check if the path is in the mask
if len(m.state.FileMask) > 0 && !isFileInMask(rawPath, m.state.FileMask) {
m.l.Debug("File %q is not in the mask, skipping...", f.NameInArchive)
return nil
}
// Check if path is legit
if !strings.HasPrefix(savePath.Path(), util.FillSlash(path.Clean(dst.Path()))) {
atomic.AddInt64(&m.progress[ProgressTypeExtractCount].Current, 1)
@@ -846,3 +864,17 @@ func (m *SlaveExtractArchiveTask) Progress(ctx context.Context) queue.Progresses
defer m.Unlock()
return m.progress
}
func isFileInMask(path string, mask []string) bool {
if len(mask) == 0 {
return true
}
for _, m := range mask {
if path == m || strings.HasPrefix(path, m+"/") {
return true
}
}
return false
}
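As a reference for the masking semantics, a small assumed example (not part of this change) of how isFileInMask behaves: a mask entry matches the exact path or anything nested under it, using the entry as a "/"-delimited prefix. The package name is inferred from the workflows.NewExtractArchiveTask call elsewhere in this change.

package workflows

import "testing"

// Assumed test, for illustration only.
func TestIsFileInMask(t *testing.T) {
	mask := []string{"docs", "src/main.go"}

	cases := map[string]bool{
		"docs":            true,  // exact match
		"docs/readme.md":  true,  // nested under a masked directory
		"docs2/readme.md": false, // "docs" only matches when followed by "/"
		"src/main.go":     true,  // exact file match
		"src/util.go":     false, // sibling not listed in the mask
	}
	for p, want := range cases {
		if got := isFileInMask(p, mask); got != want {
			t.Errorf("isFileInMask(%q) = %v, want %v", p, got, want)
		}
	}
}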

View File

@@ -412,3 +412,17 @@ func PatchView(c *gin.Context) {
c.JSON(200, serializer.Response{})
}
func ListArchiveFiles(c *gin.Context) {
service := ParametersFromContext[*explorer.ArchiveListFilesService](c, explorer.ArchiveListFilesParamCtx{})
resp, err := service.List(c)
if err != nil {
c.JSON(200, serializer.Err(c, err))
c.Abort()
return
}
c.JSON(200, serializer.Response{
Data: resp,
})
}

View File

@@ -566,6 +566,10 @@ func initMasterRouter(dep dependency.Dep) *gin.Engine {
controllers.FromQuery[explorer.ListFileService](explorer.ListFileParameterCtx{}),
controllers.ListDirectory,
)
file.GET("archive",
controllers.FromQuery[explorer.ArchiveListFilesService](explorer.ArchiveListFilesParamCtx{}),
controllers.ListArchiveFiles,
)
// Create file
file.POST("create",
controllers.FromJSON[explorer.CreateFileService](explorer.CreateFileParameterCtx{}),

View File

@@ -716,3 +716,33 @@ func (s *PatchViewService) Patch(c *gin.Context) error {
return nil
}
type (
ArchiveListFilesParamCtx struct{}
ArchiveListFilesService struct {
Uri string `form:"uri" binding:"required"`
Entity string `form:"entity"`
}
)
func (s *ArchiveListFilesService) List(c *gin.Context) (*ArchiveListFilesResponse, error) {
dep := dependency.FromContext(c)
user := inventory.UserFromContext(c)
m := manager.NewFileManager(dep, user)
defer m.Recycle()
if !user.Edges.Group.Permissions.Enabled(int(types.GroupPermissionArchiveTask)) {
return nil, serializer.NewError(serializer.CodeGroupNotAllowed, "Group not allowed to extract archive files", nil)
}
uri, err := fs.NewUriFromString(s.Uri)
if err != nil {
return nil, serializer.NewError(serializer.CodeParamErr, "unknown uri", err)
}
files, err := m.ListArchiveFiles(c, uri, s.Entity)
if err != nil {
return nil, fmt.Errorf("failed to list archive files: %w", err)
}
return BuildArchiveListFilesResponse(files), nil
}
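For illustration, a hedged sketch of a client call to the new listing route registered as file.GET("archive", ...). The /api/v4 prefix, the cloudreve:// URI form, and the omission of authentication are assumptions; only the uri and entity query parameters come from the ArchiveListFilesService definition above.

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	// Assumed base path; only the "uri" and "entity" query parameters are defined by this change.
	endpoint := "https://cloudreve.example.com/api/v4/file/archive"

	q := url.Values{}
	q.Set("uri", "cloudreve://my/photos.zip") // assumed URI form for the archive to preview
	q.Set("entity", "")                       // optional specific entity/version; empty selects the primary entity

	// Authentication (session cookie or token) is omitted here for brevity.
	resp, err := http.Get(endpoint + "?" + q.Encode())
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	// The data field carries the ArchiveListFilesResponse, i.e. {"files":[{"name":...,"size":...},...]}.
	fmt.Println(string(body))
}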

View File

@@ -26,6 +26,16 @@ import (
"github.com/samber/lo"
)
type ArchiveListFilesResponse struct {
Files []manager.ArchivedFile `json:"files"`
}
func BuildArchiveListFilesResponse(files []manager.ArchivedFile) *ArchiveListFilesResponse {
return &ArchiveListFilesResponse{
Files: files,
}
}
type PutRelativeResponse struct {
Name string
Url string

View File

@@ -174,6 +174,7 @@ type (
Dst string `json:"dst" binding:"required"`
Encoding string `json:"encoding"`
Password string `json:"password"`
FileMask []string `json:"file_mask"`
}
CreateArchiveParamCtx struct{}
)
@@ -204,7 +205,7 @@ func (service *ArchiveWorkflowService) CreateExtractTask(c *gin.Context) (*TaskR
}
// Create task
t, err := workflows.NewExtractArchiveTask(c, service.Src[0], service.Dst, service.Encoding, service.Password)
t, err := workflows.NewExtractArchiveTask(c, service.Src[0], service.Dst, service.Encoding, service.Password, service.FileMask)
if err != nil {
return nil, serializer.NewError(serializer.CodeCreateTaskError, "Failed to create task", err)
}
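Finally, a hedged sketch of the request body for creating an extract task that only extracts selected entries. The dst, encoding, password, and file_mask keys follow the struct tags shown above; the src key and the workflow endpoint itself are assumptions made only for this example.

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Builds the JSON payload an extract-task request might carry; values are placeholders.
	body := map[string]any{
		"src":      []string{"cloudreve://my/photos.zip"}, // "src" tag assumed, not shown in this diff
		"dst":      "cloudreve://my/extracted",
		"encoding": "",
		"password": "",
		// Only entries equal to these paths, or nested under them, will be extracted.
		"file_mask": []string{"photos/2024", "notes.txt"},
	}

	payload, _ := json.MarshalIndent(body, "", "  ")
	fmt.Println(string(payload))
}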