fix(archive): unable to preview #7843

Merged: 7 commits, Jan 27, 2025
31 changes: 21 additions & 10 deletions internal/archive/archives/archives.go
@@ -1,42 +1,53 @@
package archives

import (
"github.com/alist-org/alist/v3/internal/archive/tool"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/stream"
"github.com/alist-org/alist/v3/pkg/utils"
"io"
"io/fs"
"os"
stdpath "path"
"strings"

"github.com/alist-org/alist/v3/internal/archive/tool"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/stream"
"github.com/alist-org/alist/v3/pkg/utils"
)

type Archives struct {
}

func (_ *Archives) AcceptedExtensions() []string {
func (*Archives) AcceptedExtensions() []string {
return []string{
".br", ".bz2", ".gz", ".lz4", ".lz", ".sz", ".s2", ".xz", ".zz", ".zst", ".tar", ".rar", ".7z",
}
}

func (_ *Archives) GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
func (*Archives) GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
fsys, err := getFs(ss, args)
if err != nil {
return nil, err
}
_, err = fsys.ReadDir(".")
files, err := fsys.ReadDir(".")
if err != nil {
return nil, filterPassword(err)
}

tree := make([]model.ObjTree, 0, len(files))
for _, file := range files {
info, err := file.Info()
if err != nil {
continue
}
tree = append(tree, &model.ObjectTree{Object: *toModelObj(info)})
}
return &model.ArchiveMetaInfo{
Comment: "",
Encrypted: false,
Tree: tree,
}, nil
}

func (_ *Archives) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
func (*Archives) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
fsys, err := getFs(ss, args.ArchiveArgs)
if err != nil {
return nil, err
@@ -58,7 +69,7 @@ func (_ *Archives) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs)
})
}

func (_ *Archives) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
func (*Archives) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
fsys, err := getFs(ss, args.ArchiveArgs)
if err != nil {
return nil, 0, err
@@ -74,7 +85,7 @@ func (_ *Archives) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArg
return file, stat.Size(), nil
}

func (_ *Archives) Decompress(ss *stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
func (*Archives) Decompress(ss *stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
fsys, err := getFs(ss, args.ArchiveArgs)
if err != nil {
return err
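The reworked GetMeta above no longer discards the ReadDir result: it turns the root listing into a one-level tree so the frontend can preview the archive without an extra List call. A minimal standalone sketch of that idea, using io/fs and a hypothetical entry struct in place of the project's model types, could look like:

package main

import (
	"fmt"
	"io/fs"
	"testing/fstest"
)

// entry is a hypothetical stand-in for model.ObjectTree: just root-level metadata.
type entry struct {
	Name     string
	Size     int64
	IsFolder bool
}

// rootTree mirrors the reworked GetMeta: list ".", skip entries whose Info()
// fails, and keep the rest as a flat, one-level tree.
func rootTree(fsys fs.FS) ([]entry, error) {
	files, err := fs.ReadDir(fsys, ".")
	if err != nil {
		return nil, err
	}
	tree := make([]entry, 0, len(files))
	for _, f := range files {
		info, err := f.Info()
		if err != nil {
			continue // same policy as the patch: drop unreadable entries instead of failing
		}
		tree = append(tree, entry{Name: info.Name(), Size: info.Size(), IsFolder: info.IsDir()})
	}
	return tree, nil
}

func main() {
	fsys := fstest.MapFS{
		"a.txt":     {Data: []byte("hello")},
		"dir/b.txt": {Data: []byte("world")},
	}
	tree, _ := rootTree(fsys)
	for _, e := range tree {
		fmt.Printf("%s folder=%v size=%d\n", e.Name, e.IsFolder, e.Size)
	}
}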
12 changes: 8 additions & 4 deletions internal/archive/archives/utils.go
@@ -1,22 +1,26 @@
package archives

import (
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/stream"
"github.com/mholt/archives"
"io"
fs2 "io/fs"
"os"
stdpath "path"
"strings"

"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/stream"
"github.com/mholt/archives"
)

func getFs(ss *stream.SeekableStream, args model.ArchiveArgs) (*archives.ArchiveFS, error) {
reader, err := stream.NewReadAtSeeker(ss, 0)
if err != nil {
return nil, err
}
if r, ok := reader.(*stream.RangeReadReadAtSeeker); ok {
r.InitHeadCache()
}
format, _, err := archives.Identify(ss.Ctx, ss.GetName(), reader)
if err != nil {
return nil, errs.UnknownArchiveFormat
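The only functional change in getFs is the type assertion that calls InitHeadCache before format identification, so a ranged remote reader keeps its first bytes in memory instead of re-fetching them while the format is sniffed. A small sketch of that optional-capability pattern, with a hypothetical headCacher interface standing in for stream.RangeReadReadAtSeeker, might read:

package main

import (
	"fmt"
	"io"
	"strings"
)

// headCacher is a hypothetical capability interface: readers that can
// pre-buffer their first bytes so later header sniffing is cheap.
type headCacher interface {
	InitHeadCache()
}

type cachingReader struct{ io.Reader }

func (c *cachingReader) InitHeadCache() { fmt.Println("head cache primed") }

// sniff enables the head cache only when the reader actually supports it,
// mirroring the type assertion added in getFs.
func sniff(r io.Reader) {
	if hc, ok := r.(headCacher); ok {
		hc.InitHeadCache()
	}
	// ... format identification would read the header here ...
}

func main() {
	sniff(strings.NewReader("plain reader"))          // no-op: plain reader has no head cache
	sniff(&cachingReader{strings.NewReader("cached")}) // primes the cache
}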
102 changes: 88 additions & 14 deletions internal/archive/zip/zip.go
@@ -1,25 +1,26 @@
package zip

import (
"io"
"os"
stdpath "path"
"strings"

"github.com/alist-org/alist/v3/internal/archive/tool"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/stream"
"github.com/yeka/zip"
"io"
"os"
stdpath "path"
"strings"
)

type Zip struct {
}

func (_ *Zip) AcceptedExtensions() []string {
func (*Zip) AcceptedExtensions() []string {
return []string{".zip"}
}

func (_ *Zip) GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
func (*Zip) GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
reader, err := stream.NewReadAtSeeker(ss, 0)
if err != nil {
return nil, err
@@ -29,19 +30,81 @@ func (_ *Zip) GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (model.
return nil, err
}
encrypted := false
dirMap := make(map[string]*model.ObjectTree)
dirMap["."] = &model.ObjectTree{}
for _, file := range zipReader.File {
if file.IsEncrypted() {
encrypted = true
break
}

name := strings.TrimPrefix(decodeName(file.Name), "/")
var dir string
var dirObj *model.ObjectTree
isNewFolder := false
if !file.FileInfo().IsDir() {
// First attach the file to the folder that contains it
dir = stdpath.Dir(name)
dirObj = dirMap[dir]
if dirObj == nil {
isNewFolder = true
dirObj = &model.ObjectTree{}
dirObj.IsFolder = true
dirObj.Name = stdpath.Base(dir)
dirObj.Modified = file.ModTime()
dirMap[dir] = dirObj
}
dirObj.Children = append(
dirObj.Children, &model.ObjectTree{
Object: *toModelObj(file.FileInfo()),
},
)
} else {
dir = strings.TrimSuffix(name, "/")
dirObj = dirMap[dir]
if dirObj == nil {
isNewFolder = true
dirObj = &model.ObjectTree{}
dirMap[dir] = dirObj
}
dirObj.IsFolder = true
dirObj.Name = stdpath.Base(dir)
dirObj.Modified = file.ModTime()
}
if isNewFolder {
// Attach the folder to its parent folder
dir = stdpath.Dir(dir)
pDirObj := dirMap[dir]
if pDirObj != nil {
pDirObj.Children = append(pDirObj.Children, dirObj)
continue
}

for {
// Handle archives that record only file paths and have no explicit folder entries
pDirObj = &model.ObjectTree{}
pDirObj.IsFolder = true
pDirObj.Name = stdpath.Base(dir)
pDirObj.Modified = file.ModTime()
dirMap[dir] = pDirObj
pDirObj.Children = append(pDirObj.Children, dirObj)
dir = stdpath.Dir(dir)
if dirMap[dir] != nil {
break
}
dirObj = pDirObj
}
}
}

return &model.ArchiveMetaInfo{
Comment: zipReader.Comment,
Encrypted: encrypted,
Tree: dirMap["."].GetChildren(),
}, nil
}

func (_ *Zip) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
func (*Zip) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
reader, err := stream.NewReadAtSeeker(ss, 0)
if err != nil {
return nil, err
@@ -53,6 +116,7 @@ func (_ *Zip) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]mo
if args.InnerPath == "/" {
ret := make([]model.Obj, 0)
passVerified := false
var dir *model.Object
for _, file := range zipReader.File {
if !passVerified && file.IsEncrypted() {
file.SetPassword(args.Password)
@@ -63,26 +127,36 @@ func (_ *Zip) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]mo
_ = rc.Close()
passVerified = true
}
name := decodeName(file.Name)
if strings.Contains(strings.TrimSuffix(name, "/"), "/") {
name := strings.TrimSuffix(decodeName(file.Name), "/")
if strings.Contains(name, "/") {
// Some archives do not store an entry for the top-level folder
strs := strings.Split(name, "/")
if dir == nil && len(strs) == 2 {
dir = &model.Object{
Name: strs[0],
Modified: ss.ModTime(),
IsFolder: true,
}
}
continue
}
ret = append(ret, toModelObj(file.FileInfo()))
}
if len(ret) == 0 && dir != nil {
ret = append(ret, dir)
}
return ret, nil
} else {
innerPath := strings.TrimPrefix(args.InnerPath, "/") + "/"
ret := make([]model.Obj, 0)
exist := false
for _, file := range zipReader.File {
name := decodeName(file.Name)
if name == innerPath {
exist = true
}
dir := stdpath.Dir(strings.TrimSuffix(name, "/")) + "/"
if dir != innerPath {
continue
}
exist = true
ret = append(ret, toModelObj(file.FileInfo()))
}
if !exist {
@@ -92,7 +166,7 @@ func (_ *Zip) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]mo
}
}

func (_ *Zip) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
func (*Zip) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
reader, err := stream.NewReadAtSeeker(ss, 0)
if err != nil {
return nil, 0, err
@@ -117,7 +191,7 @@ func (_ *Zip) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (i
return nil, 0, errs.ObjectNotFound
}

func (_ *Zip) Decompress(ss *stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
func (*Zip) Decompress(ss *stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
reader, err := stream.NewReadAtSeeker(ss, 0)
if err != nil {
return err
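The bulk of this change builds a full directory tree from the zip's flat entry list: every file is attached to its parent directory, and directories the archive never recorded explicitly are synthesized on the way up to the root. A simplified, recursive sketch of that algorithm with a hypothetical node type (the patch itself works iteratively and also tracks modification times and encryption) could be:

package main

import (
	"fmt"
	stdpath "path"
	"sort"
	"strings"
)

// node is a hypothetical stand-in for model.ObjectTree.
type node struct {
	Name     string
	IsFolder bool
	Children []*node
}

// buildTree mirrors the dirMap approach in the patched Zip.GetMeta: every file
// is attached to its parent directory, and parents that the archive never
// recorded as explicit entries are synthesized up to the root ".".
func buildTree(paths []string) *node {
	dirMap := map[string]*node{".": {Name: ".", IsFolder: true}}
	for _, p := range paths {
		name := strings.TrimPrefix(p, "/")
		isDir := strings.HasSuffix(name, "/")
		name = strings.TrimSuffix(name, "/")
		if isDir {
			ensureDir(dirMap, name)
			continue
		}
		dir := ensureDir(dirMap, stdpath.Dir(name))
		dir.Children = append(dir.Children, &node{Name: stdpath.Base(name)})
	}
	return dirMap["."]
}

// ensureDir returns the node for dir, creating it and any missing ancestors.
func ensureDir(dirMap map[string]*node, dir string) *node {
	if n, ok := dirMap[dir]; ok {
		return n
	}
	n := &node{Name: stdpath.Base(dir), IsFolder: true}
	dirMap[dir] = n
	parent := ensureDir(dirMap, stdpath.Dir(dir))
	parent.Children = append(parent.Children, n)
	return n
}

func main() {
	root := buildTree([]string{"docs/readme.md", "docs/img/logo.png", "main.go"})
	var walk func(n *node, indent string)
	walk = func(n *node, indent string) {
		sort.Slice(n.Children, func(i, j int) bool { return n.Children[i].Name < n.Children[j].Name })
		for _, c := range n.Children {
			fmt.Println(indent + c.Name)
			walk(c, indent+"  ")
		}
	}
	walk(root, "")
}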
4 changes: 4 additions & 0 deletions internal/model/archive.go
@@ -1,5 +1,7 @@
package model

import "time"

type ObjTree interface {
Obj
GetChildren() []ObjTree
@@ -45,5 +47,7 @@ func (m *ArchiveMetaInfo) GetTree() []ObjTree {

type ArchiveMetaProvider struct {
ArchiveMeta
*Sort
DriverProviding bool
Expiration *time.Duration
}
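Both new fields on ArchiveMetaProvider are optional and expressed as pointers: the embedded *Sort carries the storage's sort settings only when a tree is attached, and Expiration carries a cache TTL only when caching makes sense. A small sketch of consuming such a nil-able embedded pointer, with hypothetical types standing in for the project's model package, might be:

package main

import "fmt"

// Sort is a hypothetical stand-in for the project's sort settings.
type Sort struct {
	OrderBy        string
	OrderDirection string
}

// MetaProvider mirrors the shape of ArchiveMetaProvider after this patch:
// sort settings are attached only when available.
type MetaProvider struct {
	*Sort
	DriverProviding bool
}

func main() {
	withSort := MetaProvider{Sort: &Sort{OrderBy: "name", OrderDirection: "asc"}}
	withoutSort := MetaProvider{} // embedded *Sort is nil

	// Callers must nil-check the embedded pointer before promoting its fields;
	// reading withoutSort.OrderBy directly would panic.
	for _, p := range []MetaProvider{withSort, withoutSort} {
		if p.Sort != nil {
			fmt.Println("sort by", p.OrderBy, p.OrderDirection)
		} else {
			fmt.Println("no sort settings attached")
		}
	}
}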
43 changes: 33 additions & 10 deletions internal/op/archive.go
@@ -3,13 +3,14 @@ package op
import (
"context"
stderrors "errors"
"github.com/alist-org/alist/v3/internal/archive/tool"
"github.com/alist-org/alist/v3/internal/stream"
"io"
stdpath "path"
"strings"
"time"

"github.com/alist-org/alist/v3/internal/archive/tool"
"github.com/alist-org/alist/v3/internal/stream"

"github.com/Xhofe/go-cache"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
@@ -40,8 +41,8 @@ func GetArchiveMeta(ctx context.Context, storage driver.Driver, path string, arg
if err != nil {
return nil, errors.Wrapf(err, "failed to get %s archive met: %+v", path, err)
}
if !storage.Config().NoCache {
archiveMetaCache.Set(key, m, cache.WithEx[*model.ArchiveMetaProvider](time.Minute*time.Duration(storage.GetStorage().CacheExpiration)))
if m.Expiration != nil {
archiveMetaCache.Set(key, m, cache.WithEx[*model.ArchiveMetaProvider](*m.Expiration))
}
return m, nil
}
@@ -82,7 +83,15 @@ func getArchiveMeta(ctx context.Context, storage driver.Driver, path string, arg
}
meta, err := storageAr.GetArchiveMeta(ctx, obj, args.ArchiveArgs)
if !errors.Is(err, errs.NotImplement) {
return obj, &model.ArchiveMetaProvider{ArchiveMeta: meta, DriverProviding: true}, err
archiveMetaProvider := &model.ArchiveMetaProvider{ArchiveMeta: meta, DriverProviding: true}
if meta.GetTree() != nil {
archiveMetaProvider.Sort = &storage.GetStorage().Sort
}
if !storage.Config().NoCache {
Expiration := time.Minute * time.Duration(storage.GetStorage().CacheExpiration)
archiveMetaProvider.Expiration = &Expiration
}
return obj, archiveMetaProvider, err
}
}
obj, t, ss, err := getArchiveToolAndStream(ctx, storage, path, args.LinkArgs)
Expand All @@ -95,7 +104,21 @@ func getArchiveMeta(ctx context.Context, storage driver.Driver, path string, arg
}
}()
meta, err := t.GetMeta(ss, args.ArchiveArgs)
return obj, &model.ArchiveMetaProvider{ArchiveMeta: meta, DriverProviding: false}, err
if err != nil {
return nil, nil, err
}
archiveMetaProvider := &model.ArchiveMetaProvider{ArchiveMeta: meta, DriverProviding: false}
if meta.GetTree() != nil {
archiveMetaProvider.Sort = &storage.GetStorage().Sort
}
if !storage.Config().NoCache {
Expiration := time.Minute * time.Duration(storage.GetStorage().CacheExpiration)
archiveMetaProvider.Expiration = &Expiration
} else if ss.Link.MFile == nil {
// alias and crypt drivers
archiveMetaProvider.Expiration = ss.Link.Expiration
}
return obj, archiveMetaProvider, err
}

var archiveListCache = cache.NewMemCache(cache.WithShards[[]model.Obj](64))
@@ -113,10 +136,10 @@ func ListArchive(ctx context.Context, storage driver.Driver, path string, args m
log.Debugf("use cache when list archive [%s]%s", path, args.InnerPath)
return files, nil
}
if meta, ok := archiveMetaCache.Get(metaKey); ok {
log.Debugf("use meta cache when list archive [%s]%s", path, args.InnerPath)
return getChildrenFromArchiveMeta(meta, args.InnerPath)
}
// if meta, ok := archiveMetaCache.Get(metaKey); ok {
// log.Debugf("use meta cache when list archive [%s]%s", path, args.InnerPath)
// return getChildrenFromArchiveMeta(meta, args.InnerPath)
// }
}
objs, err, _ := archiveListG.Do(key, func() ([]model.Obj, error) {
obj, files, err := listArchive(ctx, storage, path, args)
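With the meta provider now carrying its own optional Expiration, GetArchiveMeta caches per entry instead of applying the storage-wide TTL unconditionally, and it can skip caching entirely (for example for alias/crypt links with no local file and no link expiration). A minimal sketch of that nil-means-do-not-cache contract, with a hypothetical ttlCache standing in for the go-cache instance, could look like:

package main

import (
	"fmt"
	"sync"
	"time"
)

// ttlCache is a hypothetical stand-in for the cache used in op:
// entries are stored only when the caller supplies an expiration.
type ttlCache struct {
	mu sync.Mutex
	m  map[string]item
}

type item struct {
	value   string
	expires time.Time
}

func newTTLCache() *ttlCache { return &ttlCache{m: map[string]item{}} }

// set mirrors the patched GetArchiveMeta: a nil expiration means "do not cache".
func (c *ttlCache) set(key, value string, expiration *time.Duration) {
	if expiration == nil {
		return
	}
	c.mu.Lock()
	defer c.mu.Unlock()
	c.m[key] = item{value: value, expires: time.Now().Add(*expiration)}
}

func (c *ttlCache) get(key string) (string, bool) {
	c.mu.Lock()
	defer c.mu.Unlock()
	it, ok := c.m[key]
	if !ok || time.Now().After(it.expires) {
		return "", false
	}
	return it.value, true
}

func main() {
	c := newTTLCache()
	ttl := 30 * time.Minute
	c.set("meta:/archive.zip", "tree", &ttl) // cached: provider reported an expiration
	c.set("meta:/alias.zip", "tree", nil)    // skipped: e.g. alias/crypt link with no TTL
	v, ok := c.get("meta:/archive.zip")
	fmt.Println(v, ok)
	_, ok = c.get("meta:/alias.zip")
	fmt.Println(ok)
}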