diff --git a/go.mod b/go.mod
index 3fad4225..7fc142d4 100644
--- a/go.mod
+++ b/go.mod
@@ -3,6 +3,7 @@ module github.com/alist-org/alist/v3
 go 1.21
 
 require (
+	github.com/Mikubill/gofakes3 v0.0.3-0.20230622102024-284c0f988700
 	github.com/SheltonZhu/115driver v1.0.22
 	github.com/Xhofe/go-cache v0.0.0-20220723083548-714439c8af9a
 	github.com/Xhofe/rateg v0.0.0-20230728072201-251a4e1adad4
@@ -39,6 +40,7 @@ require (
 	github.com/meilisearch/meilisearch-go v0.26.1
 	github.com/minio/sio v0.3.0
 	github.com/natefinch/lumberjack v2.0.0+incompatible
+	github.com/ncw/swift/v2 v2.0.2
 	github.com/orzogc/fake115uploader v0.3.3-0.20230715111618-58f9eb76f831
 	github.com/pkg/errors v0.9.1
 	github.com/pkg/sftp v1.13.6
@@ -137,8 +139,8 @@ require (
 	github.com/jmespath/go-jmespath v0.4.0 // indirect
 	github.com/josharian/intern v1.0.0 // indirect
 	github.com/jzelinskie/whirlpool v0.0.0-20201016144138-0675e54bb004 // indirect
-	github.com/klauspost/compress v1.16.5 // indirect
-	github.com/klauspost/cpuid/v2 v2.2.5 // indirect
+	github.com/klauspost/compress v1.17.4 // indirect
+	github.com/klauspost/cpuid/v2 v2.2.6 // indirect
 	github.com/kr/fs v0.1.0 // indirect
 	github.com/leodido/go-urn v1.2.4 // indirect
 	github.com/libp2p/go-buffer-pool v0.1.0 // indirect
@@ -153,7 +155,7 @@ require (
 	github.com/mattn/go-runewidth v0.0.15 // indirect
 	github.com/mattn/go-sqlite3 v1.14.15 // indirect
 	github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
-	github.com/minio/sha256-simd v1.0.0 // indirect
+	github.com/minio/sha256-simd v1.0.1 // indirect
 	github.com/mitchellh/go-homedir v1.1.0 // indirect
 	github.com/mitchellh/mapstructure v1.5.0 // indirect
 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
@@ -172,7 +174,6 @@ require (
 	github.com/multiformats/go-multihash v0.2.3 // indirect
 	github.com/multiformats/go-multistream v0.4.1 // indirect
 	github.com/multiformats/go-varint v0.0.7 // indirect
-	github.com/ncw/swift/v2 v2.0.2 // indirect
 	github.com/pelletier/go-toml/v2 v2.1.0 // indirect
 	github.com/pierrec/lz4/v4 v4.1.18 // indirect
 	github.com/pmezard/go-difflib v1.0.0 // indirect
@@ -184,6 +185,8 @@ require (
 	github.com/prometheus/procfs v0.11.1 // indirect
 	github.com/rfjakob/eme v1.1.2 // indirect
 	github.com/rivo/uniseg v0.4.4 // indirect
+	github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46 // indirect
+	github.com/shabbyrobe/gocovmerge v0.0.0-20190829150210-3e036491d500 // indirect
 	github.com/shirou/gopsutil/v3 v3.23.7 // indirect
 	github.com/shoenig/go-m1cpu v0.1.6 // indirect
 	github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e // indirect
@@ -201,10 +204,11 @@ require (
 	github.com/yusufpapurcu/wmi v1.2.3 // indirect
 	go.etcd.io/bbolt v1.3.7 // indirect
 	golang.org/x/arch v0.5.0 // indirect
-	golang.org/x/sync v0.3.0 // indirect
+	golang.org/x/sync v0.5.0 // indirect
 	golang.org/x/sys v0.16.0 // indirect
 	golang.org/x/term v0.16.0 // indirect
 	golang.org/x/text v0.14.0 // indirect
+	golang.org/x/tools v0.16.0 // indirect
 	google.golang.org/api v0.134.0 // indirect
 	google.golang.org/genproto/googleapis/rpc v0.0.0-20230803162519-f966b187b2e5 // indirect
 	google.golang.org/grpc v1.57.0 // indirect
diff --git a/go.sum b/go.sum
index c1442c3b..008dfa92 100644
--- a/go.sum
+++ b/go.sum
@@ -7,6 +7,8 @@ github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
 github.com/Max-Sum/base32768 v0.0.0-20230304063302-18e6ce5945fd h1:nzE1YQBdx1bq9IlZinHa+HVffy+NmVRoKr+wHN8fpLE=
 github.com/Max-Sum/base32768 v0.0.0-20230304063302-18e6ce5945fd/go.mod h1:C8yoIfvESpM3GD07OCHU7fqI7lhwyZ2Td1rbNbTAhnc=
+github.com/Mikubill/gofakes3 v0.0.3-0.20230622102024-284c0f988700 h1:r3fp2/Ro+0RtpjNY0/wsbN7vRmCW//dXTOZDQTct25Q=
+github.com/Mikubill/gofakes3 v0.0.3-0.20230622102024-284c0f988700/go.mod h1:OSXqXEGUe9CmPiwLMMnVrbXonMf4BeLBkBdLufxxiyY=
 github.com/RoaringBitmap/roaring v1.2.3 h1:yqreLINqIrX22ErkKI0vY47/ivtJr6n+kMhVOVmhWBY=
 github.com/RoaringBitmap/roaring v1.2.3/go.mod h1:plvDsJQpxOC5bw8LRteu/MLWHsHez/3y6cubLI4/1yE=
 github.com/SheltonZhu/115driver v1.0.22 h1:Wp8pN7/gK3YwEO5P18ggbIOHM++lo9eP/pBhuvXfI6U=
@@ -32,8 +34,6 @@ github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHG
 github.com/avast/retry-go v3.0.0+incompatible h1:4SOWQ7Qs+oroOTQOYnAHqelpCO0biHSxpiH9JdtuBj0=
 github.com/avast/retry-go v3.0.0+incompatible/go.mod h1:XtSnn+n/sHqQIpZ10K1qAevBhOOCWBLXXy3hyiqqBrY=
 github.com/aws/aws-sdk-go v1.38.20/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro=
-github.com/aws/aws-sdk-go v1.49.18 h1:g/iMXkfXeJQ7MvnLwroxWsTTNkHtdVJGxIgrAIEG62M=
-github.com/aws/aws-sdk-go v1.49.18/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk=
 github.com/aws/aws-sdk-go v1.50.24 h1:3o2Pg7mOoVL0jv54vWtuafoZqAeEXLhm1tltWA2GcEw=
 github.com/aws/aws-sdk-go v1.50.24/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk=
 github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
@@ -262,12 +262,11 @@ github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQL
 github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
 github.com/klauspost/compress v1.15.0/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
 github.com/klauspost/compress v1.15.6/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU=
-github.com/klauspost/compress v1.16.5 h1:IFV2oUNUzZaz+XyusxpLzpzS8Pt5rh0Z16For/djlyI=
-github.com/klauspost/compress v1.16.5/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
-github.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
+github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4=
+github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
 github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
-github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg=
-github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
+github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc=
+github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
 github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
 github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8=
 github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
@@ -315,8 +314,8 @@ github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zk
 github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
 github.com/meilisearch/meilisearch-go v0.26.1 h1:3bmo2uLijX7kvBmiZ9LupVfC95TFcRJDgrRTzbOoE4A=
 github.com/meilisearch/meilisearch-go v0.26.1/go.mod h1:SxuSqDcPBIykjWz1PX+KzsYzArNLSCadQodWs8extS0=
-github.com/minio/sha256-simd v1.0.0 h1:v1ta+49hkWZyvaKwrQB8elexRqm6Y0aMLjCNsrYxo6g=
-github.com/minio/sha256-simd v1.0.0/go.mod h1:OuYzVNI5vcoYIAmbIvHPl3N3jUzVedXbKy5RFepssQM=
+github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dzMM=
+github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8=
 github.com/minio/sio v0.3.0 h1:syEFBewzOMOYVzSTFpp1MqpSZk8rUNbz8VIIc+PNzus=
 github.com/minio/sio v0.3.0/go.mod h1:8b0yPp2avGThviy/+OCJBI6OMpvxoUuiLvE6F1lebhw=
 github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
@@ -408,6 +407,10 @@ github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6po
 github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ=
 github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog=
 github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
+github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46 h1:GHRpF1pTW19a8tTFrMLUcfWwyC0pnifVo2ClaLq+hP8=
+github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46/go.mod h1:uAQ5PCi+MFsC7HjREoAz1BU+Mq60+05gifQSsHSDG/8=
+github.com/shabbyrobe/gocovmerge v0.0.0-20190829150210-3e036491d500 h1:WnNuhiq+FOY3jNj6JXFT+eLN3CQ/oPIsDPRanvwsmbI=
+github.com/shabbyrobe/gocovmerge v0.0.0-20190829150210-3e036491d500/go.mod h1:+njLrG5wSeoG4Ds61rFgEzKvenR2UHbjMoDHsczxly0=
 github.com/shirou/gopsutil/v3 v3.23.7 h1:C+fHO8hfIppoJ1WdsVm1RoI0RwXoNdfTK7yWXV0wVj4=
 github.com/shirou/gopsutil/v3 v3.23.7/go.mod h1:c4gnmoRC0hQuaLqvxnx1//VXQ0Ms/X9UnJF8pddY5z4=
 github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM=
@@ -531,8 +534,8 @@ golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJ
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
-golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
+golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE=
+golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -589,9 +592,12 @@ golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
 golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190829051458-42f498d34c4d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
 golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
 golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
+golang.org/x/tools v0.16.0 h1:GO788SKMRunPIBCXiQyo2AaexLstOrVhuAL5YwsckQM=
+golang.org/x/tools v0.16.0/go.mod h1:kYVVN6I1mBNoB1OX+noeBjbRk4IUEPa7JJ+TJMEooJ0=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 google.golang.org/api v0.134.0 h1:ktL4Goua+UBgoP1eL1/60LwZJqa1sIzkLmvoR3hR6Gw=
diff --git a/internal/bootstrap/data/setting.go b/internal/bootstrap/data/setting.go
index 4fd99ca2..8adc713d 100644
--- a/internal/bootstrap/data/setting.go
+++ b/internal/bootstrap/data/setting.go
@@ -176,6 +176,12 @@ func InitialSettings() []model.SettingItem {
 		{Key: conf.LdapDefaultDir, Value: "/", Type: conf.TypeString, Group: model.LDAP, Flag: model.PRIVATE},
 		{Key: conf.LdapDefaultPermission, Value: "0", Type: conf.TypeNumber, Group: model.LDAP, Flag: model.PRIVATE},
 		{Key: conf.LdapLoginTips, Value: "login with ldap", Type: conf.TypeString, Group: model.LDAP, Flag: model.PUBLIC},
+
+		//s3 settings
+		{Key: conf.S3Enabled, Value: "false", Type: conf.TypeBool, Group: model.S3, Flag: model.PRIVATE},
+		{Key: conf.S3AccessKeyId, Value: "", Type: conf.TypeString, Group: model.S3, Flag: model.PRIVATE},
+		{Key: conf.S3SecretAccessKey, Value: "", Type: conf.TypeString, Group: model.S3, Flag: model.PRIVATE},
+		{Key: conf.S3Buckets, Value: "[]", Type: conf.TypeString, Group: model.S3, Flag: model.PRIVATE},
 	}
 	initialSettingItems = append(initialSettingItems, tool.Tools.Items()...)
 	if flags.Dev {
diff --git a/internal/conf/const.go b/internal/conf/const.go
index 5ffdef2b..a5d95e5d 100644
--- a/internal/conf/const.go
+++ b/internal/conf/const.go
@@ -84,6 +84,12 @@ const (
 	LdapDefaultDir = "ldap_default_dir"
 	LdapLoginTips = "ldap_login_tips"
 
+	//s3
+	S3Enabled = "s3_enabled"
+	S3AccessKeyId = "s3_access_key_id"
+	S3SecretAccessKey = "s3_secret_access_key"
+	S3Buckets = "s3_buckets"
+
 	// qbittorrent
 	QbittorrentUrl = "qbittorrent_url"
 	QbittorrentSeedtime = "qbittorrent_seedtime"
diff --git a/internal/model/setting.go b/internal/model/setting.go
index b561ad6b..1a47cf5c 100644
--- a/internal/model/setting.go
+++ b/internal/model/setting.go
@@ -10,6 +10,7 @@ const (
 	INDEX
 	SSO
 	LDAP
+	S3
 )
 
 const (
diff --git a/server/router.go b/server/router.go
index 1421f665..b0b66294 100644
--- a/server/router.go
+++ b/server/router.go
@@ -36,6 +36,7 @@ func Init(e *gin.Engine) {
 		g.Use(middlewares.MaxAllowed(conf.Conf.MaxConnections))
 	}
 	WebDav(g.Group("/dav"))
+	S3(g.Group("/s3"))
 
 	g.GET("/d/*path", middlewares.Down, handles.Down)
 	g.GET("/p/*path", middlewares.Down, handles.Proxy)
diff --git a/server/s3.go b/server/s3.go
new file mode 100644
index 00000000..5a70cf2a
--- /dev/null
+++ b/server/s3.go
@@ -0,0 +1,29 @@
+package server
+
+import (
+	"context"
+	"path"
+	"strings"
+
+	"github.com/alist-org/alist/v3/internal/conf"
+	"github.com/alist-org/alist/v3/internal/setting"
+	"github.com/alist-org/alist/v3/server/common"
+	"github.com/alist-org/alist/v3/server/s3"
+	"github.com/gin-gonic/gin"
+)
+
+func S3(g *gin.RouterGroup) {
+	if !setting.GetBool(conf.S3Enabled) {
+		g.Any("/*path", func(c *gin.Context) {
+			common.ErrorStrResp(c, "S3 server is not enabled", 403)
+		})
+		return
+	}
+	h, _ := s3.NewServer(context.Background(), []string{setting.GetStr(conf.S3AccessKeyId) + "," + setting.GetStr(conf.S3SecretAccessKey)})
+
+	g.Any("/*path", func(c *gin.Context) {
+		adjustedPath := strings.TrimPrefix(c.Request.URL.Path, path.Join(conf.URL.Path, "/s3"))
+		c.Request.URL.Path = adjustedPath
+		gin.WrapH(h)(c)
+	})
+}
diff --git a/server/s3/backend.go b/server/s3/backend.go
new file mode 100644
index 00000000..c7340525
--- /dev/null
+++ b/server/s3/backend.go
@@ -0,0 +1,432 @@
+// Credits: https://pkg.go.dev/github.com/rclone/rclone@v1.65.2/cmd/serve/s3
+// Package s3 implements a fake s3 server for alist
+package s3
+
+import (
+	"context"
+	"encoding/hex"
+	"fmt"
+	"io"
+	"path"
+	"strings"
+	"sync"
+	"time"
+
+	"github.com/Mikubill/gofakes3"
+	"github.com/alist-org/alist/v3/internal/errs"
+	"github.com/alist-org/alist/v3/internal/fs"
+	"github.com/alist-org/alist/v3/internal/model"
+	"github.com/alist-org/alist/v3/internal/op"
+	"github.com/alist-org/alist/v3/internal/stream"
+	"github.com/alist-org/alist/v3/pkg/http_range"
+	"github.com/alist-org/alist/v3/pkg/utils"
+	"github.com/ncw/swift/v2"
+)
+
+var (
+	emptyPrefix = &gofakes3.Prefix{}
+	timeFormat  = "Mon, 2 Jan 2006 15:04:05.999999999 GMT"
+)
+
+// s3Backend implements the gofakes3.Backend interface to make an S3
+// backend for gofakes3
+type s3Backend struct {
+	meta *sync.Map
+}
+
+// newBackend creates a new s3Backend.
+func newBackend() gofakes3.Backend {
+	return &s3Backend{
+		meta: new(sync.Map),
+	}
+}
+
+// ListBuckets lists the buckets configured in the s3_buckets setting.
+func (b *s3Backend) ListBuckets() ([]gofakes3.BucketInfo, error) {
+	buckets, err := getAndParseBuckets()
+	if err != nil {
+		return nil, err
+	}
+	var response []gofakes3.BucketInfo
+	ctx := context.Background()
+	for _, b := range buckets {
+		node, _ := fs.Get(ctx, b.Path, &fs.GetArgs{})
+		response = append(response, gofakes3.BucketInfo{
+			// Name: gofakes3.URLEncode(b.Name),
+			Name:         b.Name,
+			CreationDate: gofakes3.NewContentTime(node.ModTime()),
+		})
+	}
+	return response, nil
+}
+
+// ListBucket lists the objects in the given bucket.
+func (b *s3Backend) ListBucket(bucketName string, prefix *gofakes3.Prefix, page gofakes3.ListBucketPage) (*gofakes3.ObjectList, error) {
+	bucket, err := getBucketByName(bucketName)
+	if err != nil {
+		return nil, err
+	}
+	bucketPath := bucket.Path
+
+	if prefix == nil {
+		prefix = emptyPrefix
+	}
+
+	// workaround
+	if strings.TrimSpace(prefix.Prefix) == "" {
+		prefix.HasPrefix = false
+	}
+	if strings.TrimSpace(prefix.Delimiter) == "" {
+		prefix.HasDelimiter = false
+	}
+
+	response := gofakes3.NewObjectList()
+	path, remaining := prefixParser(prefix)
+
+	err = b.entryListR(bucketPath, path, remaining, prefix.HasDelimiter, response)
+	if err == gofakes3.ErrNoSuchKey {
+		// AWS just returns an empty list
+		response = gofakes3.NewObjectList()
+	} else if err != nil {
+		return nil, err
+	}
+
+	return b.pager(response, page)
+}
+
+// HeadObject returns the fileinfo for the given object name.
+//
+// Note that the metadata is not supported yet.
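+// The returned object carries a no-op reader, so callers get only the size and header metadata.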
+func (b *s3Backend) HeadObject(bucketName, objectName string) (*gofakes3.Object, error) {
+	ctx := context.Background()
+	bucket, err := getBucketByName(bucketName)
+	if err != nil {
+		return nil, err
+	}
+	bucketPath := bucket.Path
+
+	fp := path.Join(bucketPath, objectName)
+	fmeta, _ := op.GetNearestMeta(fp)
+	node, err := fs.Get(context.WithValue(ctx, "meta", fmeta), fp, &fs.GetArgs{})
+	if err != nil {
+		return nil, gofakes3.KeyNotFound(objectName)
+	}
+
+	if node.IsDir() {
+		return nil, gofakes3.KeyNotFound(objectName)
+	}
+
+	size := node.GetSize()
+	// hash := getFileHashByte(fobj)
+
+	meta := map[string]string{
+		"Last-Modified": node.ModTime().Format(timeFormat),
+		"Content-Type":  utils.GetMimeType(fp),
+	}
+
+	if val, ok := b.meta.Load(fp); ok {
+		metaMap := val.(map[string]string)
+		for k, v := range metaMap {
+			meta[k] = v
+		}
+	}
+
+	return &gofakes3.Object{
+		Name: objectName,
+		// Hash:     hash,
+		Metadata: meta,
+		Size:     size,
+		Contents: noOpReadCloser{},
+	}, nil
+}
+
+// GetObject fetches the object from the filesystem.
+func (b *s3Backend) GetObject(bucketName, objectName string, rangeRequest *gofakes3.ObjectRangeRequest) (obj *gofakes3.Object, err error) {
+	ctx := context.Background()
+	bucket, err := getBucketByName(bucketName)
+	if err != nil {
+		return nil, err
+	}
+	bucketPath := bucket.Path
+
+	fp := path.Join(bucketPath, objectName)
+	fmeta, _ := op.GetNearestMeta(fp)
+	node, err := fs.Get(context.WithValue(ctx, "meta", fmeta), fp, &fs.GetArgs{})
+	if err != nil {
+		return nil, gofakes3.KeyNotFound(objectName)
+	}
+
+	if node.IsDir() {
+		return nil, gofakes3.KeyNotFound(objectName)
+	}
+
+	link, file, err := fs.Link(ctx, fp, model.LinkArgs{})
+	if err != nil {
+		return nil, err
+	}
+
+	size := file.GetSize()
+	rnge, err := rangeRequest.Range(size)
+	if err != nil {
+		return nil, err
+	}
+
+	if link.RangeReadCloser == nil && link.MFile == nil && len(link.URL) == 0 {
+		return nil, fmt.Errorf("the remote storage driver needs to be enhanced to support s3")
+	}
+	remoteFileSize := file.GetSize()
+	remoteClosers := utils.EmptyClosers()
+	rangeReaderFunc := func(ctx context.Context, start, length int64) (io.ReadCloser, error) {
+		if length >= 0 && start+length >= remoteFileSize {
+			length = -1
+		}
+		rrc := link.RangeReadCloser
+		if len(link.URL) > 0 {
+
+			rangedRemoteLink := &model.Link{
+				URL:    link.URL,
+				Header: link.Header,
+			}
+			var converted, err = stream.GetRangeReadCloserFromLink(remoteFileSize, rangedRemoteLink)
+			if err != nil {
+				return nil, err
+			}
+			rrc = converted
+		}
+		if rrc != nil {
+			remoteReader, err := rrc.RangeRead(ctx, http_range.Range{Start: start, Length: length})
+			remoteClosers.AddClosers(rrc.GetClosers())
+			if err != nil {
+				return nil, err
+			}
+			return remoteReader, nil
+		}
+		if link.MFile != nil {
+			_, err := link.MFile.Seek(start, io.SeekStart)
+			if err != nil {
+				return nil, err
+			}
+			//remoteClosers.Add(remoteLink.MFile)
+			//keep reusing the same MFile and close it at the end
+			remoteClosers.Add(link.MFile)
+			return io.NopCloser(link.MFile), nil
+		}
+		return nil, errs.NotSupport
+	}
+
+	var rdr io.ReadCloser
+	if rnge != nil {
+		rdr, err = rangeReaderFunc(ctx, rnge.Start, rnge.Length)
+		if err != nil {
+			return nil, err
+		}
+	} else {
+		rdr, err = rangeReaderFunc(ctx, 0, -1)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	meta := map[string]string{
+		"Last-Modified": node.ModTime().Format(timeFormat),
+		"Content-Type":  utils.GetMimeType(fp),
+	}
+
+	if val, ok := b.meta.Load(fp); ok {
+		metaMap := val.(map[string]string)
+		for k, v := range metaMap {
+			meta[k] = v
+		}
+	}
+
+	return &gofakes3.Object{
+		// Name: gofakes3.URLEncode(objectName),
+		Name: objectName,
+		// Hash:     "",
+		Metadata: meta,
+		Size:     size,
+		Range:    rnge,
+		Contents: rdr,
+	}, nil
+}
+
+// TouchObject creates or updates meta on specified object.
+func (b *s3Backend) TouchObject(fp string, meta map[string]string) (result gofakes3.PutObjectResult, err error) {
+	//TODO: implement
+	return result, gofakes3.ErrNotImplemented
+}
+
+// PutObject creates or overwrites the object with the given name.
+func (b *s3Backend) PutObject(
+	bucketName, objectName string,
+	meta map[string]string,
+	input io.Reader, size int64,
+) (result gofakes3.PutObjectResult, err error) {
+	ctx := context.Background()
+	bucket, err := getBucketByName(bucketName)
+	if err != nil {
+		return result, err
+	}
+	bucketPath := bucket.Path
+
+	fp := path.Join(bucketPath, objectName)
+	reqPath := path.Dir(fp)
+	fmeta, _ := op.GetNearestMeta(fp)
+	_, err = fs.Get(context.WithValue(ctx, "meta", fmeta), reqPath, &fs.GetArgs{})
+	if err != nil {
+		return result, gofakes3.KeyNotFound(objectName)
+	}
+
+	var ti time.Time
+
+	if val, ok := meta["X-Amz-Meta-Mtime"]; ok {
+		ti, _ = swift.FloatStringToTime(val)
+	}
+
+	if val, ok := meta["mtime"]; ok {
+		ti, _ = swift.FloatStringToTime(val)
+	}
+
+	obj := model.Object{
+		Name:     path.Base(fp),
+		Size:     size,
+		Modified: ti,
+		Ctime:    time.Now(),
+	}
+	stream := &stream.FileStream{
+		Obj:      &obj,
+		Reader:   input,
+		Mimetype: meta["Content-Type"],
+	}
+
+	err = fs.PutDirectly(ctx, path.Dir(reqPath), stream)
+	if err != nil {
+		return result, err
+	}
+
+	if err := stream.Close(); err != nil {
+		// remove file when close error occurred (FsPutErr)
+		_ = fs.Remove(ctx, fp)
+		return result, err
+	}
+
+	b.meta.Store(fp, meta)
+
+	return result, nil
+}
+
+// DeleteMulti deletes multiple objects in a single request.
+func (b *s3Backend) DeleteMulti(bucketName string, objects ...string) (result gofakes3.MultiDeleteResult, rerr error) {
+	for _, object := range objects {
+		if err := b.deleteObject(bucketName, object); err != nil {
+			utils.Log.Errorf("serve s3: delete object failed: %v", err)
+			result.Error = append(result.Error, gofakes3.ErrorResult{
+				Code:    gofakes3.ErrInternal,
+				Message: gofakes3.ErrInternal.Message(),
+				Key:     object,
+			})
+		} else {
+			result.Deleted = append(result.Deleted, gofakes3.ObjectID{
+				Key: object,
+			})
+		}
+	}
+
+	return result, nil
+}
+
+// DeleteObject deletes the object with the given name.
+func (b *s3Backend) DeleteObject(bucketName, objectName string) (result gofakes3.ObjectDeleteResult, rerr error) {
+	return result, b.deleteObject(bucketName, objectName)
+}
+
+// deleteObject deletes the object from the filesystem.
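+// Matching S3 semantics, deleting a key that does not exist is not reported as an error.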
+func (b *s3Backend) deleteObject(bucketName, objectName string) error {
+	ctx := context.Background()
+	bucket, err := getBucketByName(bucketName)
+	if err != nil {
+		return err
+	}
+	bucketPath := bucket.Path
+
+	fp := path.Join(bucketPath, objectName)
+	fmeta, _ := op.GetNearestMeta(fp)
+	// S3 does not report an error when attempting to delete a key that does not exist, so
+	// we need to skip IsNotExist errors.
+	if _, err := fs.Get(context.WithValue(ctx, "meta", fmeta), fp, &fs.GetArgs{}); err != nil && !errs.IsObjectNotFound(err) {
+		return err
+	}
+
+	fs.Remove(ctx, fp)
+	return nil
+}
+
+// CreateBucket creates a new bucket.
+func (b *s3Backend) CreateBucket(name string) error {
+	return gofakes3.ErrNotImplemented
+}
+
+// DeleteBucket deletes the bucket with the given name.
+func (b *s3Backend) DeleteBucket(name string) error {
+	return gofakes3.ErrNotImplemented
+}
+
+// BucketExists checks if the bucket exists.
+func (b *s3Backend) BucketExists(name string) (exists bool, err error) {
+	buckets, err := getAndParseBuckets()
+	if err != nil {
+		return false, err
+	}
+	for _, b := range buckets {
+		if b.Name == name {
+			return true, nil
+		}
+	}
+	return false, nil
+}
+
+// CopyObject copies the specified object from srcKey to dstKey.
+func (b *s3Backend) CopyObject(srcBucket, srcKey, dstBucket, dstKey string, meta map[string]string) (result gofakes3.CopyObjectResult, err error) {
+	if srcBucket == dstBucket && srcKey == dstKey {
+		//TODO: update meta
+		return result, nil
+	}
+
+	ctx := context.Background()
+	srcB, err := getBucketByName(srcBucket)
+	if err != nil {
+		return result, err
+	}
+	srcBucketPath := srcB.Path
+
+	srcFp := path.Join(srcBucketPath, srcKey)
+	fmeta, _ := op.GetNearestMeta(srcFp)
+	srcNode, err := fs.Get(context.WithValue(ctx, "meta", fmeta), srcFp, &fs.GetArgs{})
+
+	c, err := b.GetObject(srcBucket, srcKey, nil)
+	if err != nil {
+		return
+	}
+	defer func() {
+		_ = c.Contents.Close()
+	}()
+
+	for k, v := range c.Metadata {
+		if _, found := meta[k]; !found && k != "X-Amz-Acl" {
+			meta[k] = v
+		}
+	}
+	if _, ok := meta["mtime"]; !ok {
+		meta["mtime"] = swift.TimeToFloatString(srcNode.ModTime())
+	}
+
+	_, err = b.PutObject(dstBucket, dstKey, meta, c.Contents, c.Size)
+	if err != nil {
+		return
+	}
+
+	return gofakes3.CopyObjectResult{
+		ETag:         `"` + hex.EncodeToString(c.Hash) + `"`,
+		LastModified: gofakes3.NewContentTime(srcNode.ModTime()),
+	}, nil
+}
diff --git a/server/s3/ioutils.go b/server/s3/ioutils.go
new file mode 100644
index 00000000..6b49cacc
--- /dev/null
+++ b/server/s3/ioutils.go
@@ -0,0 +1,36 @@
+// Credits: https://pkg.go.dev/github.com/rclone/rclone@v1.65.2/cmd/serve/s3
+// Package s3 implements a fake s3 server for alist
+package s3
+
+import "io"
+
+type noOpReadCloser struct{}
+
+type readerWithCloser struct {
+	io.Reader
+	closer func() error
+}
+
+var _ io.ReadCloser = &readerWithCloser{}
+
+func (d noOpReadCloser) Read(b []byte) (n int, err error) {
+	return 0, io.EOF
+}
+
+func (d noOpReadCloser) Close() error {
+	return nil
+}
+
+func limitReadCloser(rdr io.Reader, closer func() error, sz int64) io.ReadCloser {
+	return &readerWithCloser{
+		Reader: io.LimitReader(rdr, sz),
+		closer: closer,
+	}
+}
+
+func (rwc *readerWithCloser) Close() error {
+	if rwc.closer != nil {
+		return rwc.closer()
+	}
+	return nil
+}
diff --git a/server/s3/list.go b/server/s3/list.go
new file mode 100644
index 00000000..bce870ca
--- /dev/null
+++ b/server/s3/list.go
@@ -0,0 +1,53 @@
+// Credits: https://pkg.go.dev/github.com/rclone/rclone@v1.65.2/cmd/serve/s3
+// Package s3 implements a fake s3 server for alist
+package s3
+
+import (
+	"path"
+	"strings"
+
+	"github.com/Mikubill/gofakes3"
+)
+
+func (b *s3Backend) entryListR(bucket, fdPath, name string, addPrefix bool, response *gofakes3.ObjectList) error {
+	fp := path.Join(bucket, fdPath)
+
+	dirEntries, err := getDirEntries(fp)
+	if err != nil {
+		return err
+	}
+
+	for _, entry := range dirEntries {
+		object := entry.GetName()
+
+		// workaround for control-chars detection
+		objectPath := path.Join(fdPath, object)
+
+		if !strings.HasPrefix(object, name) {
+			continue
+		}
+
+		if entry.IsDir() {
+			if addPrefix {
+				// response.AddPrefix(gofakes3.URLEncode(objectPath))
+				response.AddPrefix(objectPath)
+				continue
+			}
+			err := b.entryListR(bucket, path.Join(fdPath, object), "", false, response)
+			if err != nil {
+				return err
+			}
+		} else {
+			item := &gofakes3.Content{
+				// Key: gofakes3.URLEncode(objectPath),
+				Key:          objectPath,
+				LastModified: gofakes3.NewContentTime(entry.ModTime()),
+				ETag:         getFileHash(entry),
+				Size:         entry.GetSize(),
+				StorageClass: gofakes3.StorageStandard,
+			}
+			response.Add(item)
+		}
+	}
+	return nil
+}
diff --git a/server/s3/logger.go b/server/s3/logger.go
new file mode 100644
index 00000000..7566fa8a
--- /dev/null
+++ b/server/s3/logger.go
@@ -0,0 +1,27 @@
+// Credits: https://pkg.go.dev/github.com/rclone/rclone@v1.65.2/cmd/serve/s3
+// Package s3 implements a fake s3 server for alist
+package s3
+
+import (
+	"fmt"
+
+	"github.com/Mikubill/gofakes3"
+	"github.com/alist-org/alist/v3/pkg/utils"
+)
+
+// logger outputs formatted messages
+type logger struct{}
+
+// Print writes a log message at the given level
+func (l logger) Print(level gofakes3.LogLevel, v ...interface{}) {
+	switch level {
+	default:
+		fallthrough
+	case gofakes3.LogErr:
+		utils.Log.Errorf("serve s3: %s", fmt.Sprintln(v...))
+	case gofakes3.LogWarn:
+		utils.Log.Infof("serve s3: %s", fmt.Sprintln(v...))
+	case gofakes3.LogInfo:
+		utils.Log.Debugf("serve s3: %s", fmt.Sprintln(v...))
+	}
+}
diff --git a/server/s3/pager.go b/server/s3/pager.go
new file mode 100644
index 00000000..3268b0ca
--- /dev/null
+++ b/server/s3/pager.go
@@ -0,0 +1,67 @@
+// Credits: https://pkg.go.dev/github.com/rclone/rclone@v1.65.2/cmd/serve/s3
+// Package s3 implements a fake s3 server for alist
+package s3
+
+import (
+	"sort"
+
+	"github.com/Mikubill/gofakes3"
+)
+
+// pager splits the object list into multiple pages.
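+// Common prefixes are listed before object keys, and MaxKeys falls back to 1000 when the request does not set it.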
+func (db *s3Backend) pager(list *gofakes3.ObjectList, page gofakes3.ListBucketPage) (*gofakes3.ObjectList, error) {
+	// sort by alphabet
+	sort.Slice(list.CommonPrefixes, func(i, j int) bool {
+		return list.CommonPrefixes[i].Prefix < list.CommonPrefixes[j].Prefix
+	})
+	// sort by modtime
+	sort.Slice(list.Contents, func(i, j int) bool {
+		return list.Contents[i].LastModified.Before(list.Contents[j].LastModified.Time)
+	})
+	tokens := page.MaxKeys
+	if tokens == 0 {
+		tokens = 1000
+	}
+	if page.HasMarker {
+		for i, obj := range list.Contents {
+			if obj.Key == page.Marker {
+				list.Contents = list.Contents[i+1:]
+				break
+			}
+		}
+		for i, obj := range list.CommonPrefixes {
+			if obj.Prefix == page.Marker {
+				list.CommonPrefixes = list.CommonPrefixes[i+1:]
+				break
+			}
+		}
+	}
+
+	response := gofakes3.NewObjectList()
+	for _, obj := range list.CommonPrefixes {
+		if tokens <= 0 {
+			break
+		}
+		response.AddPrefix(obj.Prefix)
+		tokens--
+	}
+
+	for _, obj := range list.Contents {
+		if tokens <= 0 {
+			break
+		}
+		response.Add(obj)
+		tokens--
+	}
+
+	if len(list.CommonPrefixes)+len(list.Contents) > int(page.MaxKeys) {
+		response.IsTruncated = true
+		if len(response.Contents) > 0 {
+			response.NextMarker = response.Contents[len(response.Contents)-1].Key
+		} else {
+			response.NextMarker = response.CommonPrefixes[len(response.CommonPrefixes)-1].Prefix
+		}
+	}
+
+	return response, nil
+}
diff --git a/server/s3/server.go b/server/s3/server.go
new file mode 100644
index 00000000..2cb1f36d
--- /dev/null
+++ b/server/s3/server.go
@@ -0,0 +1,27 @@
+// Credits: https://pkg.go.dev/github.com/rclone/rclone@v1.65.2/cmd/serve/s3
+// Package s3 implements a fake s3 server for alist
+package s3
+
+import (
+	"context"
+	"math/rand"
+	"net/http"
+
+	"github.com/Mikubill/gofakes3"
+)
+
+// NewServer makes a new S3 server serving the configured buckets
+func NewServer(ctx context.Context, authpair []string) (h http.Handler, err error) {
+	var newLogger logger
+	faker := gofakes3.New(
+		newBackend(),
+		// gofakes3.WithHostBucket(!opt.pathBucketMode),
+		gofakes3.WithLogger(newLogger),
+		gofakes3.WithRequestID(rand.Uint64()),
+		gofakes3.WithoutVersioning(),
+		gofakes3.WithV4Auth(authlistResolver(authpair)),
+		gofakes3.WithIntegrityCheck(true), // Check Content-MD5 if supplied
+	)
+
+	return faker.Server(), nil
+}
diff --git a/server/s3/utils.go b/server/s3/utils.go
new file mode 100644
index 00000000..88fab1ad
--- /dev/null
+++ b/server/s3/utils.go
@@ -0,0 +1,164 @@
+// Credits: https://pkg.go.dev/github.com/rclone/rclone@v1.65.2/cmd/serve/s3
+// Package s3 implements a fake s3 server for alist
+package s3
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"strings"
+
+	"github.com/Mikubill/gofakes3"
+	"github.com/alist-org/alist/v3/internal/conf"
+	"github.com/alist-org/alist/v3/internal/errs"
+	"github.com/alist-org/alist/v3/internal/fs"
+	"github.com/alist-org/alist/v3/internal/model"
+	"github.com/alist-org/alist/v3/internal/op"
+	"github.com/alist-org/alist/v3/internal/setting"
+	"github.com/alist-org/alist/v3/pkg/utils"
+)
+
+type Bucket struct {
+	Name string `json:"name"`
+	Path string `json:"path"`
+}
+
+func getAndParseBuckets() ([]Bucket, error) {
+	var res []Bucket
+	err := json.Unmarshal([]byte(setting.GetStr(conf.S3Buckets)), &res)
+	return res, err
+}
+
+func getBucketByName(name string) (Bucket, error) {
+	buckets, err := getAndParseBuckets()
+	if err != nil {
+		return Bucket{}, err
+	}
+	for _, b := range buckets {
+		if b.Name == name {
+			return b, nil
+		}
+	}
+	return Bucket{}, gofakes3.BucketNotFound(name)
+}
+
+func getDirEntries(path string) ([]model.Obj, error) {
+	ctx := context.Background()
+	meta, _ := op.GetNearestMeta(path)
+	fi, err := fs.Get(context.WithValue(ctx, "meta", meta), path, &fs.GetArgs{})
+	if errs.IsNotFoundError(err) {
+		return nil, gofakes3.ErrNoSuchKey
+	} else if err != nil {
+		return nil, gofakes3.ErrNoSuchKey
+	}
+
+	if !fi.IsDir() {
+		return nil, gofakes3.ErrNoSuchKey
+	}
+
+	dirEntries, err := fs.List(context.WithValue(ctx, "meta", meta), path, &fs.ListArgs{})
+	if err != nil {
+		return nil, err
+	}
+
+	return dirEntries, nil
+}
+
+// func getFileHashByte(node interface{}) []byte {
+// 	b, err := hex.DecodeString(getFileHash(node))
+// 	if err != nil {
+// 		return nil
+// 	}
+// 	return b
+// }
+
+func getFileHash(node interface{}) string {
+	// var o fs.Object
+
+	// switch b := node.(type) {
+	// case vfs.Node:
+	// 	fsObj, ok := b.DirEntry().(fs.Object)
+	// 	if !ok {
+	// 		fs.Debugf("serve s3", "File uploading - reading hash from VFS cache")
+	// 		in, err := b.Open(os.O_RDONLY)
+	// 		if err != nil {
+	// 			return ""
+	// 		}
+	// 		defer func() {
+	// 			_ = in.Close()
+	// 		}()
+	// 		h, err := hash.NewMultiHasherTypes(hash.NewHashSet(hash.MD5))
+	// 		if err != nil {
+	// 			return ""
+	// 		}
+	// 		_, err = io.Copy(h, in)
+	// 		if err != nil {
+	// 			return ""
+	// 		}
+	// 		return h.Sums()[hash.MD5]
+	// 	}
+	// 	o = fsObj
+	// case fs.Object:
+	// 	o = b
+	// }
+
+	// hash, err := o.Hash(context.Background(), hash.MD5)
+	// if err != nil {
+	// 	return ""
+	// }
+	// return hash
+	return ""
+}
+
+func prefixParser(p *gofakes3.Prefix) (path, remaining string) {
+	idx := strings.LastIndexByte(p.Prefix, '/')
+	if idx < 0 {
+		return "", p.Prefix
+	}
+	return p.Prefix[:idx], p.Prefix[idx+1:]
+}
+
+// // FIXME this could be implemented by VFS.MkdirAll()
+// func mkdirRecursive(path string, VFS *vfs.VFS) error {
+// 	path = strings.Trim(path, "/")
+// 	dirs := strings.Split(path, "/")
+// 	dir := ""
+// 	for _, d := range dirs {
+// 		dir += "/" + d
// 		if _, err := VFS.Stat(dir); err != nil {
+// 			err := VFS.Mkdir(dir, 0777)
+// 			if err != nil {
+// 				return err
+// 			}
+// 		}
+// 	}
+// 	return nil
+// }

+// func rmdirRecursive(p string, VFS *vfs.VFS) {
+// 	dir := path.Dir(p)
+// 	if !strings.ContainsAny(dir, "/\\") {
+// 		// might be bucket(root)
+// 		return
+// 	}
+// 	if _, err := VFS.Stat(dir); err == nil {
+// 		err := VFS.Remove(dir)
+// 		if err != nil {
+// 			return
+// 		}
+// 		rmdirRecursive(dir, VFS)
+// 	}
+// }
+
+func authlistResolver(list []string) map[string]string {
+	authList := make(map[string]string)
+	for _, v := range list {
+		parts := strings.Split(v, ",")
+		if len(parts) != 2 {
+			utils.Log.Infof(fmt.Sprintf("Ignored: invalid auth pair %s", v))
+			continue
+		}
+		authList[parts[0]] = parts[1]
+	}
+	return authList
+}