package http

import (
	"errors"
	"log"
	"net/http"
	"net/url"
	gopath "path"
	"path/filepath"
	"strings"

	"github.com/mholt/archiver/v3"

	"github.com/filebrowser/filebrowser/v2/files"
	"github.com/filebrowser/filebrowser/v2/fileutils"
	"github.com/filebrowser/filebrowser/v2/users"
)

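// slashClean makes sure name is rooted with a leading slash and returns its
// path.Clean, so relative segments cannot escape above the root.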
func slashClean(name string) string {
	if name == "" || name[0] != '/' {
		name = "/" + name
	}
	return gopath.Clean(name)
}

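// parseQueryFiles resolves the comma-separated "files" query parameter into
// paths joined onto f.Path, defaulting to f.Path itself when none are given.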
func parseQueryFiles(r *http.Request, f *files.FileInfo, _ *users.User) ([]string, error) {
	var fileSlice []string
	names := strings.Split(r.URL.Query().Get("files"), ",")

	if len(names) == 0 {
		fileSlice = append(fileSlice, f.Path)
	} else {
		for _, name := range names {
			name, err := url.QueryUnescape(strings.Replace(name, "+", "%2B", -1)) //nolint:govet
			if err != nil {
				return nil, err
			}

			name = slashClean(name)
			fileSlice = append(fileSlice, filepath.Join(f.Path, name))
		}
	}

	return fileSlice, nil
}

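// parseQueryAlgorithm maps the "algo" query parameter to an archive file
// extension and a matching archiver.Writer; an empty value defaults to zip.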
//nolint:goconst,nolintlint
func parseQueryAlgorithm(r *http.Request) (string, archiver.Writer, error) {
	// TODO: use enum
	switch r.URL.Query().Get("algo") {
	case "zip", "true", "":
		return ".zip", archiver.NewZip(), nil
	case "tar":
		return ".tar", archiver.NewTar(), nil
	case "targz":
		return ".tar.gz", archiver.NewTarGz(), nil
	case "tarbz2":
		return ".tar.bz2", archiver.NewTarBz2(), nil
	case "tarxz":
		return ".tar.xz", archiver.NewTarXz(), nil
	case "tarlz4":
		return ".tar.lz4", archiver.NewTarLz4(), nil
	case "tarsz":
		return ".tar.sz", archiver.NewTarSz(), nil
	default:
		return "", nil, errors.New("format not implemented")
	}
}

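// setContentDisposition marks the response as inline when the "inline" query
// parameter is "true"; otherwise the response is an attachment named after
// the file, with the filename encoded as per RFC 6266 section 4.3.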
func setContentDisposition(w http.ResponseWriter, r *http.Request, file *files.FileInfo) {
	if r.URL.Query().Get("inline") == "true" {
		w.Header().Set("Content-Disposition", "inline")
	} else {
		// As per RFC6266 section 4.3
		w.Header().Set("Content-Disposition", "attachment; filename*=utf-8''"+url.PathEscape(file.Name))
	}
}

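// rawHandler serves raw downloads: regular files are streamed by
// rawFileHandler and directories (or file selections) are archived by
// rawDirHandler. If the user lacks the download permission, the handler
// returns early with http.StatusAccepted; named pipes only receive a
// Content-Disposition header.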
var rawHandler = withUser(func(w http.ResponseWriter, r *http.Request, d *data) (int, error) {
	if !d.user.Perm.Download {
		return http.StatusAccepted, nil
	}

	file, err := files.NewFileInfo(files.FileOptions{
		Fs:         d.user.Fs,
		Path:       r.URL.Path,
		Modify:     d.user.Perm.Modify,
		Expand:     false,
		ReadHeader: d.server.TypeDetectionByHeader,
		Checker:    d,
	})
	if err != nil {
		return errToStatus(err), err
	}

	if files.IsNamedPipe(file.Mode) {
		setContentDisposition(w, r, file)
		return 0, nil
	}

	if !file.IsDir {
		return rawFileHandler(w, r, file)
	}

	return rawDirHandler(w, r, d, file)
})

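// addFile writes path to the archive, named relative to commonPath, and
// recurses into directories. Paths rejected by the checker and non-regular
// files (other than directories) are skipped; commonPath itself is never
// written as an entry.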
func addFile(ar archiver.Writer, d *data, path, commonPath string) error {
	if !d.Check(path) {
		return nil
	}

	info, err := d.user.Fs.Stat(path)
	if err != nil {
		return err
	}

	if !info.IsDir() && !info.Mode().IsRegular() {
		return nil
	}

	file, err := d.user.Fs.Open(path)
	if err != nil {
		return err
	}
	defer file.Close()

	if path != commonPath {
		filename := strings.TrimPrefix(path, commonPath)
		filename = strings.TrimPrefix(filename, string(filepath.Separator))
		err = ar.Write(archiver.File{
			FileInfo: archiver.FileInfo{
				FileInfo:   info,
				CustomName: filename,
			},
			ReadCloser: file,
		})
		if err != nil {
			return err
		}
	}

	if info.IsDir() {
		names, err := file.Readdirnames(0)
		if err != nil {
			return err
		}

		for _, name := range names {
			fPath := filepath.Join(path, name)
			err = addFile(ar, d, fPath, commonPath)
			if err != nil {
				log.Printf("Failed to archive %s: %v", fPath, err)
			}
		}
	}

	return nil
}

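// rawDirHandler streams the requested directory (or the selected files) as a
// single archive in the format chosen by parseQueryAlgorithm. The download
// name is the base of the selected paths' common prefix, falling back to the
// directory name, and is prefixed with "_" when more than one entry was
// selected, to distinguish it from a full directory archive.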
func rawDirHandler(w http.ResponseWriter, r *http.Request, d *data, file *files.FileInfo) (int, error) {
	filenames, err := parseQueryFiles(r, file, d.user)
	if err != nil {
		return http.StatusInternalServerError, err
	}

	extension, ar, err := parseQueryAlgorithm(r)
	if err != nil {
		return http.StatusInternalServerError, err
	}

	err = ar.Create(w)
	if err != nil {
		return http.StatusInternalServerError, err
	}
	defer ar.Close()

	commonDir := fileutils.CommonPrefix(filepath.Separator, filenames...)

	name := filepath.Base(commonDir)
	if name == "." || name == "" || name == string(filepath.Separator) {
		name = file.Name
	}
	// Prefix used to distinguish a filelist generated
	// archive from the full directory archive
	if len(filenames) > 1 {
		name = "_" + name
	}
	name += extension
	w.Header().Set("Content-Disposition", "attachment; filename*=utf-8''"+url.PathEscape(name))

	for _, fname := range filenames {
		err = addFile(ar, d, fname, commonDir)
		if err != nil {
			log.Printf("Failed to archive %s: %v", fname, err)
		}
	}

	return 0, nil
}

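// rawFileHandler streams a single file with http.ServeContent, using the
// Content-Disposition set by setContentDisposition and a private
// Cache-Control header.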
func rawFileHandler(w http.ResponseWriter, r *http.Request, file *files.FileInfo) (int, error) {
	fd, err := file.Fs.Open(file.Path)
	if err != nil {
		return http.StatusInternalServerError, err
	}
	defer fd.Close()

	setContentDisposition(w, r, file)

	w.Header().Set("Cache-Control", "private")
	http.ServeContent(w, r, file.Name, file.ModTime, fd)
	return 0, nil
}