Mirror of https://github.com/seaweedfs/seaweedfs.git
filer cipher: single chunk http POST and PUT and read
@@ -3,8 +3,6 @@ package needle
 import (
     "encoding/json"
     "fmt"
-    "io"
-    "io/ioutil"
     "net/http"
     "strconv"
     "strings"
@@ -12,7 +10,6 @@ import (
 
     "github.com/chrislusf/seaweedfs/weed/images"
     . "github.com/chrislusf/seaweedfs/weed/storage/types"
-    "github.com/chrislusf/seaweedfs/weed/util"
 )
 
 const (
@@ -51,67 +48,30 @@ func (n *Needle) String() (str string) {
     return
 }
 
-func ParseUpload(r *http.Request, sizeLimit int64) (
-    fileName string, data []byte, mimeType string, pairMap map[string]string, isGzipped bool, originalDataSize int,
-    modifiedTime uint64, ttl *TTL, isChunkedFile bool, e error) {
-    pairMap = make(map[string]string)
-    for k, v := range r.Header {
-        if len(v) > 0 && strings.HasPrefix(k, PairNamePrefix) {
-            pairMap[k] = v[0]
-        }
-    }
-
-    if r.Method == "POST" {
-        fileName, data, mimeType, isGzipped, originalDataSize, isChunkedFile, e = parseMultipart(r, sizeLimit)
-    } else {
-        isGzipped = r.Header.Get("Content-Encoding") == "gzip"
-        mimeType = r.Header.Get("Content-Type")
-        fileName = ""
-        data, e = ioutil.ReadAll(io.LimitReader(r.Body, sizeLimit+1))
-        originalDataSize = len(data)
-        if e == io.EOF || int64(originalDataSize) == sizeLimit+1 {
-            io.Copy(ioutil.Discard, r.Body)
-        }
-        r.Body.Close()
-        if isGzipped {
-            if unzipped, e := util.UnGzipData(data); e == nil {
-                originalDataSize = len(unzipped)
-            }
-        } else if shouldGzip, _ := util.IsGzippableFileType("", mimeType); shouldGzip {
-            if compressedData, err := util.GzipData(data); err == nil {
-                data = compressedData
-                isGzipped = true
-            }
-        }
-    }
-    if e != nil {
-        return
-    }
-
-    modifiedTime, _ = strconv.ParseUint(r.FormValue("ts"), 10, 64)
-    ttl, _ = ReadTTL(r.FormValue("ttl"))
-
-    return
-}
 func CreateNeedleFromRequest(r *http.Request, fixJpgOrientation bool, sizeLimit int64) (n *Needle, originalSize int, e error) {
-    var pairMap map[string]string
-    fname, mimeType, isGzipped, isChunkedFile := "", "", false, false
     n = new(Needle)
-    fname, n.Data, mimeType, pairMap, isGzipped, originalSize, n.LastModified, n.Ttl, isChunkedFile, e = ParseUpload(r, sizeLimit)
+    pu, e := ParseUpload(r, sizeLimit)
     if e != nil {
         return
     }
-    if len(fname) < 256 {
-        n.Name = []byte(fname)
+    n.Data = pu.Data
+    originalSize = pu.OriginalDataSize
+    n.LastModified = pu.ModifiedTime
+    n.Ttl = pu.Ttl
+
+    if len(pu.FileName) < 256 {
+        n.Name = []byte(pu.FileName)
         n.SetHasName()
     }
-    if len(mimeType) < 256 {
-        n.Mime = []byte(mimeType)
+    if len(pu.MimeType) < 256 {
+        n.Mime = []byte(pu.MimeType)
         n.SetHasMime()
     }
-    if len(pairMap) != 0 {
+    if len(pu.PairMap) != 0 {
         trimmedPairMap := make(map[string]string)
-        for k, v := range pairMap {
+        for k, v := range pu.PairMap {
             trimmedPairMap[k[len(PairNamePrefix):]] = v
         }
 
@@ -122,7 +82,7 @@ func CreateNeedleFromRequest(r *http.Request, fixJpgOrientation bool, sizeLimit
             n.SetHasPairs()
         }
     }
-    if isGzipped {
+    if pu.IsGzipped {
         n.SetGzipped()
     }
     if n.LastModified == 0 {
@@ -133,13 +93,13 @@ func CreateNeedleFromRequest(r *http.Request, fixJpgOrientation bool, sizeLimit
         n.SetHasTtl()
     }
 
-    if isChunkedFile {
+    if pu.IsChunkedFile {
         n.SetIsChunkManifest()
     }
 
     if fixJpgOrientation {
-        loweredName := strings.ToLower(fname)
-        if mimeType == "image/jpeg" || strings.HasSuffix(loweredName, ".jpg") || strings.HasSuffix(loweredName, ".jpeg") {
+        loweredName := strings.ToLower(pu.FileName)
+        if pu.MimeType == "image/jpeg" || strings.HasSuffix(loweredName, ".jpg") || strings.HasSuffix(loweredName, ".jpeg") {
             n.Data = images.FixJpgOrientation(n.Data)
         }
     }
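The change above swaps ParseUpload's ten-value return for a single ParsedUpload struct, so CreateNeedleFromRequest now reads named fields (pu.Data, pu.OriginalDataSize, pu.ModifiedTime, ...) instead of tracking loose locals. The following standalone sketch, with purely illustrative names (parsedUpload, parse) rather than the SeaweedFS API, shows the general shape of that refactor and why a struct return is easier to extend, for example with the UncompressedData field this commit adds:

package main

import "fmt"

// parsedUpload mirrors the idea of the ParsedUpload struct introduced in this
// commit: what used to be many separate return values becomes one struct,
// and new fields can be added without touching every caller.
// All names here are illustrative only.
type parsedUpload struct {
    FileName         string
    Data             []byte
    MimeType         string
    IsGzipped        bool
    OriginalDataSize int
    UncompressedData []byte // a later addition costs callers nothing
}

// parse stands in for ParseUpload: it fills the struct instead of returning a tuple.
func parse(body []byte, name string) *parsedUpload {
    return &parsedUpload{
        FileName:         name,
        Data:             body,
        OriginalDataSize: len(body),
        UncompressedData: body,
    }
}

func main() {
    pu := parse([]byte("hello"), "greeting.txt")
    // A caller, like CreateNeedleFromRequest, picks only the fields it needs.
    fmt.Println(pu.FileName, pu.OriginalDataSize)
}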
@@ -1,118 +0,0 @@
-package needle
-
-import (
-    "fmt"
-    "io"
-    "io/ioutil"
-    "mime"
-    "net/http"
-    "path"
-    "strconv"
-    "strings"
-
-    "github.com/chrislusf/seaweedfs/weed/glog"
-    "github.com/chrislusf/seaweedfs/weed/util"
-)
-
-func parseMultipart(r *http.Request, sizeLimit int64) (
-    fileName string, data []byte, mimeType string, isGzipped bool, originalDataSize int, isChunkedFile bool, e error) {
-    defer func() {
-        if e != nil && r.Body != nil {
-            io.Copy(ioutil.Discard, r.Body)
-            r.Body.Close()
-        }
-    }()
-    form, fe := r.MultipartReader()
-    if fe != nil {
-        glog.V(0).Infoln("MultipartReader [ERROR]", fe)
-        e = fe
-        return
-    }
-
-    // first multi-part item
-    part, fe := form.NextPart()
-    if fe != nil {
-        glog.V(0).Infoln("Reading Multi part [ERROR]", fe)
-        e = fe
-        return
-    }
-
-    fileName = part.FileName()
-    if fileName != "" {
-        fileName = path.Base(fileName)
-    }
-
-    data, e = ioutil.ReadAll(io.LimitReader(part, sizeLimit+1))
-    if e != nil {
-        glog.V(0).Infoln("Reading Content [ERROR]", e)
-        return
-    }
-    if len(data) == int(sizeLimit)+1 {
-        e = fmt.Errorf("file over the limited %d bytes", sizeLimit)
-        return
-    }
-
-    // if the filename is empty string, do a search on the other multi-part items
-    for fileName == "" {
-        part2, fe := form.NextPart()
-        if fe != nil {
-            break // no more or on error, just safely break
-        }
-
-        fName := part2.FileName()
-
-        // found the first <file type> multi-part has filename
-        if fName != "" {
-            data2, fe2 := ioutil.ReadAll(io.LimitReader(part2, sizeLimit+1))
-            if fe2 != nil {
-                glog.V(0).Infoln("Reading Content [ERROR]", fe2)
-                e = fe2
-                return
-            }
-            if len(data) == int(sizeLimit)+1 {
-                e = fmt.Errorf("file over the limited %d bytes", sizeLimit)
-                return
-            }
-
-            // update
-            data = data2
-            fileName = path.Base(fName)
-            break
-        }
-    }
-
-    originalDataSize = len(data)
-
-    isChunkedFile, _ = strconv.ParseBool(r.FormValue("cm"))
-
-    if !isChunkedFile {
-
-        dotIndex := strings.LastIndex(fileName, ".")
-        ext, mtype := "", ""
-        if dotIndex > 0 {
-            ext = strings.ToLower(fileName[dotIndex:])
-            mtype = mime.TypeByExtension(ext)
-        }
-        contentType := part.Header.Get("Content-Type")
-        if contentType != "" && mtype != contentType {
-            mimeType = contentType // only return mime type if not deductable
-            mtype = contentType
-        }
-
-        if part.Header.Get("Content-Encoding") == "gzip" {
-            if unzipped, e := util.UnGzipData(data); e == nil {
-                originalDataSize = len(unzipped)
-            }
-            isGzipped = true
-        } else if util.IsGzippable(ext, mtype, data) {
-            if compressedData, err := util.GzipData(data); err == nil {
-                if len(data) > len(compressedData) {
-                    data = compressedData
-                    isGzipped = true
-                }
-            }
-        }
-    }
-
-    return
-}
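Both the deleted parseMultipart above and its replacement below guard uploads with the same idiom: read at most sizeLimit+1 bytes through io.LimitReader and treat a read of exactly sizeLimit+1 bytes as an oversized body. Below is a minimal, self-contained sketch of that idiom; readWithLimit is a hypothetical helper written for illustration, not a SeaweedFS function:

package main

import (
    "bytes"
    "fmt"
    "io"
    "io/ioutil"
)

// readWithLimit reads up to sizeLimit+1 bytes; getting exactly sizeLimit+1
// bytes back means the underlying body was larger than the allowed limit.
func readWithLimit(r io.Reader, sizeLimit int64) ([]byte, error) {
    data, err := ioutil.ReadAll(io.LimitReader(r, sizeLimit+1))
    if err != nil {
        return nil, err
    }
    if int64(len(data)) == sizeLimit+1 {
        return nil, fmt.Errorf("file over the limited %d bytes", sizeLimit)
    }
    return data, nil
}

func main() {
    small := bytes.NewReader(make([]byte, 10))
    big := bytes.NewReader(make([]byte, 100))
    if _, err := readWithLimit(small, 50); err == nil {
        fmt.Println("small body accepted")
    }
    if _, err := readWithLimit(big, 50); err != nil {
        fmt.Println("large body rejected:", err)
    }
}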
weed/storage/needle/needle_parse_upload.go (new file, 166 lines)
@@ -0,0 +1,166 @@
+package needle
+
+import (
+    "fmt"
+    "io"
+    "io/ioutil"
+    "mime"
+    "net/http"
+    "path"
+    "strconv"
+    "strings"
+
+    "github.com/chrislusf/seaweedfs/weed/glog"
+    "github.com/chrislusf/seaweedfs/weed/util"
+)
+
+type ParsedUpload struct {
+    FileName         string
+    Data             []byte
+    MimeType         string
+    PairMap          map[string]string
+    IsGzipped        bool
+    OriginalDataSize int
+    ModifiedTime     uint64
+    Ttl              *TTL
+    IsChunkedFile    bool
+    UncompressedData []byte
+}
+
+func ParseUpload(r *http.Request, sizeLimit int64) (pu *ParsedUpload, e error) {
+    pu = &ParsedUpload{}
+    pu.PairMap = make(map[string]string)
+    for k, v := range r.Header {
+        if len(v) > 0 && strings.HasPrefix(k, PairNamePrefix) {
+            pu.PairMap[k] = v[0]
+        }
+    }
+
+    if r.Method == "POST" {
+        e = parseMultipart(r, sizeLimit, pu)
+    } else {
+        e = parsePut(r, sizeLimit, pu)
+    }
+    if e != nil {
+        return
+    }
+
+    pu.ModifiedTime, _ = strconv.ParseUint(r.FormValue("ts"), 10, 64)
+    pu.Ttl, _ = ReadTTL(r.FormValue("ttl"))
+
+    pu.OriginalDataSize = len(pu.Data)
+    pu.UncompressedData = pu.Data
+    if pu.IsGzipped {
+        if unzipped, e := util.UnGzipData(pu.Data); e == nil {
+            pu.OriginalDataSize = len(unzipped)
+            pu.UncompressedData = unzipped
+        }
+    } else if shouldGzip, _ := util.IsGzippableFileType("", pu.MimeType); shouldGzip {
+        if compressedData, err := util.GzipData(pu.Data); err == nil {
+            pu.Data = compressedData
+            pu.IsGzipped = true
+        }
+    }
+
+    return
+}
+
+func parsePut(r *http.Request, sizeLimit int64, pu *ParsedUpload) (e error) {
+    pu.IsGzipped = r.Header.Get("Content-Encoding") == "gzip"
+    pu.MimeType = r.Header.Get("Content-Type")
+    pu.FileName = ""
+    pu.Data, e = ioutil.ReadAll(io.LimitReader(r.Body, sizeLimit+1))
+    if e == io.EOF || int64(pu.OriginalDataSize) == sizeLimit+1 {
+        io.Copy(ioutil.Discard, r.Body)
+    }
+    r.Body.Close()
+    return nil
+}
+
+func parseMultipart(r *http.Request, sizeLimit int64, pu *ParsedUpload) (e error) {
+    defer func() {
+        if e != nil && r.Body != nil {
+            io.Copy(ioutil.Discard, r.Body)
+            r.Body.Close()
+        }
+    }()
+    form, fe := r.MultipartReader()
+    if fe != nil {
+        glog.V(0).Infoln("MultipartReader [ERROR]", fe)
+        e = fe
+        return
+    }
+
+    // first multi-part item
+    part, fe := form.NextPart()
+    if fe != nil {
+        glog.V(0).Infoln("Reading Multi part [ERROR]", fe)
+        e = fe
+        return
+    }
+
+    pu.FileName = part.FileName()
+    if pu.FileName != "" {
+        pu.FileName = path.Base(pu.FileName)
+    }
+
+    pu.Data, e = ioutil.ReadAll(io.LimitReader(part, sizeLimit+1))
+    if e != nil {
+        glog.V(0).Infoln("Reading Content [ERROR]", e)
+        return
+    }
+    if len(pu.Data) == int(sizeLimit)+1 {
+        e = fmt.Errorf("file over the limited %d bytes", sizeLimit)
+        return
+    }
+
+    // if the filename is empty string, do a search on the other multi-part items
+    for pu.FileName == "" {
+        part2, fe := form.NextPart()
+        if fe != nil {
+            break // no more or on error, just safely break
+        }
+
+        fName := part2.FileName()
+
+        // found the first <file type> multi-part has filename
+        if fName != "" {
+            data2, fe2 := ioutil.ReadAll(io.LimitReader(part2, sizeLimit+1))
+            if fe2 != nil {
+                glog.V(0).Infoln("Reading Content [ERROR]", fe2)
+                e = fe2
+                return
+            }
+            if len(data2) == int(sizeLimit)+1 {
+                e = fmt.Errorf("file over the limited %d bytes", sizeLimit)
+                return
+            }
+
+            // update
+            pu.Data = data2
+            pu.FileName = path.Base(fName)
+            break
+        }
+    }
+
+    pu.IsChunkedFile, _ = strconv.ParseBool(r.FormValue("cm"))
+
+    if !pu.IsChunkedFile {
+
+        dotIndex := strings.LastIndex(pu.FileName, ".")
+        ext, mtype := "", ""
+        if dotIndex > 0 {
+            ext = strings.ToLower(pu.FileName[dotIndex:])
+            mtype = mime.TypeByExtension(ext)
+        }
+        contentType := part.Header.Get("Content-Type")
+        if contentType != "" && contentType != "application/octet-stream" && mtype != contentType {
+            pu.MimeType = contentType // only return mime type if not deductable
+            mtype = contentType
+        }
+
+        pu.IsGzipped = part.Header.Get("Content-Encoding") == "gzip"
+    }
+
+    return
+}
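As a rough orientation for the new entry point, here is an illustrative test sketch (not part of this commit) that drives ParseUpload through the parsePut path with a plain PUT body; it assumes it sits in the needle package shown above and uses only net/http/httptest from the standard library. The test name, URL, and values are made up for the example.

package needle

import (
    "bytes"
    "net/http/httptest"
    "testing"
)

// TestParseUploadPut exercises the non-multipart path: the raw body lands in
// pu.Data, the Content-Type header becomes pu.MimeType, and OriginalDataSize
// is the size of the uploaded bytes before any re-compression.
func TestParseUploadPut(t *testing.T) {
    body := bytes.NewReader([]byte("hello world"))
    r := httptest.NewRequest("PUT", "/5,01637037d6", body)
    r.Header.Set("Content-Type", "text/plain")

    pu, err := ParseUpload(r, 1024*1024)
    if err != nil {
        t.Fatal(err)
    }
    if pu.MimeType != "text/plain" {
        t.Fatalf("unexpected mime type %q", pu.MimeType)
    }
    if pu.OriginalDataSize != len("hello world") {
        t.Fatalf("unexpected original size %d", pu.OriginalDataSize)
    }
}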