about summary refs log tree commit diff
diff options
context:
space:
mode:
authorChris Lu <chris.lu@gmail.com>2020-03-07 06:06:58 -0800
committerChris Lu <chris.lu@gmail.com>2020-03-07 06:08:08 -0800
commitea1169dc8021172a5d14e618b041efb56db98de5 (patch)
treec94b032402d2c7726b4992d3621f671d5758482c
parente3b8bf5588835621db05d4572d0960ac7c9d3976 (diff)
downloadseaweedfs-ea1169dc8021172a5d14e618b041efb56db98de5.tar.xz
seaweedfs-ea1169dc8021172a5d14e618b041efb56db98de5.zip
filer cipher: single chunk http POST and PUT and read
-rw-r--r--weed/operation/upload_content.go12
-rw-r--r--weed/server/common.go14
-rw-r--r--weed/server/filer_server_handlers_read.go11
-rw-r--r--weed/server/filer_server_handlers_write.go19
-rw-r--r--weed/server/filer_server_handlers_write_autochunk.go28
-rw-r--r--weed/server/filer_server_handlers_write_cipher.go103
-rw-r--r--weed/storage/needle/needle.go74
-rw-r--r--weed/storage/needle/needle_parse_multipart.go118
-rw-r--r--weed/storage/needle/needle_parse_upload.go166
-rw-r--r--weed/util/cipher.go21
10 files changed, 354 insertions, 212 deletions
diff --git a/weed/operation/upload_content.go b/weed/operation/upload_content.go
index ba15aea78..884933f18 100644
--- a/weed/operation/upload_content.go
+++ b/weed/operation/upload_content.go
@@ -69,18 +69,12 @@ func doUpload(uploadUrl string, filename string, cipher bool, reader io.Reader,
// encrypt data
var cipherKey util.CipherKey
var clearDataLen int
+ var err error
if cipher {
- clearData, err := ioutil.ReadAll(reader)
+ cipherKey, reader, clearDataLen, _, err = util.EncryptReader(reader)
if err != nil {
- return nil, fmt.Errorf("read raw input: %v", err)
+ return nil, err
}
- clearDataLen = len(clearData)
- cipherKey = util.GenCipherKey()
- encryptedData, err := util.Encrypt(clearData, cipherKey)
- if err != nil {
- return nil, fmt.Errorf("encrypt input: %v", err)
- }
- reader = bytes.NewReader(encryptedData)
}
// upload data
diff --git a/weed/server/common.go b/weed/server/common.go
index d7ab8d1ee..f88533c24 100644
--- a/weed/server/common.go
+++ b/weed/server/common.go
@@ -99,13 +99,13 @@ func submitForClientHandler(w http.ResponseWriter, r *http.Request, masterUrl st
}
debug("parsing upload file...")
- fname, data, mimeType, pairMap, isGzipped, originalDataSize, lastModified, _, _, pe := needle.ParseUpload(r, 256*1024*1024)
+ pu, pe := needle.ParseUpload(r, 256*1024*1024)
if pe != nil {
writeJsonError(w, r, http.StatusBadRequest, pe)
return
}
- debug("assigning file id for", fname)
+ debug("assigning file id for", pu.FileName)
r.ParseForm()
count := uint64(1)
if r.FormValue("count") != "" {
@@ -129,21 +129,21 @@ func submitForClientHandler(w http.ResponseWriter, r *http.Request, masterUrl st
}
url := "http://" + assignResult.Url + "/" + assignResult.Fid
- if lastModified != 0 {
- url = url + "?ts=" + strconv.FormatUint(lastModified, 10)
+ if pu.ModifiedTime != 0 {
+ url = url + "?ts=" + strconv.FormatUint(pu.ModifiedTime, 10)
}
debug("upload file to store", url)
- uploadResult, err := operation.Upload(url, fname, false, bytes.NewReader(data), isGzipped, mimeType, pairMap, assignResult.Auth)
+ uploadResult, err := operation.Upload(url, pu.FileName, false, bytes.NewReader(pu.Data), pu.IsGzipped, pu.MimeType, pu.PairMap, assignResult.Auth)
if err != nil {
writeJsonError(w, r, http.StatusInternalServerError, err)
return
}
- m["fileName"] = fname
+ m["fileName"] = pu.FileName
m["fid"] = assignResult.Fid
m["fileUrl"] = assignResult.PublicUrl + "/" + assignResult.Fid
- m["size"] = originalDataSize
+ m["size"] = pu.OriginalDataSize
m["eTag"] = uploadResult.ETag
writeJsonQuiet(w, r, http.StatusCreated, m)
return
diff --git a/weed/server/filer_server_handlers_read.go b/weed/server/filer_server_handlers_read.go
index aff2b9159..796fd9c1c 100644
--- a/weed/server/filer_server_handlers_read.go
+++ b/weed/server/filer_server_handlers_read.go
@@ -2,6 +2,7 @@ package weed_server
import (
"context"
+ "fmt"
"io"
"io/ioutil"
"mime"
@@ -14,7 +15,6 @@ import (
"github.com/chrislusf/seaweedfs/weed/filer2"
"github.com/chrislusf/seaweedfs/weed/glog"
- "github.com/chrislusf/seaweedfs/weed/pb/filer_pb"
"github.com/chrislusf/seaweedfs/weed/stats"
"github.com/chrislusf/seaweedfs/weed/util"
)
@@ -136,15 +136,16 @@ func (fs *FilerServer) handleSingleChunk(w http.ResponseWriter, r *http.Request,
if entry.Attr.Mime != "" {
w.Header().Set("Content-Type", entry.Attr.Mime)
}
- w.WriteHeader(resp.StatusCode)
if entry.Chunks[0].CipherKey == nil {
+ w.WriteHeader(resp.StatusCode)
io.Copy(w, resp.Body)
} else {
- fs.writeEncryptedChunk(w, resp, entry.Chunks[0])
+ fs.writeEncryptedChunk(w, resp, entry)
}
}
-func (fs *FilerServer) writeEncryptedChunk(w http.ResponseWriter, resp *http.Response, chunk *filer_pb.FileChunk) {
+func (fs *FilerServer) writeEncryptedChunk(w http.ResponseWriter, resp *http.Response, entry *filer2.Entry) {
+ chunk := entry.Chunks[0]
encryptedData, err := ioutil.ReadAll(resp.Body)
if err != nil {
glog.V(1).Infof("read encrypted %s failed, err: %v", chunk.FileId, err)
@@ -157,6 +158,8 @@ func (fs *FilerServer) writeEncryptedChunk(w http.ResponseWriter, resp *http.Res
w.WriteHeader(http.StatusNotFound)
return
}
+ w.Header().Set("Content-Length", fmt.Sprintf("%d", chunk.Size))
+ w.WriteHeader(resp.StatusCode)
w.Write(decryptedData)
}
diff --git a/weed/server/filer_server_handlers_write.go b/weed/server/filer_server_handlers_write.go
index bb5f28663..b36333447 100644
--- a/weed/server/filer_server_handlers_write.go
+++ b/weed/server/filer_server_handlers_write.go
@@ -90,10 +90,22 @@ func (fs *FilerServer) PostHandler(w http.ResponseWriter, r *http.Request) {
return
}
+ if fs.option.Cipher {
+ reply, err := fs.encrypt(ctx, w, r, replication, collection, dataCenter)
+ if err != nil {
+ writeJsonError(w, r, http.StatusInternalServerError, err)
+ } else if reply != nil {
+ writeJsonQuiet(w, r, http.StatusCreated, reply)
+ }
+
+ return
+ }
+
fileId, urlLocation, auth, err := fs.assignNewFileInfo(w, r, replication, collection, dataCenter)
if err != nil || fileId == "" || urlLocation == "" {
glog.V(0).Infof("fail to allocate volume for %s, collection:%s, datacenter:%s", r.URL.Path, collection, dataCenter)
+ writeJsonError(w, r, http.StatusInternalServerError, fmt.Errorf("fail to allocate volume for %s, collection:%s, datacenter:%s", r.URL.Path, collection, dataCenter))
return
}
@@ -134,7 +146,7 @@ func (fs *FilerServer) PostHandler(w http.ResponseWriter, r *http.Request) {
// update metadata in filer store
func (fs *FilerServer) updateFilerStore(ctx context.Context, r *http.Request, w http.ResponseWriter,
- replication string, collection string, ret operation.UploadResult, fileId string) (err error) {
+ replication string, collection string, ret *operation.UploadResult, fileId string) (err error) {
stats.FilerRequestCounter.WithLabelValues("postStoreWrite").Inc()
start := time.Now()
@@ -198,12 +210,14 @@ func (fs *FilerServer) updateFilerStore(ctx context.Context, r *http.Request, w
}
// send request to volume server
-func (fs *FilerServer) uploadToVolumeServer(r *http.Request, u *url.URL, auth security.EncodedJwt, w http.ResponseWriter, fileId string) (ret operation.UploadResult, err error) {
+func (fs *FilerServer) uploadToVolumeServer(r *http.Request, u *url.URL, auth security.EncodedJwt, w http.ResponseWriter, fileId string) (ret *operation.UploadResult, err error) {
stats.FilerRequestCounter.WithLabelValues("postUpload").Inc()
start := time.Now()
defer func() { stats.FilerRequestHistogram.WithLabelValues("postUpload").Observe(time.Since(start).Seconds()) }()
+ ret = &operation.UploadResult{}
+
request := &http.Request{
Method: r.Method,
URL: u,
@@ -215,6 +229,7 @@ func (fs *FilerServer) uploadToVolumeServer(r *http.Request, u *url.URL, auth se
Host: r.Host,
ContentLength: r.ContentLength,
}
+
if auth != "" {
request.Header.Set("Authorization", "BEARER "+string(auth))
}
diff --git a/weed/server/filer_server_handlers_write_autochunk.go b/weed/server/filer_server_handlers_write_autochunk.go
index c8eadf82a..1c7891353 100644
--- a/weed/server/filer_server_handlers_write_autochunk.go
+++ b/weed/server/filer_server_handlers_write_autochunk.go
@@ -103,33 +103,35 @@ func (fs *FilerServer) doAutoChunk(ctx context.Context, w http.ResponseWriter, r
// upload the chunk to the volume server
chunkName := fileName + "_chunk_" + strconv.FormatInt(int64(len(fileChunks)+1), 10)
- uploadedSize, uploadErr := fs.doUpload(urlLocation, w, r, limitedReader, chunkName, "", fileId, auth)
+ uploadResult, uploadErr := fs.doUpload(urlLocation, w, r, limitedReader, chunkName, "", fileId, auth)
if uploadErr != nil {
return nil, uploadErr
}
// if last chunk exhausted the reader exactly at the border
- if uploadedSize == 0 {
+ if uploadResult.Size == 0 {
break
}
// Save to chunk manifest structure
fileChunks = append(fileChunks,
&filer_pb.FileChunk{
- FileId: fileId,
- Offset: chunkOffset,
- Size: uint64(uploadedSize),
- Mtime: time.Now().UnixNano(),
+ FileId: fileId,
+ Offset: chunkOffset,
+ Size: uint64(uploadResult.Size),
+ Mtime: time.Now().UnixNano(),
+ ETag: uploadResult.ETag,
+ CipherKey: uploadResult.CipherKey,
},
)
- glog.V(4).Infof("uploaded %s chunk %d to %s [%d,%d) of %d", fileName, len(fileChunks), fileId, chunkOffset, chunkOffset+int64(uploadedSize), contentLength)
+ glog.V(4).Infof("uploaded %s chunk %d to %s [%d,%d) of %d", fileName, len(fileChunks), fileId, chunkOffset, chunkOffset+int64(uploadResult.Size), contentLength)
// reset variables for the next chunk
- chunkOffset = chunkOffset + int64(uploadedSize)
+ chunkOffset = chunkOffset + int64(uploadResult.Size)
// if last chunk was not at full chunk size, but already exhausted the reader
- if uploadedSize < int64(chunkSize) {
+ if int64(uploadResult.Size) < int64(chunkSize) {
break
}
}
@@ -174,7 +176,7 @@ func (fs *FilerServer) doAutoChunk(ctx context.Context, w http.ResponseWriter, r
}
func (fs *FilerServer) doUpload(urlLocation string, w http.ResponseWriter, r *http.Request,
- limitedReader io.Reader, fileName string, contentType string, fileId string, auth security.EncodedJwt) (size int64, err error) {
+ limitedReader io.Reader, fileName string, contentType string, fileId string, auth security.EncodedJwt) (*operation.UploadResult, error) {
stats.FilerRequestCounter.WithLabelValues("postAutoChunkUpload").Inc()
start := time.Now()
@@ -182,9 +184,5 @@ func (fs *FilerServer) doUpload(urlLocation string, w http.ResponseWriter, r *ht
stats.FilerRequestHistogram.WithLabelValues("postAutoChunkUpload").Observe(time.Since(start).Seconds())
}()
- uploadResult, uploadError := operation.Upload(urlLocation, fileName, fs.option.Cipher, limitedReader, false, contentType, nil, auth)
- if uploadError != nil {
- return 0, uploadError
- }
- return int64(uploadResult.Size), nil
+ return operation.Upload(urlLocation, fileName, fs.option.Cipher, limitedReader, false, contentType, nil, auth)
}
diff --git a/weed/server/filer_server_handlers_write_cipher.go b/weed/server/filer_server_handlers_write_cipher.go
new file mode 100644
index 000000000..e65915971
--- /dev/null
+++ b/weed/server/filer_server_handlers_write_cipher.go
@@ -0,0 +1,103 @@
+package weed_server
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "net/http"
+ "strings"
+ "time"
+
+ "github.com/chrislusf/seaweedfs/weed/filer2"
+ "github.com/chrislusf/seaweedfs/weed/glog"
+ "github.com/chrislusf/seaweedfs/weed/operation"
+ "github.com/chrislusf/seaweedfs/weed/pb/filer_pb"
+ "github.com/chrislusf/seaweedfs/weed/storage/needle"
+ "github.com/chrislusf/seaweedfs/weed/util"
+)
+
+// handling single chunk POST or PUT upload
+func (fs *FilerServer) encrypt(ctx context.Context, w http.ResponseWriter, r *http.Request,
+ replication string, collection string, dataCenter string) (filerResult *FilerPostResult, err error) {
+
+ fileId, urlLocation, auth, err := fs.assignNewFileInfo(w, r, replication, collection, dataCenter)
+
+ if err != nil || fileId == "" || urlLocation == "" {
+ return nil, fmt.Errorf("fail to allocate volume for %s, collection:%s, datacenter:%s", r.URL.Path, collection, dataCenter)
+ }
+
+ glog.V(4).Infof("write %s to %v", r.URL.Path, urlLocation)
+
+ // Note: gzip(cipher(data)), cipher data first, then gzip
+
+ sizeLimit := int64(fs.option.MaxMB) * 1024 * 1024
+
+ pu, err := needle.ParseUpload(r, sizeLimit)
+ data := pu.Data
+ uncompressedData := pu.Data
+ cipherKey := util.GenCipherKey()
+ if pu.IsGzipped {
+ uncompressedData = pu.UncompressedData
+ data, err = util.Encrypt(pu.UncompressedData, cipherKey)
+ if err != nil {
+ return nil, fmt.Errorf("encrypt input: %v", err)
+ }
+ }
+ if pu.MimeType == "" {
+ pu.MimeType = http.DetectContentType(uncompressedData)
+ }
+
+ uploadResult, uploadError := operation.Upload(urlLocation, pu.FileName, true, bytes.NewReader(data), pu.IsGzipped, "", pu.PairMap, auth)
+ if uploadError != nil {
+ return nil, fmt.Errorf("upload to volume server: %v", uploadError)
+ }
+
+ // Save to chunk manifest structure
+ fileChunks := []*filer_pb.FileChunk{
+ {
+ FileId: fileId,
+ Offset: 0,
+ Size: uint64(uploadResult.Size),
+ Mtime: time.Now().UnixNano(),
+ ETag: uploadResult.ETag,
+ CipherKey: uploadResult.CipherKey,
+ },
+ }
+
+ path := r.URL.Path
+ if strings.HasSuffix(path, "/") {
+ if pu.FileName != "" {
+ path += pu.FileName
+ }
+ }
+
+ entry := &filer2.Entry{
+ FullPath: filer2.FullPath(path),
+ Attr: filer2.Attr{
+ Mtime: time.Now(),
+ Crtime: time.Now(),
+ Mode: 0660,
+ Uid: OS_UID,
+ Gid: OS_GID,
+ Replication: replication,
+ Collection: collection,
+ TtlSec: int32(util.ParseInt(r.URL.Query().Get("ttl"), 0)),
+ Mime: pu.MimeType,
+ },
+ Chunks: fileChunks,
+ }
+
+ filerResult = &FilerPostResult{
+ Name: pu.FileName,
+ Size: int64(pu.OriginalDataSize),
+ }
+
+ if dbErr := fs.filer.CreateEntry(ctx, entry, false); dbErr != nil {
+ fs.filer.DeleteChunks(entry.Chunks)
+ err = dbErr
+ filerResult.Error = dbErr.Error()
+ return
+ }
+
+ return
+}
diff --git a/weed/storage/needle/needle.go b/weed/storage/needle/needle.go
index 022e8bf14..f906062de 100644
--- a/weed/storage/needle/needle.go
+++ b/weed/storage/needle/needle.go
@@ -3,8 +3,6 @@ package needle
import (
"encoding/json"
"fmt"
- "io"
- "io/ioutil"
"net/http"
"strconv"
"strings"
@@ -12,7 +10,6 @@ import (
"github.com/chrislusf/seaweedfs/weed/images"
. "github.com/chrislusf/seaweedfs/weed/storage/types"
- "github.com/chrislusf/seaweedfs/weed/util"
)
const (
@@ -51,67 +48,30 @@ func (n *Needle) String() (str string) {
return
}
-func ParseUpload(r *http.Request, sizeLimit int64) (
- fileName string, data []byte, mimeType string, pairMap map[string]string, isGzipped bool, originalDataSize int,
- modifiedTime uint64, ttl *TTL, isChunkedFile bool, e error) {
- pairMap = make(map[string]string)
- for k, v := range r.Header {
- if len(v) > 0 && strings.HasPrefix(k, PairNamePrefix) {
- pairMap[k] = v[0]
- }
- }
-
- if r.Method == "POST" {
- fileName, data, mimeType, isGzipped, originalDataSize, isChunkedFile, e = parseMultipart(r, sizeLimit)
- } else {
- isGzipped = r.Header.Get("Content-Encoding") == "gzip"
- mimeType = r.Header.Get("Content-Type")
- fileName = ""
- data, e = ioutil.ReadAll(io.LimitReader(r.Body, sizeLimit+1))
- originalDataSize = len(data)
- if e == io.EOF || int64(originalDataSize) == sizeLimit+1 {
- io.Copy(ioutil.Discard, r.Body)
- }
- r.Body.Close()
- if isGzipped {
- if unzipped, e := util.UnGzipData(data); e == nil {
- originalDataSize = len(unzipped)
- }
- } else if shouldGzip, _ := util.IsGzippableFileType("", mimeType); shouldGzip {
- if compressedData, err := util.GzipData(data); err == nil {
- data = compressedData
- isGzipped = true
- }
- }
- }
- if e != nil {
- return
- }
- modifiedTime, _ = strconv.ParseUint(r.FormValue("ts"), 10, 64)
- ttl, _ = ReadTTL(r.FormValue("ttl"))
-
- return
-}
func CreateNeedleFromRequest(r *http.Request, fixJpgOrientation bool, sizeLimit int64) (n *Needle, originalSize int, e error) {
- var pairMap map[string]string
- fname, mimeType, isGzipped, isChunkedFile := "", "", false, false
n = new(Needle)
- fname, n.Data, mimeType, pairMap, isGzipped, originalSize, n.LastModified, n.Ttl, isChunkedFile, e = ParseUpload(r, sizeLimit)
+ pu, e := ParseUpload(r, sizeLimit)
if e != nil {
return
}
- if len(fname) < 256 {
- n.Name = []byte(fname)
+ n.Data = pu.Data
+ originalSize = pu.OriginalDataSize
+ n.LastModified = pu.ModifiedTime
+ n.Ttl = pu.Ttl
+
+
+ if len(pu.FileName) < 256 {
+ n.Name = []byte(pu.FileName)
n.SetHasName()
}
- if len(mimeType) < 256 {
- n.Mime = []byte(mimeType)
+ if len(pu.MimeType) < 256 {
+ n.Mime = []byte(pu.MimeType)
n.SetHasMime()
}
- if len(pairMap) != 0 {
+ if len(pu.PairMap) != 0 {
trimmedPairMap := make(map[string]string)
- for k, v := range pairMap {
+ for k, v := range pu.PairMap {
trimmedPairMap[k[len(PairNamePrefix):]] = v
}
@@ -122,7 +82,7 @@ func CreateNeedleFromRequest(r *http.Request, fixJpgOrientation bool, sizeLimit
n.SetHasPairs()
}
}
- if isGzipped {
+ if pu.IsGzipped {
n.SetGzipped()
}
if n.LastModified == 0 {
@@ -133,13 +93,13 @@ func CreateNeedleFromRequest(r *http.Request, fixJpgOrientation bool, sizeLimit
n.SetHasTtl()
}
- if isChunkedFile {
+ if pu.IsChunkedFile {
n.SetIsChunkManifest()
}
if fixJpgOrientation {
- loweredName := strings.ToLower(fname)
- if mimeType == "image/jpeg" || strings.HasSuffix(loweredName, ".jpg") || strings.HasSuffix(loweredName, ".jpeg") {
+ loweredName := strings.ToLower(pu.FileName)
+ if pu.MimeType == "image/jpeg" || strings.HasSuffix(loweredName, ".jpg") || strings.HasSuffix(loweredName, ".jpeg") {
n.Data = images.FixJpgOrientation(n.Data)
}
}
diff --git a/weed/storage/needle/needle_parse_multipart.go b/weed/storage/needle/needle_parse_multipart.go
deleted file mode 100644
index 8c9032f5f..000000000
--- a/weed/storage/needle/needle_parse_multipart.go
+++ /dev/null
@@ -1,118 +0,0 @@
-package needle
-
-import (
- "fmt"
- "io"
- "io/ioutil"
- "mime"
- "net/http"
- "path"
- "strconv"
- "strings"
-
- "github.com/chrislusf/seaweedfs/weed/glog"
- "github.com/chrislusf/seaweedfs/weed/util"
-)
-
-func parseMultipart(r *http.Request, sizeLimit int64) (
- fileName string, data []byte, mimeType string, isGzipped bool, originalDataSize int, isChunkedFile bool, e error) {
- defer func() {
- if e != nil && r.Body != nil {
- io.Copy(ioutil.Discard, r.Body)
- r.Body.Close()
- }
- }()
- form, fe := r.MultipartReader()
- if fe != nil {
- glog.V(0).Infoln("MultipartReader [ERROR]", fe)
- e = fe
- return
- }
-
- //first multi-part item
- part, fe := form.NextPart()
- if fe != nil {
- glog.V(0).Infoln("Reading Multi part [ERROR]", fe)
- e = fe
- return
- }
-
- fileName = part.FileName()
- if fileName != "" {
- fileName = path.Base(fileName)
- }
-
- data, e = ioutil.ReadAll(io.LimitReader(part, sizeLimit+1))
- if e != nil {
- glog.V(0).Infoln("Reading Content [ERROR]", e)
- return
- }
- if len(data) == int(sizeLimit)+1 {
- e = fmt.Errorf("file over the limited %d bytes", sizeLimit)
- return
- }
-
- //if the filename is empty string, do a search on the other multi-part items
- for fileName == "" {
- part2, fe := form.NextPart()
- if fe != nil {
- break // no more or on error, just safely break
- }
-
- fName := part2.FileName()
-
- //found the first <file type> multi-part has filename
- if fName != "" {
- data2, fe2 := ioutil.ReadAll(io.LimitReader(part2, sizeLimit+1))
- if fe2 != nil {
- glog.V(0).Infoln("Reading Content [ERROR]", fe2)
- e = fe2
- return
- }
- if len(data) == int(sizeLimit)+1 {
- e = fmt.Errorf("file over the limited %d bytes", sizeLimit)
- return
- }
-
- //update
- data = data2
- fileName = path.Base(fName)
- break
- }
- }
-
- originalDataSize = len(data)
-
- isChunkedFile, _ = strconv.ParseBool(r.FormValue("cm"))
-
- if !isChunkedFile {
-
- dotIndex := strings.LastIndex(fileName, ".")
- ext, mtype := "", ""
- if dotIndex > 0 {
- ext = strings.ToLower(fileName[dotIndex:])
- mtype = mime.TypeByExtension(ext)
- }
- contentType := part.Header.Get("Content-Type")
- if contentType != "" && mtype != contentType {
- mimeType = contentType //only return mime type if not deductable
- mtype = contentType
- }
-
- if part.Header.Get("Content-Encoding") == "gzip" {
- if unzipped, e := util.UnGzipData(data); e == nil {
- originalDataSize = len(unzipped)
- }
- isGzipped = true
- } else if util.IsGzippable(ext, mtype, data) {
- if compressedData, err := util.GzipData(data); err == nil {
- if len(data) > len(compressedData) {
- data = compressedData
- isGzipped = true
- }
- }
- }
- }
-
- return
-}
diff --git a/weed/storage/needle/needle_parse_upload.go b/weed/storage/needle/needle_parse_upload.go
new file mode 100644
index 000000000..85526aaa8
--- /dev/null
+++ b/weed/storage/needle/needle_parse_upload.go
@@ -0,0 +1,166 @@
+package needle
+
+import (
+ "fmt"
+ "io"
+ "io/ioutil"
+ "mime"
+ "net/http"
+ "path"
+ "strconv"
+ "strings"
+
+ "github.com/chrislusf/seaweedfs/weed/glog"
+ "github.com/chrislusf/seaweedfs/weed/util"
+)
+
+type ParsedUpload struct {
+ FileName string
+ Data []byte
+ MimeType string
+ PairMap map[string]string
+ IsGzipped bool
+ OriginalDataSize int
+ ModifiedTime uint64
+ Ttl *TTL
+ IsChunkedFile bool
+ UncompressedData []byte
+}
+
+func ParseUpload(r *http.Request, sizeLimit int64) (pu *ParsedUpload, e error) {
+ pu = &ParsedUpload{}
+ pu.PairMap = make(map[string]string)
+ for k, v := range r.Header {
+ if len(v) > 0 && strings.HasPrefix(k, PairNamePrefix) {
+ pu.PairMap[k] = v[0]
+ }
+ }
+
+ if r.Method == "POST" {
+ e = parseMultipart(r, sizeLimit, pu)
+ } else {
+ e = parsePut(r, sizeLimit, pu)
+ }
+ if e != nil {
+ return
+ }
+
+ pu.ModifiedTime, _ = strconv.ParseUint(r.FormValue("ts"), 10, 64)
+ pu.Ttl, _ = ReadTTL(r.FormValue("ttl"))
+
+ pu.OriginalDataSize = len(pu.Data)
+ pu.UncompressedData = pu.Data
+ if pu.IsGzipped {
+ if unzipped, e := util.UnGzipData(pu.Data); e == nil {
+ pu.OriginalDataSize = len(unzipped)
+ pu.UncompressedData = unzipped
+ }
+ } else if shouldGzip, _ := util.IsGzippableFileType("", pu.MimeType); shouldGzip {
+ if compressedData, err := util.GzipData(pu.Data); err == nil {
+ pu.Data = compressedData
+ pu.IsGzipped = true
+ }
+ }
+
+ return
+}
+
+func parsePut(r *http.Request, sizeLimit int64, pu *ParsedUpload) (e error) {
+ pu.IsGzipped = r.Header.Get("Content-Encoding") == "gzip"
+ pu.MimeType = r.Header.Get("Content-Type")
+ pu.FileName = ""
+ pu.Data, e = ioutil.ReadAll(io.LimitReader(r.Body, sizeLimit+1))
+ if e == io.EOF || int64(pu.OriginalDataSize) == sizeLimit+1 {
+ io.Copy(ioutil.Discard, r.Body)
+ }
+ r.Body.Close()
+ return nil
+}
+
+func parseMultipart(r *http.Request, sizeLimit int64, pu *ParsedUpload) (e error) {
+ defer func() {
+ if e != nil && r.Body != nil {
+ io.Copy(ioutil.Discard, r.Body)
+ r.Body.Close()
+ }
+ }()
+ form, fe := r.MultipartReader()
+ if fe != nil {
+ glog.V(0).Infoln("MultipartReader [ERROR]", fe)
+ e = fe
+ return
+ }
+
+ //first multi-part item
+ part, fe := form.NextPart()
+ if fe != nil {
+ glog.V(0).Infoln("Reading Multi part [ERROR]", fe)
+ e = fe
+ return
+ }
+
+ pu.FileName = part.FileName()
+ if pu.FileName != "" {
+ pu.FileName = path.Base(pu.FileName)
+ }
+
+ pu.Data, e = ioutil.ReadAll(io.LimitReader(part, sizeLimit+1))
+ if e != nil {
+ glog.V(0).Infoln("Reading Content [ERROR]", e)
+ return
+ }
+ if len(pu.Data) == int(sizeLimit)+1 {
+ e = fmt.Errorf("file over the limited %d bytes", sizeLimit)
+ return
+ }
+
+ //if the filename is empty string, do a search on the other multi-part items
+ for pu.FileName == "" {
+ part2, fe := form.NextPart()
+ if fe != nil {
+ break // no more or on error, just safely break
+ }
+
+ fName := part2.FileName()
+
+ //found the first <file type> multi-part has filename
+ if fName != "" {
+ data2, fe2 := ioutil.ReadAll(io.LimitReader(part2, sizeLimit+1))
+ if fe2 != nil {
+ glog.V(0).Infoln("Reading Content [ERROR]", fe2)
+ e = fe2
+ return
+ }
+ if len(data2) == int(sizeLimit)+1 {
+ e = fmt.Errorf("file over the limited %d bytes", sizeLimit)
+ return
+ }
+
+ //update
+ pu.Data = data2
+ pu.FileName = path.Base(fName)
+ break
+ }
+ }
+
+ pu.IsChunkedFile, _ = strconv.ParseBool(r.FormValue("cm"))
+
+ if !pu.IsChunkedFile {
+
+ dotIndex := strings.LastIndex(pu.FileName, ".")
+ ext, mtype := "", ""
+ if dotIndex > 0 {
+ ext = strings.ToLower(pu.FileName[dotIndex:])
+ mtype = mime.TypeByExtension(ext)
+ }
+ contentType := part.Header.Get("Content-Type")
+ if contentType != "" && contentType != "application/octet-stream" && mtype != contentType {
+ pu.MimeType = contentType //only return mime type if not deductable
+ mtype = contentType
+ }
+
+ pu.IsGzipped = part.Header.Get("Content-Encoding") == "gzip"
+ }
+
+ return
+}
diff --git a/weed/util/cipher.go b/weed/util/cipher.go
index f044c2ca3..7bcb6559a 100644
--- a/weed/util/cipher.go
+++ b/weed/util/cipher.go
@@ -1,11 +1,14 @@
package util
import (
+ "bytes"
"crypto/aes"
"crypto/cipher"
"crypto/rand"
"errors"
+ "fmt"
"io"
+ "io/ioutil"
"github.com/chrislusf/seaweedfs/weed/glog"
)
@@ -58,3 +61,21 @@ func Decrypt(ciphertext []byte, key CipherKey) ([]byte, error) {
nonce, ciphertext := ciphertext[:nonceSize], ciphertext[nonceSize:]
return gcm.Open(nil, nonce, ciphertext, nil)
}
+
+func EncryptReader(clearReader io.Reader) (cipherKey CipherKey, encryptedReader io.ReadCloser, clearDataLen, encryptedDataLen int, err error) {
+ clearData, err := ioutil.ReadAll(clearReader)
+ if err != nil {
+ err = fmt.Errorf("read raw input: %v", err)
+ return
+ }
+ clearDataLen = len(clearData)
+ cipherKey = GenCipherKey()
+ encryptedData, err := Encrypt(clearData, cipherKey)
+ if err != nil {
+ err = fmt.Errorf("encrypt input: %v", err)
+ return
+ }
+ encryptedDataLen = len(encryptedData)
+ encryptedReader = ioutil.NopCloser(bytes.NewReader(encryptedData))
+ return
+}