Diffstat (limited to 'weed/server/filer_server_handlers_write_autochunk.go')
-rw-r--r--  weed/server/filer_server_handlers_write_autochunk.go  35
1 file changed, 16 insertions(+), 19 deletions(-)
diff --git a/weed/server/filer_server_handlers_write_autochunk.go b/weed/server/filer_server_handlers_write_autochunk.go
index 9bf2df6ef..9c2b9959f 100644
--- a/weed/server/filer_server_handlers_write_autochunk.go
+++ b/weed/server/filer_server_handlers_write_autochunk.go
@@ -3,6 +3,7 @@ package weed_server
import (
"context"
"fmt"
+ "github.com/chrislusf/seaweedfs/weed/s3api/s3_constants"
"io"
"net/http"
"os"
@@ -15,7 +16,6 @@ import (
"github.com/chrislusf/seaweedfs/weed/glog"
"github.com/chrislusf/seaweedfs/weed/operation"
"github.com/chrislusf/seaweedfs/weed/pb/filer_pb"
- xhttp "github.com/chrislusf/seaweedfs/weed/s3api/http"
"github.com/chrislusf/seaweedfs/weed/stats"
"github.com/chrislusf/seaweedfs/weed/storage/needle"
"github.com/chrislusf/seaweedfs/weed/util"
@@ -201,18 +201,15 @@ func (fs *FilerServer) saveMetaData(ctx context.Context, r *http.Request, fileNa
entry = &filer.Entry{
FullPath: util.FullPath(path),
Attr: filer.Attr{
- Mtime: time.Now(),
- Crtime: time.Now(),
- Mode: os.FileMode(mode),
- Uid: OS_UID,
- Gid: OS_GID,
- Replication: so.Replication,
- Collection: so.Collection,
- TtlSec: so.TtlSeconds,
- DiskType: so.DiskType,
- Mime: contentType,
- Md5: md5bytes,
- FileSize: uint64(chunkOffset),
+ Mtime: time.Now(),
+ Crtime: time.Now(),
+ Mode: os.FileMode(mode),
+ Uid: OS_UID,
+ Gid: OS_GID,
+ TtlSec: so.TtlSeconds,
+ Mime: contentType,
+ Md5: md5bytes,
+ FileSize: uint64(chunkOffset),
},
Content: content,
}
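
The following hunk only swaps the old xhttp constants for their s3_constants equivalents in SaveAmzMetaData; the tag-splitting logic itself is unchanged. As a self-contained illustration of that logic, the sketch below parses an X-Amz-Tagging style value of the form key1=value1&key2=value2 into one metadata entry per tag. The header-name string and the example values are stand-ins; the real constant comes from weed/s3api/s3_constants.

// Illustrative sketch of the tag splitting done in SaveAmzMetaData:
// each "key=value" pair becomes its own metadata entry, keyed as
// "<tagging-header-name>-<tag key>"; a bare key with no "=" is stored
// with a nil value.
package main

import (
	"fmt"
	"strings"
)

func parseTagging(headerName, tags string) map[string][]byte {
	metadata := make(map[string][]byte)
	for _, v := range strings.Split(tags, "&") {
		tag := strings.Split(v, "=")
		if len(tag) == 2 {
			metadata[headerName+"-"+tag[0]] = []byte(tag[1])
		} else if len(tag) == 1 {
			metadata[headerName+"-"+tag[0]] = nil
		}
	}
	return metadata
}

func main() {
	// "archived" has no "=", so it exercises the len(tag) == 1 branch.
	m := parseTagging("X-Amz-Tagging", "project=seaweedfs&archived")
	for k, v := range m {
		fmt.Printf("%s = %q\n", k, v)
	}
}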
@@ -349,23 +346,23 @@ func SaveAmzMetaData(r *http.Request, existing map[string][]byte, isReplace bool
}
}
- if sc := r.Header.Get(xhttp.AmzStorageClass); sc != "" {
- metadata[xhttp.AmzStorageClass] = []byte(sc)
+ if sc := r.Header.Get(s3_constants.AmzStorageClass); sc != "" {
+ metadata[s3_constants.AmzStorageClass] = []byte(sc)
}
- if tags := r.Header.Get(xhttp.AmzObjectTagging); tags != "" {
+ if tags := r.Header.Get(s3_constants.AmzObjectTagging); tags != "" {
for _, v := range strings.Split(tags, "&") {
tag := strings.Split(v, "=")
if len(tag) == 2 {
- metadata[xhttp.AmzObjectTagging+"-"+tag[0]] = []byte(tag[1])
+ metadata[s3_constants.AmzObjectTagging+"-"+tag[0]] = []byte(tag[1])
} else if len(tag) == 1 {
- metadata[xhttp.AmzObjectTagging+"-"+tag[0]] = nil
+ metadata[s3_constants.AmzObjectTagging+"-"+tag[0]] = nil
}
}
}
for header, values := range r.Header {
- if strings.HasPrefix(header, xhttp.AmzUserMetaPrefix) {
+ if strings.HasPrefix(header, s3_constants.AmzUserMetaPrefix) {
for _, value := range values {
metadata[header] = []byte(value)
}