path: root/weed/s3api/s3api_object_copy_handlers.go
author    HongyanShen <763987993@qq.com>  2020-03-11 12:55:24 +0800
committer GitHub <noreply@github.com>     2020-03-11 12:55:24 +0800
commit    03529fc0c29072f6f26e11ffbd7229cf92dc71ce (patch)
tree      ed8833386a712c850dcef0815509774681a6ab56 /weed/s3api/s3api_object_copy_handlers.go
parent    0fca1ae776783b37481549df40f477b7d9248d3c (diff)
parent    60f5f05c78a2918d5219c925cea5847759281a2c (diff)
Merge pull request #1 from chrislusf/master
sync
Diffstat (limited to 'weed/s3api/s3api_object_copy_handlers.go')
-rw-r--r--  weed/s3api/s3api_object_copy_handlers.go  151
1 file changed, 151 insertions, 0 deletions
diff --git a/weed/s3api/s3api_object_copy_handlers.go b/weed/s3api/s3api_object_copy_handlers.go
new file mode 100644
index 000000000..b8fb3f6a4
--- /dev/null
+++ b/weed/s3api/s3api_object_copy_handlers.go
@@ -0,0 +1,151 @@
+package s3api
+
+import (
+ "fmt"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/gorilla/mux"
+
+ "github.com/chrislusf/seaweedfs/weed/util"
+)
+
+func (s3a *S3ApiServer) CopyObjectHandler(w http.ResponseWriter, r *http.Request) {
+
+ vars := mux.Vars(r)
+ dstBucket := vars["bucket"]
+ dstObject := getObject(vars)
+
+ // Copy source path.
+ cpSrcPath, err := url.QueryUnescape(r.Header.Get("X-Amz-Copy-Source"))
+ if err != nil {
+ // Save unescaped string as is.
+ cpSrcPath = r.Header.Get("X-Amz-Copy-Source")
+ }
+
+ srcBucket, srcObject := pathToBucketAndObject(cpSrcPath)
+ // If source object is empty or bucket is empty, reply back invalid copy source.
+ if srcObject == "" || srcBucket == "" {
+ writeErrorResponse(w, ErrInvalidCopySource, r.URL)
+ return
+ }
+
+ if srcBucket == dstBucket && srcObject == dstObject {
+ writeErrorResponse(w, ErrInvalidCopySource, r.URL)
+ return
+ }
+
+ dstUrl := fmt.Sprintf("http://%s%s/%s%s?collection=%s",
+ s3a.option.Filer, s3a.option.BucketsPath, dstBucket, dstObject, dstBucket)
+ srcUrl := fmt.Sprintf("http://%s%s/%s%s",
+ s3a.option.Filer, s3a.option.BucketsPath, srcBucket, srcObject)
+
+ _, _, dataReader, err := util.DownloadFile(srcUrl)
+ if err != nil {
+ writeErrorResponse(w, ErrInvalidCopySource, r.URL)
+ return
+ }
+ defer dataReader.Close()
+
+ etag, errCode := s3a.putToFiler(r, dstUrl, dataReader)
+
+ if errCode != ErrNone {
+ writeErrorResponse(w, errCode, r.URL)
+ return
+ }
+
+ setEtag(w, etag)
+
+ response := CopyObjectResult{
+ ETag: etag,
+ LastModified: time.Now(),
+ }
+
+ writeSuccessResponseXML(w, encodeResponse(response))
+
+}
+
+func pathToBucketAndObject(path string) (bucket, object string) {
+ path = strings.TrimPrefix(path, "/")
+ parts := strings.SplitN(path, "/", 2)
+ if len(parts) == 2 {
+ return parts[0], "/" + parts[1]
+ }
+ return parts[0], "/"
+}
+
+type CopyPartResult struct {
+ LastModified time.Time `xml:"LastModified"`
+ ETag string `xml:"ETag"`
+}
+
+func (s3a *S3ApiServer) CopyObjectPartHandler(w http.ResponseWriter, r *http.Request) {
+ // https://docs.aws.amazon.com/AmazonS3/latest/dev/CopyingObjctsUsingRESTMPUapi.html
+ // https://docs.aws.amazon.com/AmazonS3/latest/API/API_UploadPartCopy.html
+ vars := mux.Vars(r)
+ dstBucket := vars["bucket"]
+ // dstObject := getObject(vars)
+
+ // Copy source path.
+ cpSrcPath, err := url.QueryUnescape(r.Header.Get("X-Amz-Copy-Source"))
+ if err != nil {
+ // Save unescaped string as is.
+ cpSrcPath = r.Header.Get("X-Amz-Copy-Source")
+ }
+
+ srcBucket, srcObject := pathToBucketAndObject(cpSrcPath)
+ // If source object is empty or bucket is empty, reply back invalid copy source.
+ if srcObject == "" || srcBucket == "" {
+ writeErrorResponse(w, ErrInvalidCopySource, r.URL)
+ return
+ }
+
+ uploadID := r.URL.Query().Get("uploadId")
+ partIDString := r.URL.Query().Get("partNumber")
+
+ partID, err := strconv.Atoi(partIDString)
+ if err != nil {
+ writeErrorResponse(w, ErrInvalidPart, r.URL)
+ return
+ }
+
+ // check partID with maximum part ID for multipart objects
+ if partID > globalMaxPartID {
+ writeErrorResponse(w, ErrInvalidMaxParts, r.URL)
+ return
+ }
+
+ rangeHeader := r.Header.Get("x-amz-copy-source-range")
+
+ dstUrl := fmt.Sprintf("http://%s%s/%s/%04d.part?collection=%s",
+ s3a.option.Filer, s3a.genUploadsFolder(dstBucket), uploadID, partID-1, dstBucket)
+ srcUrl := fmt.Sprintf("http://%s%s/%s%s",
+ s3a.option.Filer, s3a.option.BucketsPath, srcBucket, srcObject)
+
+ dataReader, err := util.ReadUrlAsReaderCloser(srcUrl, rangeHeader)
+ if err != nil {
+ writeErrorResponse(w, ErrInvalidCopySource, r.URL)
+ return
+ }
+ defer dataReader.Close()
+
+ etag, errCode := s3a.putToFiler(r, dstUrl, dataReader)
+
+ if errCode != ErrNone {
+ writeErrorResponse(w, errCode, r.URL)
+ return
+ }
+
+ setEtag(w, etag)
+
+ response := CopyPartResult{
+ ETag: etag,
+ LastModified: time.Now(),
+ }
+
+ writeSuccessResponseXML(w, encodeResponse(response))
+
+}
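
For reference, a minimal, self-contained sketch (not part of the commit) of how the X-Amz-Copy-Source header is split by the pathToBucketAndObject helper introduced above; the bucket and object names below are made up for illustration:

package main

import (
	"fmt"
	"strings"
)

// pathToBucketAndObject is copied verbatim from the diff above.
func pathToBucketAndObject(path string) (bucket, object string) {
	path = strings.TrimPrefix(path, "/")
	parts := strings.SplitN(path, "/", 2)
	if len(parts) == 2 {
		return parts[0], "/" + parts[1]
	}
	return parts[0], "/"
}

func main() {
	// Hypothetical copy-source values, already URL-unescaped as in the handlers.
	for _, src := range []string{"/srcbucket/dir/photo.jpg", "srcbucket/photo.jpg", "/srcbucket"} {
		bucket, object := pathToBucketAndObject(src)
		fmt.Printf("%-26s -> bucket=%q object=%q\n", src, bucket, object)
	}
}

With an empty header the helper returns an empty bucket, which is what both handlers reject with ErrInvalidCopySource.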