| author | monchickey <75814968+monchickey@users.noreply.github.com> | 2023-01-07 01:28:07 +0800 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2023-01-06 09:28:07 -0800 |
| commit | 3e2c9ea73ddc577da64e5c8596f8faae884c6840 (patch) | |
| tree | df9d61802f54183ad6471a36cc854b2b592ad749 | |
| parent | 296fdc296c8bd672d86c9aef19e9f568dab1b8a0 (diff) | |
Add image cropping. (#4117)
| -rw-r--r-- | weed/images/cropping.go | 47 |
| -rw-r--r-- | weed/images/cropping_test.go | 22 |
| -rw-r--r-- | weed/server/volume_server_handlers_read.go | 53 |
3 files changed, 116 insertions, 6 deletions
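
Not part of the commit: a minimal sketch of how a client might exercise the new crop support once a volume server with this change is running. The server address and file id below are placeholders; only the `crop_x1`/`crop_y1`/`crop_x2`/`crop_y2` query parameters come from the diff.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
)

func main() {
	// Placeholder volume server address and file id; the crop_* query
	// parameters are the ones introduced by this commit.
	url := "http://localhost:8080/3,01637037d6?crop_x1=1072&crop_y1=932&crop_x2=1751&crop_y2=1062"

	resp, err := http.Get(url)
	if err != nil {
		fmt.Fprintln(os.Stderr, "request failed:", err)
		os.Exit(1)
	}
	defer resp.Body.Close()

	out, err := os.Create("cropped.jpg")
	if err != nil {
		fmt.Fprintln(os.Stderr, "create file:", err)
		os.Exit(1)
	}
	defer out.Close()

	// The handler crops before any resize, so the body is the cropped image.
	if _, err := io.Copy(out, resp.Body); err != nil {
		fmt.Fprintln(os.Stderr, "save failed:", err)
	}
}
```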
diff --git a/weed/images/cropping.go b/weed/images/cropping.go
new file mode 100644
index 000000000..07a3f41ad
--- /dev/null
+++ b/weed/images/cropping.go
@@ -0,0 +1,47 @@
+package images
+
+import (
+ "bytes"
+ "image"
+ "image/gif"
+ "image/jpeg"
+ "image/png"
+ "io"
+
+ "github.com/disintegration/imaging"
+
+ "github.com/seaweedfs/seaweedfs/weed/glog"
+)
+
+func Cropped(ext string, read io.ReadSeeker, x1, y1, x2, y2 int) (cropped io.ReadSeeker, err error) {
+    srcImage, _, err := image.Decode(read)
+    if err != nil {
+        glog.Error(err)
+        return read, err
+    }
+
+    bounds := srcImage.Bounds()
+    if x2 > bounds.Dx() || y2 > bounds.Dy() {
+        read.Seek(0, 0)
+        return read, nil
+    }
+
+    rectangle := image.Rect(x1, y1, x2, y2)
+    dstImage := imaging.Crop(srcImage, rectangle)
+    var buf bytes.Buffer
+    switch ext {
+    case ".jpg", ".jpeg":
+        if err = jpeg.Encode(&buf, dstImage, nil); err != nil {
+            glog.Error(err)
+        }
+    case ".png":
+        if err = png.Encode(&buf, dstImage); err != nil {
+            glog.Error(err)
+        }
+    case ".gif":
+        if err = gif.Encode(&buf, dstImage, nil); err != nil {
+            glog.Error(err)
+        }
+    }
+    return bytes.NewReader(buf.Bytes()), err
+}
diff --git a/weed/images/cropping_test.go b/weed/images/cropping_test.go
new file mode 100644
index 000000000..284432e3a
--- /dev/null
+++ b/weed/images/cropping_test.go
@@ -0,0 +1,22 @@
+package images
+
+import (
+    "bytes"
+    "os"
+    "testing"
+
+    "github.com/seaweedfs/seaweedfs/weed/util"
+)
+
+func TestCropping(t *testing.T) {
+    fname := "sample1.jpg"
+
+    dat, _ := os.ReadFile(fname)
+
+    cropped, _ := Cropped(".jpg", bytes.NewReader(dat), 1072, 932, 1751, 1062)
+    buf := new(bytes.Buffer)
+    buf.ReadFrom(cropped)
+
+    util.WriteFile("cropped1.jpg", buf.Bytes(), 0644)
+
+}
diff --git a/weed/server/volume_server_handlers_read.go b/weed/server/volume_server_handlers_read.go
index 8ad526d59..10ff15d92 100644
--- a/weed/server/volume_server_handlers_read.go
+++ b/weed/server/volume_server_handlers_read.go
@@ -5,8 +5,6 @@ import (
     "encoding/json"
     "errors"
     "fmt"
-    "github.com/seaweedfs/seaweedfs/weed/storage/types"
-    "github.com/seaweedfs/seaweedfs/weed/util/mem"
     "io"
     "mime"
     "net/http"
@@ -17,6 +15,9 @@ import (
     "sync/atomic"
     "time"
 
+    "github.com/seaweedfs/seaweedfs/weed/storage/types"
+    "github.com/seaweedfs/seaweedfs/weed/util/mem"
+
     "github.com/seaweedfs/seaweedfs/weed/glog"
     "github.com/seaweedfs/seaweedfs/weed/images"
     "github.com/seaweedfs/seaweedfs/weed/operation"
@@ -204,7 +205,9 @@ func (vs *VolumeServer) GetOrHeadHandler(w http.ResponseWriter, r *http.Request)
     }
 
     if n.IsCompressed() {
-        if _, _, _, shouldResize := shouldResizeImages(ext, r); shouldResize {
+        _, _, _, shouldResize := shouldResizeImages(ext, r)
+        _, _, _, _, shouldCrop := shouldCropImages(ext, r)
+        if shouldResize || shouldCrop {
             if n.Data, err = util.DecompressData(n.Data); err != nil {
                 glog.V(0).Infoln("ungzip error:", err, r.URL.Path)
             }
@@ -220,7 +223,8 @@ func (vs *VolumeServer) GetOrHeadHandler(w http.ResponseWriter, r *http.Request)
     }
 
     if !readOption.IsMetaOnly {
-        rs := conditionallyResizeImages(bytes.NewReader(n.Data), ext, r)
+        rs := conditionallyCropImages(bytes.NewReader(n.Data), ext, r)
+        rs = conditionallyResizeImages(rs, ext, r)
         if e := writeResponseContent(filename, mtype, rs, w, r); e != nil {
             glog.V(2).Infoln("response write error:", e)
         }
@@ -240,7 +244,8 @@ func shouldAttemptStreamWrite(hasLocalVolume bool, ext string, r *http.Request)
         return true, true
     }
     _, _, _, shouldResize := shouldResizeImages(ext, r)
-    if shouldResize {
+    _, _, _, _, shouldCrop := shouldCropImages(ext, r)
+    if shouldResize || shouldCrop {
         return false, false
     }
     return true, false
@@ -277,7 +282,8 @@ func (vs *VolumeServer) tryHandleChunkedFile(n *needle.Needle, fileName string,
     chunkedFileReader := operation.NewChunkedFileReader(chunkManifest.Chunks, vs.GetMaster(), vs.grpcDialOption)
     defer chunkedFileReader.Close()
 
-    rs := conditionallyResizeImages(chunkedFileReader, ext, r)
+    rs := conditionallyCropImages(chunkedFileReader, ext, r)
+    rs = conditionallyResizeImages(rs, ext, r)
 
     if e := writeResponseContent(fileName, mType, rs, w, r); e != nil {
         glog.V(2).Infoln("response write error:", e)
@@ -311,6 +317,41 @@ func shouldResizeImages(ext string, r *http.Request) (width, height int, mode st
     return
 }
 
+func conditionallyCropImages(originalDataReaderSeeker io.ReadSeeker, ext string, r *http.Request) io.ReadSeeker {
+    rs := originalDataReaderSeeker
+    if len(ext) > 0 {
+        ext = strings.ToLower(ext)
+    }
+    x1, y1, x2, y2, shouldCrop := shouldCropImages(ext, r)
+    if shouldCrop {
+        var err error
+        rs, err = images.Cropped(ext, rs, x1, y1, x2, y2)
+        if err != nil {
+            glog.Errorf("Cropping images error: %s", err)
+        }
+    }
+    return rs
+}
+
+func shouldCropImages(ext string, r *http.Request) (x1, y1, x2, y2 int, shouldCrop bool) {
+    if ext == ".png" || ext == ".jpg" || ext == ".jpeg" || ext == ".gif" {
+        if r.FormValue("crop_x1") != "" {
+            x1, _ = strconv.Atoi(r.FormValue("crop_x1"))
+        }
+        if r.FormValue("crop_y1") != "" {
+            y1, _ = strconv.Atoi(r.FormValue("crop_y1"))
+        }
+        if r.FormValue("crop_x2") != "" {
+            x2, _ = strconv.Atoi(r.FormValue("crop_x2"))
+        }
+        if r.FormValue("crop_y2") != "" {
+            y2, _ = strconv.Atoi(r.FormValue("crop_y2"))
+        }
+    }
+    shouldCrop = x1 >= 0 && y1 >= 0 && x2 > x1 && y2 > y1
+    return
+}
+
 func writeResponseContent(filename, mimeType string, rs io.ReadSeeker, w http.ResponseWriter, r *http.Request) error {
     totalSize, e := rs.Seek(0, 2)
     if mimeType == "" {
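
Not part of the commit: a minimal sketch of calling the new images.Cropped helper directly, mirroring the test above but keeping the result in memory and decoding it to check the cropped bounds. The input file name and crop coordinates are placeholders.

```go
package main

import (
	"bytes"
	"fmt"
	"image"
	_ "image/jpeg" // register the JPEG decoder for image.Decode
	"os"

	"github.com/seaweedfs/seaweedfs/weed/images"
)

func main() {
	data, err := os.ReadFile("sample1.jpg") // placeholder input file
	if err != nil {
		fmt.Fprintln(os.Stderr, "read:", err)
		os.Exit(1)
	}

	// Cropped hands back the original reader when decoding fails or when the
	// requested rectangle exceeds the source bounds.
	cropped, err := images.Cropped(".jpg", bytes.NewReader(data), 100, 100, 400, 300)
	if err != nil {
		fmt.Fprintln(os.Stderr, "crop:", err)
		os.Exit(1)
	}

	img, _, err := image.Decode(cropped)
	if err != nil {
		fmt.Fprintln(os.Stderr, "decode:", err)
		os.Exit(1)
	}
	// For these placeholder coordinates the cropped image should be 300x200.
	fmt.Println("cropped bounds:", img.Bounds())
}
```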
