path: root/weed/command/s3.go
author    Chris Lu <chrislusf@users.noreply.github.com>    2025-12-10 14:12:51 -0800
committer GitHub <noreply@github.com>    2025-12-10 14:12:51 -0800
commit    2d06ddab414dcc78c282ab91136c8e1dcaa38ffa (patch)
tree      e965b657dcbd97729ae0f1c0f66d253d819a5f1b /weed/command/s3.go
parent    924d410dc88fcf3e72fcaac5d37a526890bdba79 (diff)
Remove default concurrent upload/download limits for best performance (#7712)
Change all concurrentUploadLimitMB and concurrentDownloadLimitMB defaults from fixed values (64, 128, 256 MB) to 0 (unlimited). This removes artificial throttling that can limit throughput on high-performance systems, especially on all-flash setups with many cores.

Files changed:
- volume.go: concurrentUploadLimitMB 256 -> 0, concurrentDownloadLimitMB 256 -> 0
- server.go: filer/volume/s3 concurrent limits 64/128 -> 0
- s3.go: concurrentUploadLimitMB 128 -> 0
- filer.go: concurrentUploadLimitMB 128 -> 0, s3.concurrentUploadLimitMB 128 -> 0

Users can still set explicit limits if needed for resource management.
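
For illustration only, the sketch below is a hypothetical Go example (not the SeaweedFS implementation; the inflightLimiter type and its method names are made up) showing how a byte-based concurrentUploadLimitMB setting can treat 0 as "unlimited": requests wait only when a positive limit would be exceeded.

    // Hypothetical sketch: a byte-based upload limiter where 0 means unlimited.
    package main

    import (
        "flag"
        "fmt"
        "sync"
    )

    // inflightLimiter tracks in-flight upload bytes against an optional cap.
    type inflightLimiter struct {
        mu       sync.Mutex
        cond     *sync.Cond
        limit    int64 // 0 means unlimited
        inflight int64
    }

    func newInflightLimiter(limitMB int) *inflightLimiter {
        l := &inflightLimiter{limit: int64(limitMB) * 1024 * 1024}
        l.cond = sync.NewCond(&l.mu)
        return l
    }

    // Acquire blocks until size bytes fit under the limit; a zero limit never blocks.
    func (l *inflightLimiter) Acquire(size int64) {
        l.mu.Lock()
        defer l.mu.Unlock()
        for l.limit > 0 && l.inflight+size > l.limit {
            l.cond.Wait()
        }
        l.inflight += size
    }

    // Release returns size bytes to the pool and wakes any waiting uploads.
    func (l *inflightLimiter) Release(size int64) {
        l.mu.Lock()
        l.inflight -= size
        l.mu.Unlock()
        l.cond.Broadcast()
    }

    func main() {
        limitMB := flag.Int("concurrentUploadLimitMB", 0, "limit total concurrent upload size, 0 means unlimited")
        flag.Parse()

        limiter := newInflightLimiter(*limitMB)
        uploadSize := int64(8 * 1024 * 1024) // pretend an 8 MB upload request arrived

        limiter.Acquire(uploadSize) // with the default of 0 this returns immediately
        fmt.Printf("admitted %d bytes (limit %d bytes)\n", uploadSize, limiter.limit)
        limiter.Release(uploadSize)
    }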
Diffstat (limited to 'weed/command/s3.go')
-rw-r--r--  weed/command/s3.go  2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/weed/command/s3.go b/weed/command/s3.go
index 5fb34155b..5f62e8e58 100644
--- a/weed/command/s3.go
+++ b/weed/command/s3.go
@@ -84,7 +84,7 @@ func init() {
s3StandaloneOptions.localFilerSocket = cmdS3.Flag.String("localFilerSocket", "", "local filer socket path")
s3StandaloneOptions.localSocket = cmdS3.Flag.String("localSocket", "", "default to /tmp/seaweedfs-s3-<port>.sock")
s3StandaloneOptions.idleTimeout = cmdS3.Flag.Int("idleTimeout", 120, "connection idle seconds")
- s3StandaloneOptions.concurrentUploadLimitMB = cmdS3.Flag.Int("concurrentUploadLimitMB", 128, "limit total concurrent upload size")
+ s3StandaloneOptions.concurrentUploadLimitMB = cmdS3.Flag.Int("concurrentUploadLimitMB", 0, "limit total concurrent upload size, 0 means unlimited")
s3StandaloneOptions.concurrentFileUploadLimit = cmdS3.Flag.Int("concurrentFileUploadLimit", 0, "limit number of concurrent file uploads, 0 means unlimited")
}
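
With the new default, throttling is opt-in. A user who still wants a cap can pass an explicit value to the standalone S3 gateway, for example:

    weed s3 -concurrentUploadLimitMB=128

which restores the previous 128 MB cap on total in-flight upload bytes.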