path: root/weed/filesys/dirty_page.go
author    hilimd <68371223+hilimd@users.noreply.github.com>    2020-10-26 22:01:50 +0800
committer GitHub <noreply@github.com>    2020-10-26 22:01:50 +0800
commit    843865f2ca534bb6286b7a3d79c436384d875608 (patch)
tree      653943fe04caf3fe607416715fb341460a624ab7 /weed/filesys/dirty_page.go
parent    cf7a1c722fa82fa78c546f68e4814fff7dc6d1e2 (diff)
parent    44921220b01d21c64755cbc7560ff8932f71984d (diff)
Merge pull request #33 from chrislusf/master
sync
Diffstat (limited to 'weed/filesys/dirty_page.go')
-rw-r--r--    weed/filesys/dirty_page.go | 8 +++-----
1 file changed, 3 insertions(+), 5 deletions(-)
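For context, the diff below declares a package-level concurrentWriters executor via util.NewLimitedConcurrentExecutor(4 * concurrentWriterLimit). It is only declared in the hunks shown here and is presumably consumed elsewhere in the filesys package to cap how many chunk uploads run at once (four per CPU core). The sketch below is a minimal, hypothetical illustration of such a limited concurrent executor, built on a buffered channel used as a counting semaphore; the Execute/Wait API and all names are assumptions, not the actual weed/util implementation.

package main

import (
	"fmt"
	"sync"
)

// LimitedConcurrentExecutor caps how many submitted jobs run at the same time.
// Hypothetical sketch only; the real weed/util type may differ.
type LimitedConcurrentExecutor struct {
	semaphore chan struct{} // buffered channel used as a counting semaphore
	wg        sync.WaitGroup
}

func NewLimitedConcurrentExecutor(limit int) *LimitedConcurrentExecutor {
	return &LimitedConcurrentExecutor{semaphore: make(chan struct{}, limit)}
}

// Execute blocks until a slot is free, then runs job in its own goroutine.
func (e *LimitedConcurrentExecutor) Execute(job func()) {
	e.semaphore <- struct{}{} // acquire a slot; blocks once `limit` jobs are in flight
	e.wg.Add(1)
	go func() {
		defer e.wg.Done()
		defer func() { <-e.semaphore }() // release the slot when the job returns
		job()
	}()
}

// Wait blocks until every submitted job has finished.
func (e *LimitedConcurrentExecutor) Wait() {
	e.wg.Wait()
}

func main() {
	executor := NewLimitedConcurrentExecutor(4) // the diff uses 4 * runtime.NumCPU()
	for i := 0; i < 16; i++ {
		n := i
		executor.Execute(func() { fmt.Println("uploading chunk", n) })
	}
	executor.Wait()
}

The likely motivation for bounding the writer pool this way is to keep upload concurrency and memory use predictable when many dirty pages flush at once, rather than starting an unbounded goroutine per chunk.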
diff --git a/weed/filesys/dirty_page.go b/weed/filesys/dirty_page.go
index 6fda134aa..dd0c48796 100644
--- a/weed/filesys/dirty_page.go
+++ b/weed/filesys/dirty_page.go
@@ -9,10 +9,12 @@ import (
 
 	"github.com/chrislusf/seaweedfs/weed/glog"
 	"github.com/chrislusf/seaweedfs/weed/pb/filer_pb"
+	"github.com/chrislusf/seaweedfs/weed/util"
 )
 
 var (
 	concurrentWriterLimit = runtime.NumCPU()
+	concurrentWriters     = util.NewLimitedConcurrentExecutor(4 * concurrentWriterLimit)
 )
 
 type ContinuousDirtyPages struct {
@@ -93,8 +95,6 @@ func (pages *ContinuousDirtyPages) saveExistingLargestPageToStorage() (hasSavedD
 	pages.saveToStorage(maxList.ToReader(), maxList.Offset(), chunkSize)
 
-	maxList.Destroy()
-
 	return true
 }
@@ -110,10 +110,8 @@ func (pages *ContinuousDirtyPages) saveToStorage(reader io.Reader, offset int64,
 	go func() {
 		defer pages.writeWaitGroup.Done()
 
-		dir, _ := pages.f.fullpath().DirAndName()
-
 		reader = io.LimitReader(reader, size)
-		chunk, collection, replication, err := pages.f.wfs.saveDataAsChunk(dir)(reader, pages.f.Name, offset)
+		chunk, collection, replication, err := pages.f.wfs.saveDataAsChunk(pages.f.fullpath())(reader, pages.f.Name, offset)
 		if err != nil {
 			glog.V(0).Infof("%s saveToStorage [%d,%d): %v", pages.f.fullpath(), offset, offset+size, err)
 			pages.chunkSaveErrChan <- err