author    chrislu <chris.lu@gmail.com> 2022-10-28 12:53:19 -0700
committer chrislu <chris.lu@gmail.com> 2022-10-28 12:53:19 -0700
commit    ea2637734a13a08d11d4f26e80c1324664bf7ffc (patch)
tree      6708b9a0d07a0518a6fdf151d59bf3fc46053430 /weed/filer
parent    1e0d64c04883a8d7b09677e9721f9e189743e2f3 (diff)
refactor filer proto chunk variable from mtime to modified_ts_ns
Diffstat (limited to 'weed/filer')
-rw-r--r--  weed/filer/filechunks.go           |  16
-rw-r--r--  weed/filer/filechunks2_test.go     |  36
-rw-r--r--  weed/filer/filechunks_read.go      |   6
-rw-r--r--  weed/filer/filechunks_read_test.go | 168
-rw-r--r--  weed/filer/filechunks_test.go      | 164
-rw-r--r--  weed/filer/filer_notify_test.go    |   2
6 files changed, 196 insertions(+), 196 deletions(-)
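
The rename itself is mechanical: the filer.proto field mtime becomes modified_ts_ns, so the generated Go accessor Mtime becomes ModifiedTsNs, and the _ts_ns suffix makes the nanosecond unit explicit at every call site. A trimmed sketch of the regenerated filer_pb.FileChunk, limited to the fields this diff exercises (an illustration, not the verbatim generated code; proto-internal fields and tags are omitted):

	type FileChunk struct {
		FileId       string // "volumeId,fileKey" form, e.g. "3,029565bf3092"
		Offset       int64
		Size         uint64
		ModifiedTsNs int64 // was Mtime; a timestamp in nanoseconds
		ETag         string
		SourceFileId string
		Fid          *FileId
		CipherKey    []byte
		IsCompressed bool
	}

Assuming the field number is unchanged (which a pure rename preserves), previously stored entries still decode into the new field.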
diff --git a/weed/filer/filechunks.go b/weed/filer/filechunks.go
index 00f4c2921..965c73a77 100644
--- a/weed/filer/filechunks.go
+++ b/weed/filer/filechunks.go
@@ -192,7 +192,7 @@ func logPrintf(name string, visibles []VisibleInterval) {
func MergeIntoVisibles(visibles []VisibleInterval, chunk *filer_pb.FileChunk) (newVisibles []VisibleInterval) {
- newV := newVisibleInterval(chunk.Offset, chunk.Offset+int64(chunk.Size), chunk.GetFileIdString(), chunk.Mtime, 0, chunk.Size, chunk.CipherKey, chunk.IsCompressed)
+ newV := newVisibleInterval(chunk.Offset, chunk.Offset+int64(chunk.Size), chunk.GetFileIdString(), chunk.ModifiedTsNs, 0, chunk.Size, chunk.CipherKey, chunk.IsCompressed)
length := len(visibles)
if length == 0 {
@@ -208,12 +208,12 @@ func MergeIntoVisibles(visibles []VisibleInterval, chunk *filer_pb.FileChunk) (newVisibles []VisibleInterval) {
chunkStop := chunk.Offset + int64(chunk.Size)
for _, v := range visibles {
if v.start < chunk.Offset && chunk.Offset < v.stop {
- t := newVisibleInterval(v.start, chunk.Offset, v.fileId, v.modifiedTime, v.chunkOffset, v.chunkSize, v.cipherKey, v.isGzipped)
+ t := newVisibleInterval(v.start, chunk.Offset, v.fileId, v.modifiedTsNs, v.chunkOffset, v.chunkSize, v.cipherKey, v.isGzipped)
newVisibles = append(newVisibles, t)
// glog.V(0).Infof("visible %d [%d,%d) =1> [%d,%d)", i, v.start, v.stop, t.start, t.stop)
}
if v.start < chunkStop && chunkStop < v.stop {
- t := newVisibleInterval(chunkStop, v.stop, v.fileId, v.modifiedTime, v.chunkOffset+(chunkStop-v.start), v.chunkSize, v.cipherKey, v.isGzipped)
+ t := newVisibleInterval(chunkStop, v.stop, v.fileId, v.modifiedTsNs, v.chunkOffset+(chunkStop-v.start), v.chunkSize, v.cipherKey, v.isGzipped)
newVisibles = append(newVisibles, t)
// glog.V(0).Infof("visible %d [%d,%d) =2> [%d,%d)", i, v.start, v.stop, t.start, t.stop)
}
@@ -254,7 +254,7 @@ func NonOverlappingVisibleIntervals(lookupFileIdFn wdclient.LookupFileIdFunction
return visibles2, err
}
slices.SortFunc(chunks, func(a, b *filer_pb.FileChunk) bool {
- if a.Mtime == b.Mtime {
+ if a.ModifiedTsNs == b.ModifiedTsNs {
filer_pb.EnsureFid(a)
filer_pb.EnsureFid(b)
if a.Fid == nil || b.Fid == nil {
@@ -262,7 +262,7 @@ func NonOverlappingVisibleIntervals(lookupFileIdFn wdclient.LookupFileIdFunction
}
return a.Fid.FileKey < b.Fid.FileKey
}
- return a.Mtime < b.Mtime
+ return a.ModifiedTsNs < b.ModifiedTsNs
})
for _, chunk := range chunks {
@@ -288,7 +288,7 @@ func checkDifference(x, y VisibleInterval) {
if x.start != y.start ||
x.stop != y.stop ||
x.fileId != y.fileId ||
- x.modifiedTime != y.modifiedTime {
+ x.modifiedTsNs != y.modifiedTsNs {
fmt.Printf("different visible %+v : %+v\n", x, y)
}
}
@@ -299,7 +299,7 @@ func checkDifference(x, y VisibleInterval) {
type VisibleInterval struct {
start int64
stop int64
- modifiedTime int64
+ modifiedTsNs int64
fileId string
chunkOffset int64
chunkSize uint64
@@ -312,7 +312,7 @@ func newVisibleInterval(start, stop int64, fileId string, modifiedTime int64, ch
start: start,
stop: stop,
fileId: fileId,
- modifiedTime: modifiedTime,
+ modifiedTsNs: modifiedTime,
chunkOffset: chunkOffset, // the starting position in the chunk
chunkSize: chunkSize,
cipherKey: cipherKey,
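
Pieced together from the hunks above, the renamed struct and its constructor read roughly as follows — a reconstruction, since cgit truncates the constructor's parameter list in the hunk header; the trailing parameters are inferred from the field assignments:

	type VisibleInterval struct {
		start        int64
		stop         int64
		modifiedTsNs int64
		fileId       string
		chunkOffset  int64
		chunkSize    uint64
		cipherKey    []byte
		isGzipped    bool
	}

	func newVisibleInterval(start, stop int64, fileId string, modifiedTime int64,
		chunkOffset int64, chunkSize uint64, cipherKey []byte, isGzipped bool) VisibleInterval {
		return VisibleInterval{
			start:        start,
			stop:         stop,
			fileId:       fileId,
			modifiedTsNs: modifiedTime, // only the field is renamed in this patch; the parameter keeps its old name
			chunkOffset:  chunkOffset,  // the starting position in the chunk
			chunkSize:    chunkSize,
			cipherKey:    cipherKey,
			isGzipped:    isGzipped,
		}
	}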
diff --git a/weed/filer/filechunks2_test.go b/weed/filer/filechunks2_test.go
index 7aa00864b..6966360ad 100644
--- a/weed/filer/filechunks2_test.go
+++ b/weed/filer/filechunks2_test.go
@@ -17,14 +17,14 @@ func TestDoMinusChunks(t *testing.T) {
// clusterA append a new line and then clusterB also append a new line
// clusterA append a new line again
chunksInA := []*filer_pb.FileChunk{
- {Offset: 0, Size: 3, FileId: "11", Mtime: 100},
- {Offset: 3, Size: 3, FileId: "22", SourceFileId: "2", Mtime: 200},
- {Offset: 6, Size: 3, FileId: "33", Mtime: 300},
+ {Offset: 0, Size: 3, FileId: "11", ModifiedTsNs: 100},
+ {Offset: 3, Size: 3, FileId: "22", SourceFileId: "2", ModifiedTsNs: 200},
+ {Offset: 6, Size: 3, FileId: "33", ModifiedTsNs: 300},
}
chunksInB := []*filer_pb.FileChunk{
- {Offset: 0, Size: 3, FileId: "1", SourceFileId: "11", Mtime: 100},
- {Offset: 3, Size: 3, FileId: "2", Mtime: 200},
- {Offset: 6, Size: 3, FileId: "3", SourceFileId: "33", Mtime: 300},
+ {Offset: 0, Size: 3, FileId: "1", SourceFileId: "11", ModifiedTsNs: 100},
+ {Offset: 3, Size: 3, FileId: "2", ModifiedTsNs: 200},
+ {Offset: 6, Size: 3, FileId: "3", SourceFileId: "33", ModifiedTsNs: 300},
}
// clusterB using command "echo 'content' > hello.txt" to overwrite file
@@ -50,17 +50,17 @@ func TestDoMinusChunks(t *testing.T) {
func TestCompactFileChunksRealCase(t *testing.T) {
chunks := []*filer_pb.FileChunk{
- {FileId: "2,512f31f2c0700a", Offset: 0, Size: 25 - 0, Mtime: 5320497},
- {FileId: "6,512f2c2e24e9e8", Offset: 868352, Size: 917585 - 868352, Mtime: 5320492},
- {FileId: "7,514468dd5954ca", Offset: 884736, Size: 901120 - 884736, Mtime: 5325928},
- {FileId: "5,5144463173fe77", Offset: 917504, Size: 2297856 - 917504, Mtime: 5325894},
- {FileId: "4,51444c7ab54e2d", Offset: 2301952, Size: 2367488 - 2301952, Mtime: 5325900},
- {FileId: "4,514450e643ad22", Offset: 2371584, Size: 2420736 - 2371584, Mtime: 5325904},
- {FileId: "6,514456a5e9e4d7", Offset: 2449408, Size: 2490368 - 2449408, Mtime: 5325910},
- {FileId: "3,51444f8d53eebe", Offset: 2494464, Size: 2555904 - 2494464, Mtime: 5325903},
- {FileId: "4,5144578b097c7e", Offset: 2560000, Size: 2596864 - 2560000, Mtime: 5325911},
- {FileId: "3,51445500b6b4ac", Offset: 2637824, Size: 2678784 - 2637824, Mtime: 5325909},
- {FileId: "1,51446285e52a61", Offset: 2695168, Size: 2715648 - 2695168, Mtime: 5325922},
+ {FileId: "2,512f31f2c0700a", Offset: 0, Size: 25 - 0, ModifiedTsNs: 5320497},
+ {FileId: "6,512f2c2e24e9e8", Offset: 868352, Size: 917585 - 868352, ModifiedTsNs: 5320492},
+ {FileId: "7,514468dd5954ca", Offset: 884736, Size: 901120 - 884736, ModifiedTsNs: 5325928},
+ {FileId: "5,5144463173fe77", Offset: 917504, Size: 2297856 - 917504, ModifiedTsNs: 5325894},
+ {FileId: "4,51444c7ab54e2d", Offset: 2301952, Size: 2367488 - 2301952, ModifiedTsNs: 5325900},
+ {FileId: "4,514450e643ad22", Offset: 2371584, Size: 2420736 - 2371584, ModifiedTsNs: 5325904},
+ {FileId: "6,514456a5e9e4d7", Offset: 2449408, Size: 2490368 - 2449408, ModifiedTsNs: 5325910},
+ {FileId: "3,51444f8d53eebe", Offset: 2494464, Size: 2555904 - 2494464, ModifiedTsNs: 5325903},
+ {FileId: "4,5144578b097c7e", Offset: 2560000, Size: 2596864 - 2560000, ModifiedTsNs: 5325911},
+ {FileId: "3,51445500b6b4ac", Offset: 2637824, Size: 2678784 - 2637824, ModifiedTsNs: 5325909},
+ {FileId: "1,51446285e52a61", Offset: 2695168, Size: 2715648 - 2695168, ModifiedTsNs: 5325922},
}
printChunks("before", chunks)
@@ -75,7 +75,7 @@ func TestCompactFileChunksRealCase(t *testing.T) {
func printChunks(name string, chunks []*filer_pb.FileChunk) {
slices.SortFunc(chunks, func(a, b *filer_pb.FileChunk) bool {
if a.Offset == b.Offset {
- return a.Mtime < b.Mtime
+ return a.ModifiedTsNs < b.ModifiedTsNs
}
return a.Offset < b.Offset
})
diff --git a/weed/filer/filechunks_read.go b/weed/filer/filechunks_read.go
index 96ea92afb..8a15f6e7a 100644
--- a/weed/filer/filechunks_read.go
+++ b/weed/filer/filechunks_read.go
@@ -11,13 +11,13 @@ func readResolvedChunks(chunks []*filer_pb.FileChunk) (visibles []VisibleInterval) {
for _, chunk := range chunks {
points = append(points, &Point{
x: chunk.Offset,
- ts: chunk.Mtime,
+ ts: chunk.ModifiedTsNs,
chunk: chunk,
isStart: true,
})
points = append(points, &Point{
x: chunk.Offset + int64(chunk.Size),
- ts: chunk.Mtime,
+ ts: chunk.ModifiedTsNs,
chunk: chunk,
isStart: false,
})
@@ -98,7 +98,7 @@ func addToVisibles(visibles []VisibleInterval, prevX int64, startPoint *Point, p
start: prevX,
stop: point.x,
fileId: chunk.GetFileIdString(),
- modifiedTime: chunk.Mtime,
+ modifiedTsNs: chunk.ModifiedTsNs,
chunkOffset: prevX - chunk.Offset,
chunkSize: chunk.Size,
cipherKey: chunk.CipherKey,
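
readResolvedChunks is a sweep line: each chunk contributes a start point and a stop point, both stamped with its ModifiedTsNs, and wherever chunks overlap the largest timestamp wins. A minimal sketch of its use inside package filer (hypothetical values, mirroring the tests that follow):

	chunks := []*filer_pb.FileChunk{
		{FileId: "a", Offset: 0, Size: 100, ModifiedTsNs: 1},  // covers [0,100)
		{FileId: "b", Offset: 50, Size: 100, ModifiedTsNs: 2}, // covers [50,150), written later
	}
	for _, v := range readResolvedChunks(chunks) {
		fmt.Printf("[%d,%d) %s %d\n", v.start, v.stop, v.fileId, v.modifiedTsNs)
	}
	// prints:
	// [0,50) a 1
	// [50,150) b 2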
diff --git a/weed/filer/filechunks_read_test.go b/weed/filer/filechunks_read_test.go
index b3fa52ebd..d4bfca72e 100644
--- a/weed/filer/filechunks_read_test.go
+++ b/weed/filer/filechunks_read_test.go
@@ -11,41 +11,41 @@ func TestReadResolvedChunks(t *testing.T) {
chunks := []*filer_pb.FileChunk{
{
- FileId: "a",
- Offset: 0,
- Size: 100,
- Mtime: 1,
+ FileId: "a",
+ Offset: 0,
+ Size: 100,
+ ModifiedTsNs: 1,
},
{
- FileId: "b",
- Offset: 50,
- Size: 100,
- Mtime: 2,
+ FileId: "b",
+ Offset: 50,
+ Size: 100,
+ ModifiedTsNs: 2,
},
{
- FileId: "c",
- Offset: 200,
- Size: 50,
- Mtime: 3,
+ FileId: "c",
+ Offset: 200,
+ Size: 50,
+ ModifiedTsNs: 3,
},
{
- FileId: "d",
- Offset: 250,
- Size: 50,
- Mtime: 4,
+ FileId: "d",
+ Offset: 250,
+ Size: 50,
+ ModifiedTsNs: 4,
},
{
- FileId: "e",
- Offset: 175,
- Size: 100,
- Mtime: 5,
+ FileId: "e",
+ Offset: 175,
+ Size: 100,
+ ModifiedTsNs: 5,
},
}
visibles := readResolvedChunks(chunks)
for _, visible := range visibles {
- fmt.Printf("[%d,%d) %s %d\n", visible.start, visible.stop, visible.fileId, visible.modifiedTime)
+ fmt.Printf("[%d,%d) %s %d\n", visible.start, visible.stop, visible.fileId, visible.modifiedTsNs)
}
}
@@ -76,8 +76,8 @@ func TestRandomizedReadResolvedChunks(t *testing.T) {
for _, visible := range visibles {
for i := visible.start; i < visible.stop; i++ {
- if array[i] != visible.modifiedTime {
- t.Errorf("position %d expected ts %d actual ts %d", i, array[i], visible.modifiedTime)
+ if array[i] != visible.modifiedTsNs {
+ t.Errorf("position %d expected ts %d actual ts %d", i, array[i], visible.modifiedTsNs)
}
}
}
@@ -92,10 +92,10 @@ func randomWrite(array []int64, start int64, size int64, ts int64) *filer_pb.FileChunk {
}
// fmt.Printf("write [%d,%d) %d\n", start, start+size, ts)
return &filer_pb.FileChunk{
- FileId: "",
- Offset: start,
- Size: uint64(size),
- Mtime: ts,
+ FileId: "",
+ Offset: start,
+ Size: uint64(size),
+ ModifiedTsNs: ts,
}
}
@@ -105,10 +105,10 @@ func TestSequentialReadResolvedChunks(t *testing.T) {
var chunks []*filer_pb.FileChunk
for ts := int64(0); ts < 13; ts++ {
chunks = append(chunks, &filer_pb.FileChunk{
- FileId: "",
- Offset: chunkSize * ts,
- Size: uint64(chunkSize),
- Mtime: 1,
+ FileId: "",
+ Offset: chunkSize * ts,
+ Size: uint64(chunkSize),
+ ModifiedTsNs: 1,
})
}
@@ -122,89 +122,89 @@ func TestActualReadResolvedChunks(t *testing.T) {
chunks := []*filer_pb.FileChunk{
{
- FileId: "5,e7b96fef48",
- Offset: 0,
- Size: 2097152,
- Mtime: 1634447487595823000,
+ FileId: "5,e7b96fef48",
+ Offset: 0,
+ Size: 2097152,
+ ModifiedTsNs: 1634447487595823000,
},
{
- FileId: "5,e5562640b9",
- Offset: 2097152,
- Size: 2097152,
- Mtime: 1634447487595826000,
+ FileId: "5,e5562640b9",
+ Offset: 2097152,
+ Size: 2097152,
+ ModifiedTsNs: 1634447487595826000,
},
{
- FileId: "5,df033e0fe4",
- Offset: 4194304,
- Size: 2097152,
- Mtime: 1634447487595827000,
+ FileId: "5,df033e0fe4",
+ Offset: 4194304,
+ Size: 2097152,
+ ModifiedTsNs: 1634447487595827000,
},
{
- FileId: "7,eb08148a9b",
- Offset: 6291456,
- Size: 2097152,
- Mtime: 1634447487595827000,
+ FileId: "7,eb08148a9b",
+ Offset: 6291456,
+ Size: 2097152,
+ ModifiedTsNs: 1634447487595827000,
},
{
- FileId: "7,e0f92d1604",
- Offset: 8388608,
- Size: 2097152,
- Mtime: 1634447487595828000,
+ FileId: "7,e0f92d1604",
+ Offset: 8388608,
+ Size: 2097152,
+ ModifiedTsNs: 1634447487595828000,
},
{
- FileId: "7,e33cb63262",
- Offset: 10485760,
- Size: 2097152,
- Mtime: 1634447487595828000,
+ FileId: "7,e33cb63262",
+ Offset: 10485760,
+ Size: 2097152,
+ ModifiedTsNs: 1634447487595828000,
},
{
- FileId: "5,ea98e40e93",
- Offset: 12582912,
- Size: 2097152,
- Mtime: 1634447487595829000,
+ FileId: "5,ea98e40e93",
+ Offset: 12582912,
+ Size: 2097152,
+ ModifiedTsNs: 1634447487595829000,
},
{
- FileId: "5,e165661172",
- Offset: 14680064,
- Size: 2097152,
- Mtime: 1634447487595829000,
+ FileId: "5,e165661172",
+ Offset: 14680064,
+ Size: 2097152,
+ ModifiedTsNs: 1634447487595829000,
},
{
- FileId: "3,e692097486",
- Offset: 16777216,
- Size: 2097152,
- Mtime: 1634447487595830000,
+ FileId: "3,e692097486",
+ Offset: 16777216,
+ Size: 2097152,
+ ModifiedTsNs: 1634447487595830000,
},
{
- FileId: "3,e28e2e3cbd",
- Offset: 18874368,
- Size: 2097152,
- Mtime: 1634447487595830000,
+ FileId: "3,e28e2e3cbd",
+ Offset: 18874368,
+ Size: 2097152,
+ ModifiedTsNs: 1634447487595830000,
},
{
- FileId: "3,e443974d4e",
- Offset: 20971520,
- Size: 2097152,
- Mtime: 1634447487595830000,
+ FileId: "3,e443974d4e",
+ Offset: 20971520,
+ Size: 2097152,
+ ModifiedTsNs: 1634447487595830000,
},
{
- FileId: "2,e815bed597",
- Offset: 23068672,
- Size: 2097152,
- Mtime: 1634447487595831000,
+ FileId: "2,e815bed597",
+ Offset: 23068672,
+ Size: 2097152,
+ ModifiedTsNs: 1634447487595831000,
},
{
- FileId: "5,e94715199e",
- Offset: 25165824,
- Size: 1974736,
- Mtime: 1634447487595832000,
+ FileId: "5,e94715199e",
+ Offset: 25165824,
+ Size: 1974736,
+ ModifiedTsNs: 1634447487595832000,
},
}
visibles := readResolvedChunks(chunks)
for _, visible := range visibles {
- fmt.Printf("[%d,%d) %s %d\n", visible.start, visible.stop, visible.fileId, visible.modifiedTime)
+ fmt.Printf("[%d,%d) %s %d\n", visible.start, visible.stop, visible.fileId, visible.modifiedTsNs)
}
}
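
Worked by hand from the offsets and timestamps in TestReadResolvedChunks above: the newest chunk e (ts 5) swallows c entirely and trims d, and nothing covers [150,175), so the loop should print:

	[0,50) a 1
	[50,150) b 2
	[175,275) e 5
	[275,300) d 4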
diff --git a/weed/filer/filechunks_test.go b/weed/filer/filechunks_test.go
index c3d3d51c6..d29e0a600 100644
--- a/weed/filer/filechunks_test.go
+++ b/weed/filer/filechunks_test.go
@@ -15,10 +15,10 @@ import (
func TestCompactFileChunks(t *testing.T) {
chunks := []*filer_pb.FileChunk{
- {Offset: 10, Size: 100, FileId: "abc", Mtime: 50},
- {Offset: 100, Size: 100, FileId: "def", Mtime: 100},
- {Offset: 200, Size: 100, FileId: "ghi", Mtime: 200},
- {Offset: 110, Size: 200, FileId: "jkl", Mtime: 300},
+ {Offset: 10, Size: 100, FileId: "abc", ModifiedTsNs: 50},
+ {Offset: 100, Size: 100, FileId: "def", ModifiedTsNs: 100},
+ {Offset: 200, Size: 100, FileId: "ghi", ModifiedTsNs: 200},
+ {Offset: 110, Size: 200, FileId: "jkl", ModifiedTsNs: 300},
}
compacted, garbage := CompactFileChunks(nil, chunks)
@@ -35,22 +35,22 @@ func TestCompactFileChunks(t *testing.T) {
func TestCompactFileChunks2(t *testing.T) {
chunks := []*filer_pb.FileChunk{
- {Offset: 0, Size: 100, FileId: "abc", Mtime: 50},
- {Offset: 100, Size: 100, FileId: "def", Mtime: 100},
- {Offset: 200, Size: 100, FileId: "ghi", Mtime: 200},
- {Offset: 0, Size: 100, FileId: "abcf", Mtime: 300},
- {Offset: 50, Size: 100, FileId: "fhfh", Mtime: 400},
- {Offset: 100, Size: 100, FileId: "yuyu", Mtime: 500},
+ {Offset: 0, Size: 100, FileId: "abc", ModifiedTsNs: 50},
+ {Offset: 100, Size: 100, FileId: "def", ModifiedTsNs: 100},
+ {Offset: 200, Size: 100, FileId: "ghi", ModifiedTsNs: 200},
+ {Offset: 0, Size: 100, FileId: "abcf", ModifiedTsNs: 300},
+ {Offset: 50, Size: 100, FileId: "fhfh", ModifiedTsNs: 400},
+ {Offset: 100, Size: 100, FileId: "yuyu", ModifiedTsNs: 500},
}
k := 3
for n := 0; n < k; n++ {
chunks = append(chunks, &filer_pb.FileChunk{
- Offset: int64(n * 100), Size: 100, FileId: fmt.Sprintf("fileId%d", n), Mtime: int64(n),
+ Offset: int64(n * 100), Size: 100, FileId: fmt.Sprintf("fileId%d", n), ModifiedTsNs: int64(n),
})
chunks = append(chunks, &filer_pb.FileChunk{
- Offset: int64(n * 50), Size: 100, FileId: fmt.Sprintf("fileId%d", n+k), Mtime: int64(n + k),
+ Offset: int64(n * 50), Size: 100, FileId: fmt.Sprintf("fileId%d", n+k), ModifiedTsNs: int64(n + k),
})
}
@@ -78,11 +78,11 @@ func TestRandomFileChunksCompact(t *testing.T) {
stop = start + 16
}
chunk := &filer_pb.FileChunk{
- FileId: strconv.Itoa(i),
- Offset: int64(start),
- Size: uint64(stop - start),
- Mtime: int64(i),
- Fid: &filer_pb.FileId{FileKey: uint64(i)},
+ FileId: strconv.Itoa(i),
+ Offset: int64(start),
+ Size: uint64(stop - start),
+ ModifiedTsNs: int64(i),
+ Fid: &filer_pb.FileId{FileKey: uint64(i)},
}
chunks = append(chunks, chunk)
for x := start; x < stop; x++ {
@@ -109,9 +109,9 @@ func TestIntervalMerging(t *testing.T) {
// case 0: normal
{
Chunks: []*filer_pb.FileChunk{
- {Offset: 0, Size: 100, FileId: "abc", Mtime: 123},
- {Offset: 100, Size: 100, FileId: "asdf", Mtime: 134},
- {Offset: 200, Size: 100, FileId: "fsad", Mtime: 353},
+ {Offset: 0, Size: 100, FileId: "abc", ModifiedTsNs: 123},
+ {Offset: 100, Size: 100, FileId: "asdf", ModifiedTsNs: 134},
+ {Offset: 200, Size: 100, FileId: "fsad", ModifiedTsNs: 353},
},
Expected: []*VisibleInterval{
{start: 0, stop: 100, fileId: "abc"},
@@ -122,8 +122,8 @@ func TestIntervalMerging(t *testing.T) {
// case 1: updates overwrite full chunks
{
Chunks: []*filer_pb.FileChunk{
- {Offset: 0, Size: 100, FileId: "abc", Mtime: 123},
- {Offset: 0, Size: 200, FileId: "asdf", Mtime: 134},
+ {Offset: 0, Size: 100, FileId: "abc", ModifiedTsNs: 123},
+ {Offset: 0, Size: 200, FileId: "asdf", ModifiedTsNs: 134},
},
Expected: []*VisibleInterval{
{start: 0, stop: 200, fileId: "asdf"},
@@ -132,8 +132,8 @@ func TestIntervalMerging(t *testing.T) {
// case 2: updates overwrite part of previous chunks
{
Chunks: []*filer_pb.FileChunk{
- {Offset: 0, Size: 100, FileId: "a", Mtime: 123},
- {Offset: 0, Size: 70, FileId: "b", Mtime: 134},
+ {Offset: 0, Size: 100, FileId: "a", ModifiedTsNs: 123},
+ {Offset: 0, Size: 70, FileId: "b", ModifiedTsNs: 134},
},
Expected: []*VisibleInterval{
{start: 0, stop: 70, fileId: "b"},
@@ -143,9 +143,9 @@ func TestIntervalMerging(t *testing.T) {
// case 3: updates overwrite full chunks
{
Chunks: []*filer_pb.FileChunk{
- {Offset: 0, Size: 100, FileId: "abc", Mtime: 123},
- {Offset: 0, Size: 200, FileId: "asdf", Mtime: 134},
- {Offset: 50, Size: 250, FileId: "xxxx", Mtime: 154},
+ {Offset: 0, Size: 100, FileId: "abc", ModifiedTsNs: 123},
+ {Offset: 0, Size: 200, FileId: "asdf", ModifiedTsNs: 134},
+ {Offset: 50, Size: 250, FileId: "xxxx", ModifiedTsNs: 154},
},
Expected: []*VisibleInterval{
{start: 0, stop: 50, fileId: "asdf"},
@@ -155,9 +155,9 @@ func TestIntervalMerging(t *testing.T) {
// case 4: updates far away from prev chunks
{
Chunks: []*filer_pb.FileChunk{
- {Offset: 0, Size: 100, FileId: "abc", Mtime: 123},
- {Offset: 0, Size: 200, FileId: "asdf", Mtime: 134},
- {Offset: 250, Size: 250, FileId: "xxxx", Mtime: 154},
+ {Offset: 0, Size: 100, FileId: "abc", ModifiedTsNs: 123},
+ {Offset: 0, Size: 200, FileId: "asdf", ModifiedTsNs: 134},
+ {Offset: 250, Size: 250, FileId: "xxxx", ModifiedTsNs: 154},
},
Expected: []*VisibleInterval{
{start: 0, stop: 200, fileId: "asdf"},
@@ -167,10 +167,10 @@ func TestIntervalMerging(t *testing.T) {
// case 5: updates overwrite full chunks
{
Chunks: []*filer_pb.FileChunk{
- {Offset: 0, Size: 100, FileId: "a", Mtime: 123},
- {Offset: 0, Size: 200, FileId: "d", Mtime: 184},
- {Offset: 70, Size: 150, FileId: "c", Mtime: 143},
- {Offset: 80, Size: 100, FileId: "b", Mtime: 134},
+ {Offset: 0, Size: 100, FileId: "a", ModifiedTsNs: 123},
+ {Offset: 0, Size: 200, FileId: "d", ModifiedTsNs: 184},
+ {Offset: 70, Size: 150, FileId: "c", ModifiedTsNs: 143},
+ {Offset: 80, Size: 100, FileId: "b", ModifiedTsNs: 134},
},
Expected: []*VisibleInterval{
{start: 0, stop: 200, fileId: "d"},
@@ -180,9 +180,9 @@ func TestIntervalMerging(t *testing.T) {
// case 6: same updates
{
Chunks: []*filer_pb.FileChunk{
- {Offset: 0, Size: 100, FileId: "abc", Fid: &filer_pb.FileId{FileKey: 1}, Mtime: 123},
- {Offset: 0, Size: 100, FileId: "axf", Fid: &filer_pb.FileId{FileKey: 2}, Mtime: 123},
- {Offset: 0, Size: 100, FileId: "xyz", Fid: &filer_pb.FileId{FileKey: 3}, Mtime: 123},
+ {Offset: 0, Size: 100, FileId: "abc", Fid: &filer_pb.FileId{FileKey: 1}, ModifiedTsNs: 123},
+ {Offset: 0, Size: 100, FileId: "axf", Fid: &filer_pb.FileId{FileKey: 2}, ModifiedTsNs: 123},
+ {Offset: 0, Size: 100, FileId: "xyz", Fid: &filer_pb.FileId{FileKey: 3}, ModifiedTsNs: 123},
},
Expected: []*VisibleInterval{
{start: 0, stop: 100, fileId: "xyz"},
@@ -191,12 +191,12 @@ func TestIntervalMerging(t *testing.T) {
// case 7: real updates
{
Chunks: []*filer_pb.FileChunk{
- {Offset: 0, Size: 2097152, FileId: "7,0294cbb9892b", Mtime: 123},
- {Offset: 0, Size: 3145728, FileId: "3,029565bf3092", Mtime: 130},
- {Offset: 2097152, Size: 3145728, FileId: "6,029632f47ae2", Mtime: 140},
- {Offset: 5242880, Size: 3145728, FileId: "2,029734c5aa10", Mtime: 150},
- {Offset: 8388608, Size: 3145728, FileId: "5,02982f80de50", Mtime: 160},
- {Offset: 11534336, Size: 2842193, FileId: "7,0299ad723803", Mtime: 170},
+ {Offset: 0, Size: 2097152, FileId: "7,0294cbb9892b", ModifiedTsNs: 123},
+ {Offset: 0, Size: 3145728, FileId: "3,029565bf3092", ModifiedTsNs: 130},
+ {Offset: 2097152, Size: 3145728, FileId: "6,029632f47ae2", ModifiedTsNs: 140},
+ {Offset: 5242880, Size: 3145728, FileId: "2,029734c5aa10", ModifiedTsNs: 150},
+ {Offset: 8388608, Size: 3145728, FileId: "5,02982f80de50", ModifiedTsNs: 160},
+ {Offset: 11534336, Size: 2842193, FileId: "7,0299ad723803", ModifiedTsNs: 170},
},
Expected: []*VisibleInterval{
{start: 0, stop: 2097152, fileId: "3,029565bf3092"},
@@ -209,11 +209,11 @@ func TestIntervalMerging(t *testing.T) {
// case 8: real bug
{
Chunks: []*filer_pb.FileChunk{
- {Offset: 0, Size: 77824, FileId: "4,0b3df938e301", Mtime: 123},
- {Offset: 471040, Size: 472225 - 471040, FileId: "6,0b3e0650019c", Mtime: 130},
- {Offset: 77824, Size: 208896 - 77824, FileId: "4,0b3f0c7202f0", Mtime: 140},
- {Offset: 208896, Size: 339968 - 208896, FileId: "2,0b4031a72689", Mtime: 150},
- {Offset: 339968, Size: 471040 - 339968, FileId: "3,0b416a557362", Mtime: 160},
+ {Offset: 0, Size: 77824, FileId: "4,0b3df938e301", ModifiedTsNs: 123},
+ {Offset: 471040, Size: 472225 - 471040, FileId: "6,0b3e0650019c", ModifiedTsNs: 130},
+ {Offset: 77824, Size: 208896 - 77824, FileId: "4,0b3f0c7202f0", ModifiedTsNs: 140},
+ {Offset: 208896, Size: 339968 - 208896, FileId: "2,0b4031a72689", ModifiedTsNs: 150},
+ {Offset: 339968, Size: 471040 - 339968, FileId: "3,0b416a557362", ModifiedTsNs: 160},
},
Expected: []*VisibleInterval{
{start: 0, stop: 77824, fileId: "4,0b3df938e301"},
@@ -269,9 +269,9 @@ func TestChunksReading(t *testing.T) {
// case 0: normal
{
Chunks: []*filer_pb.FileChunk{
- {Offset: 0, Size: 100, FileId: "abc", Mtime: 123},
- {Offset: 100, Size: 100, FileId: "asdf", Mtime: 134},
- {Offset: 200, Size: 100, FileId: "fsad", Mtime: 353},
+ {Offset: 0, Size: 100, FileId: "abc", ModifiedTsNs: 123},
+ {Offset: 100, Size: 100, FileId: "asdf", ModifiedTsNs: 134},
+ {Offset: 200, Size: 100, FileId: "fsad", ModifiedTsNs: 353},
},
Offset: 0,
Size: 250,
@@ -284,8 +284,8 @@ func TestChunksReading(t *testing.T) {
// case 1: updates overwrite full chunks
{
Chunks: []*filer_pb.FileChunk{
- {Offset: 0, Size: 100, FileId: "abc", Mtime: 123},
- {Offset: 0, Size: 200, FileId: "asdf", Mtime: 134},
+ {Offset: 0, Size: 100, FileId: "abc", ModifiedTsNs: 123},
+ {Offset: 0, Size: 200, FileId: "asdf", ModifiedTsNs: 134},
},
Offset: 50,
Size: 100,
@@ -296,8 +296,8 @@ func TestChunksReading(t *testing.T) {
// case 2: updates overwrite part of previous chunks
{
Chunks: []*filer_pb.FileChunk{
- {Offset: 3, Size: 100, FileId: "a", Mtime: 123},
- {Offset: 10, Size: 50, FileId: "b", Mtime: 134},
+ {Offset: 3, Size: 100, FileId: "a", ModifiedTsNs: 123},
+ {Offset: 10, Size: 50, FileId: "b", ModifiedTsNs: 134},
},
Offset: 30,
Size: 40,
@@ -309,9 +309,9 @@ func TestChunksReading(t *testing.T) {
// case 3: updates overwrite full chunks
{
Chunks: []*filer_pb.FileChunk{
- {Offset: 0, Size: 100, FileId: "abc", Mtime: 123},
- {Offset: 0, Size: 200, FileId: "asdf", Mtime: 134},
- {Offset: 50, Size: 250, FileId: "xxxx", Mtime: 154},
+ {Offset: 0, Size: 100, FileId: "abc", ModifiedTsNs: 123},
+ {Offset: 0, Size: 200, FileId: "asdf", ModifiedTsNs: 134},
+ {Offset: 50, Size: 250, FileId: "xxxx", ModifiedTsNs: 154},
},
Offset: 0,
Size: 200,
@@ -323,9 +323,9 @@ func TestChunksReading(t *testing.T) {
// case 4: updates far away from prev chunks
{
Chunks: []*filer_pb.FileChunk{
- {Offset: 0, Size: 100, FileId: "abc", Mtime: 123},
- {Offset: 0, Size: 200, FileId: "asdf", Mtime: 134},
- {Offset: 250, Size: 250, FileId: "xxxx", Mtime: 154},
+ {Offset: 0, Size: 100, FileId: "abc", ModifiedTsNs: 123},
+ {Offset: 0, Size: 200, FileId: "asdf", ModifiedTsNs: 134},
+ {Offset: 250, Size: 250, FileId: "xxxx", ModifiedTsNs: 154},
},
Offset: 0,
Size: 400,
@@ -337,10 +337,10 @@ func TestChunksReading(t *testing.T) {
// case 5: updates overwrite full chunks
{
Chunks: []*filer_pb.FileChunk{
- {Offset: 0, Size: 100, FileId: "a", Mtime: 123},
- {Offset: 0, Size: 200, FileId: "c", Mtime: 184},
- {Offset: 70, Size: 150, FileId: "b", Mtime: 143},
- {Offset: 80, Size: 100, FileId: "xxxx", Mtime: 134},
+ {Offset: 0, Size: 100, FileId: "a", ModifiedTsNs: 123},
+ {Offset: 0, Size: 200, FileId: "c", ModifiedTsNs: 184},
+ {Offset: 70, Size: 150, FileId: "b", ModifiedTsNs: 143},
+ {Offset: 80, Size: 100, FileId: "xxxx", ModifiedTsNs: 134},
},
Offset: 0,
Size: 220,
@@ -352,9 +352,9 @@ func TestChunksReading(t *testing.T) {
// case 6: same updates
{
Chunks: []*filer_pb.FileChunk{
- {Offset: 0, Size: 100, FileId: "abc", Fid: &filer_pb.FileId{FileKey: 1}, Mtime: 123},
- {Offset: 0, Size: 100, FileId: "def", Fid: &filer_pb.FileId{FileKey: 2}, Mtime: 123},
- {Offset: 0, Size: 100, FileId: "xyz", Fid: &filer_pb.FileId{FileKey: 3}, Mtime: 123},
+ {Offset: 0, Size: 100, FileId: "abc", Fid: &filer_pb.FileId{FileKey: 1}, ModifiedTsNs: 123},
+ {Offset: 0, Size: 100, FileId: "def", Fid: &filer_pb.FileId{FileKey: 2}, ModifiedTsNs: 123},
+ {Offset: 0, Size: 100, FileId: "xyz", Fid: &filer_pb.FileId{FileKey: 3}, ModifiedTsNs: 123},
},
Offset: 0,
Size: 100,
@@ -365,9 +365,9 @@ func TestChunksReading(t *testing.T) {
// case 7: edge cases
{
Chunks: []*filer_pb.FileChunk{
- {Offset: 0, Size: 100, FileId: "abc", Mtime: 123},
- {Offset: 100, Size: 100, FileId: "asdf", Mtime: 134},
- {Offset: 200, Size: 100, FileId: "fsad", Mtime: 353},
+ {Offset: 0, Size: 100, FileId: "abc", ModifiedTsNs: 123},
+ {Offset: 100, Size: 100, FileId: "asdf", ModifiedTsNs: 134},
+ {Offset: 200, Size: 100, FileId: "fsad", ModifiedTsNs: 353},
},
Offset: 0,
Size: 200,
@@ -379,9 +379,9 @@ func TestChunksReading(t *testing.T) {
// case 8: edge cases
{
Chunks: []*filer_pb.FileChunk{
- {Offset: 0, Size: 100, FileId: "abc", Mtime: 123},
- {Offset: 90, Size: 200, FileId: "asdf", Mtime: 134},
- {Offset: 190, Size: 300, FileId: "fsad", Mtime: 353},
+ {Offset: 0, Size: 100, FileId: "abc", ModifiedTsNs: 123},
+ {Offset: 90, Size: 200, FileId: "asdf", ModifiedTsNs: 134},
+ {Offset: 190, Size: 300, FileId: "fsad", ModifiedTsNs: 353},
},
Offset: 0,
Size: 300,
@@ -394,12 +394,12 @@ func TestChunksReading(t *testing.T) {
// case 9: edge cases
{
Chunks: []*filer_pb.FileChunk{
- {Offset: 0, Size: 43175947, FileId: "2,111fc2cbfac1", Mtime: 1},
- {Offset: 43175936, Size: 52981771 - 43175936, FileId: "2,112a36ea7f85", Mtime: 2},
- {Offset: 52981760, Size: 72564747 - 52981760, FileId: "4,112d5f31c5e7", Mtime: 3},
- {Offset: 72564736, Size: 133255179 - 72564736, FileId: "1,113245f0cdb6", Mtime: 4},
- {Offset: 133255168, Size: 137269259 - 133255168, FileId: "3,1141a70733b5", Mtime: 5},
- {Offset: 137269248, Size: 153578836 - 137269248, FileId: "1,114201d5bbdb", Mtime: 6},
+ {Offset: 0, Size: 43175947, FileId: "2,111fc2cbfac1", ModifiedTsNs: 1},
+ {Offset: 43175936, Size: 52981771 - 43175936, FileId: "2,112a36ea7f85", ModifiedTsNs: 2},
+ {Offset: 52981760, Size: 72564747 - 52981760, FileId: "4,112d5f31c5e7", ModifiedTsNs: 3},
+ {Offset: 72564736, Size: 133255179 - 72564736, FileId: "1,113245f0cdb6", ModifiedTsNs: 4},
+ {Offset: 133255168, Size: 137269259 - 133255168, FileId: "3,1141a70733b5", ModifiedTsNs: 5},
+ {Offset: 137269248, Size: 153578836 - 137269248, FileId: "1,114201d5bbdb", ModifiedTsNs: 6},
},
Offset: 0,
Size: 153578836,
@@ -455,10 +455,10 @@ func BenchmarkCompactFileChunks(b *testing.B) {
for n := 0; n < k; n++ {
chunks = append(chunks, &filer_pb.FileChunk{
- Offset: int64(n * 100), Size: 100, FileId: fmt.Sprintf("fileId%d", n), Mtime: int64(n),
+ Offset: int64(n * 100), Size: 100, FileId: fmt.Sprintf("fileId%d", n), ModifiedTsNs: int64(n),
})
chunks = append(chunks, &filer_pb.FileChunk{
- Offset: int64(n * 50), Size: 100, FileId: fmt.Sprintf("fileId%d", n+k), Mtime: int64(n + k),
+ Offset: int64(n * 50), Size: 100, FileId: fmt.Sprintf("fileId%d", n+k), ModifiedTsNs: int64(n + k),
})
}
diff --git a/weed/filer/filer_notify_test.go b/weed/filer/filer_notify_test.go
index b85b4c410..9ad58629a 100644
--- a/weed/filer/filer_notify_test.go
+++ b/weed/filer/filer_notify_test.go
@@ -26,7 +26,7 @@ func TestProtoMarshal(t *testing.T) {
FileId: "234,2423423422",
Offset: 234234,
Size: 234,
- Mtime: 12312423,
+ ModifiedTsNs: 12312423,
ETag: "2342342354",
SourceFileId: "23234,2342342342",
},
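
The hunk above sits inside TestProtoMarshal's chunk literal. A self-contained sketch of the round trip it exercises, reduced to a single chunk (a hypothetical test body; assumes google.golang.org/protobuf/proto, which the generated filer_pb types satisfy):

	chunk := &filer_pb.FileChunk{
		FileId:       "234,2423423422",
		Offset:       234234,
		Size:         234,
		ModifiedTsNs: 12312423,
		ETag:         "2342342354",
		SourceFileId: "23234,2342342342",
	}
	data, err := proto.Marshal(chunk)
	if err != nil {
		t.Fatalf("marshal: %v", err)
	}
	decoded := &filer_pb.FileChunk{}
	if err := proto.Unmarshal(data, decoded); err != nil {
		t.Fatalf("unmarshal: %v", err)
	}
	if decoded.ModifiedTsNs != chunk.ModifiedTsNs {
		t.Errorf("ModifiedTsNs changed in round trip: got %d, want %d", decoded.ModifiedTsNs, chunk.ModifiedTsNs)
	}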