refactor filer proto chunk variable from mtime to modified_ts_ns
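For context, here is a minimal sketch of what a call site looks like after the rename. It assumes the generated Go struct is filer_pb.FileChunk from github.com/seaweedfs/seaweedfs/weed/pb/filer_pb (as in the test below) and that the renamed modified_ts_ns field carries a Unix timestamp in nanoseconds, as the name suggests; only the field rename itself is taken from this commit.

package main

import (
	"fmt"
	"time"

	"github.com/seaweedfs/seaweedfs/weed/pb/filer_pb"
)

func main() {
	// Field names FileId/Offset/Size/ModifiedTsNs are taken from the test
	// fixtures below; the example file id and the nanosecond interpretation
	// of ModifiedTsNs are assumptions, not part of this commit.
	chunk := &filer_pb.FileChunk{
		FileId:       "3,01637037d6", // hypothetical "volumeId,needleId" file id
		Offset:       0,
		Size:         3,
		ModifiedTsNs: time.Now().UnixNano(), // previously the Mtime field
	}
	fmt.Println(chunk.ModifiedTsNs)
}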
@@ -17,14 +17,14 @@ func TestDoMinusChunks(t *testing.T) {
 	// clusterA append a new line and then clusterB also append a new line
 	// clusterA append a new line again
 	chunksInA := []*filer_pb.FileChunk{
-		{Offset: 0, Size: 3, FileId: "11", Mtime: 100},
-		{Offset: 3, Size: 3, FileId: "22", SourceFileId: "2", Mtime: 200},
-		{Offset: 6, Size: 3, FileId: "33", Mtime: 300},
+		{Offset: 0, Size: 3, FileId: "11", ModifiedTsNs: 100},
+		{Offset: 3, Size: 3, FileId: "22", SourceFileId: "2", ModifiedTsNs: 200},
+		{Offset: 6, Size: 3, FileId: "33", ModifiedTsNs: 300},
 	}
 	chunksInB := []*filer_pb.FileChunk{
-		{Offset: 0, Size: 3, FileId: "1", SourceFileId: "11", Mtime: 100},
-		{Offset: 3, Size: 3, FileId: "2", Mtime: 200},
-		{Offset: 6, Size: 3, FileId: "3", SourceFileId: "33", Mtime: 300},
+		{Offset: 0, Size: 3, FileId: "1", SourceFileId: "11", ModifiedTsNs: 100},
+		{Offset: 3, Size: 3, FileId: "2", ModifiedTsNs: 200},
+		{Offset: 6, Size: 3, FileId: "3", SourceFileId: "33", ModifiedTsNs: 300},
 	}

 	// clusterB using command "echo 'content' > hello.txt" to overwrite file
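The fixtures above model two clusters syncing the same file: SourceFileId records which chunk on the other cluster a chunk was replicated from. For intuition only, the sketch below treats a "minus" of two chunk lists as a set difference keyed on file ids, where a chunk in as counts as already present in bs if either side links the two through FileId/SourceFileId. minusChunks and its package are hypothetical and are not the repository's DoMinusChunks implementation.

package chunksketch

import "github.com/seaweedfs/seaweedfs/weed/pb/filer_pb"

// minusChunks drops a chunk from as when bs already represents it: some b has
// the same FileId, some b was replicated from it (b.SourceFileId), or the a
// chunk itself was replicated from some b (a.SourceFileId).
func minusChunks(as, bs []*filer_pb.FileChunk) (delta []*filer_pb.FileChunk) {
	known := make(map[string]bool)
	for _, b := range bs {
		known[b.FileId] = true
		if b.SourceFileId != "" {
			known[b.SourceFileId] = true
		}
	}
	for _, a := range as {
		if known[a.FileId] || (a.SourceFileId != "" && known[a.SourceFileId]) {
			continue // already represented in bs
		}
		delta = append(delta, a)
	}
	return delta
}

Applied to chunksInA and chunksInB above, every chunk pairs up across the two lists under this keying, which matches the comment's scenario of both clusters having seen all three appends.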
@@ -50,17 +50,17 @@ func TestDoMinusChunks(t *testing.T) {
 func TestCompactFileChunksRealCase(t *testing.T) {

 	chunks := []*filer_pb.FileChunk{
-		{FileId: "2,512f31f2c0700a", Offset: 0, Size: 25 - 0, Mtime: 5320497},
-		{FileId: "6,512f2c2e24e9e8", Offset: 868352, Size: 917585 - 868352, Mtime: 5320492},
-		{FileId: "7,514468dd5954ca", Offset: 884736, Size: 901120 - 884736, Mtime: 5325928},
-		{FileId: "5,5144463173fe77", Offset: 917504, Size: 2297856 - 917504, Mtime: 5325894},
-		{FileId: "4,51444c7ab54e2d", Offset: 2301952, Size: 2367488 - 2301952, Mtime: 5325900},
-		{FileId: "4,514450e643ad22", Offset: 2371584, Size: 2420736 - 2371584, Mtime: 5325904},
-		{FileId: "6,514456a5e9e4d7", Offset: 2449408, Size: 2490368 - 2449408, Mtime: 5325910},
-		{FileId: "3,51444f8d53eebe", Offset: 2494464, Size: 2555904 - 2494464, Mtime: 5325903},
-		{FileId: "4,5144578b097c7e", Offset: 2560000, Size: 2596864 - 2560000, Mtime: 5325911},
-		{FileId: "3,51445500b6b4ac", Offset: 2637824, Size: 2678784 - 2637824, Mtime: 5325909},
-		{FileId: "1,51446285e52a61", Offset: 2695168, Size: 2715648 - 2695168, Mtime: 5325922},
+		{FileId: "2,512f31f2c0700a", Offset: 0, Size: 25 - 0, ModifiedTsNs: 5320497},
+		{FileId: "6,512f2c2e24e9e8", Offset: 868352, Size: 917585 - 868352, ModifiedTsNs: 5320492},
+		{FileId: "7,514468dd5954ca", Offset: 884736, Size: 901120 - 884736, ModifiedTsNs: 5325928},
+		{FileId: "5,5144463173fe77", Offset: 917504, Size: 2297856 - 917504, ModifiedTsNs: 5325894},
+		{FileId: "4,51444c7ab54e2d", Offset: 2301952, Size: 2367488 - 2301952, ModifiedTsNs: 5325900},
+		{FileId: "4,514450e643ad22", Offset: 2371584, Size: 2420736 - 2371584, ModifiedTsNs: 5325904},
+		{FileId: "6,514456a5e9e4d7", Offset: 2449408, Size: 2490368 - 2449408, ModifiedTsNs: 5325910},
+		{FileId: "3,51444f8d53eebe", Offset: 2494464, Size: 2555904 - 2494464, ModifiedTsNs: 5325903},
+		{FileId: "4,5144578b097c7e", Offset: 2560000, Size: 2596864 - 2560000, ModifiedTsNs: 5325911},
+		{FileId: "3,51445500b6b4ac", Offset: 2637824, Size: 2678784 - 2637824, ModifiedTsNs: 5325909},
+		{FileId: "1,51446285e52a61", Offset: 2695168, Size: 2715648 - 2695168, ModifiedTsNs: 5325922},
 	}

 	printChunks("before", chunks)
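In the real-case fixture, each Size is written as stop - start so the absolute byte range a chunk covers stays readable next to its Offset. A small helper in that spirit is sketched below; newChunk and its package are hypothetical, and Size is assumed to be an unsigned integer field in the generated struct.

package chunksketch

import "github.com/seaweedfs/seaweedfs/weed/pb/filer_pb"

// newChunk builds a chunk from an absolute [start, stop) byte range, mirroring
// the "stop - start" arithmetic used in the fixture above.
func newChunk(fileId string, start, stop, modifiedTsNs int64) *filer_pb.FileChunk {
	return &filer_pb.FileChunk{
		FileId:       fileId,
		Offset:       start,
		Size:         uint64(stop - start), // assumes Size is a uint64 field
		ModifiedTsNs: modifiedTsNs,
	}
}

// Example: the first real-case chunk, covering bytes [0, 25).
var firstChunk = newChunk("2,512f31f2c0700a", 0, 25, 5320497)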
@@ -75,7 +75,7 @@ func TestCompactFileChunksRealCase(t *testing.T) {
 func printChunks(name string, chunks []*filer_pb.FileChunk) {
 	slices.SortFunc(chunks, func(a, b *filer_pb.FileChunk) bool {
 		if a.Offset == b.Offset {
-			return a.Mtime < b.Mtime
+			return a.ModifiedTsNs < b.ModifiedTsNs
 		}
 		return a.Offset < b.Offset
 	})
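The bool-valued less function here appears to come from an x/exp-style SortFunc helper rather than the Go 1.21 standard library, whose slices.SortFunc takes an int-valued comparator. For reference only, a standalone sketch of the same ordering, keyed first on Offset and then on the renamed ModifiedTsNs field, using the standard library (the fixture values are illustrative):

package main

import (
	"cmp"
	"fmt"
	"slices"

	"github.com/seaweedfs/seaweedfs/weed/pb/filer_pb"
)

func main() {
	chunks := []*filer_pb.FileChunk{
		{Offset: 3, ModifiedTsNs: 200},
		{Offset: 0, ModifiedTsNs: 300},
		{Offset: 0, ModifiedTsNs: 100},
	}
	// Sort by Offset, breaking ties on ModifiedTsNs, as printChunks does above.
	slices.SortFunc(chunks, func(a, b *filer_pb.FileChunk) int {
		if c := cmp.Compare(a.Offset, b.Offset); c != 0 {
			return c
		}
		return cmp.Compare(a.ModifiedTsNs, b.ModifiedTsNs)
	})
	for _, c := range chunks {
		fmt.Println(c.Offset, c.ModifiedTsNs)
	}
}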