Merge branch 'master' into support_ssd_volume

Chris Lu
2020-12-22 17:44:52 -08:00
24 changed files with 739 additions and 301 deletions

View File

@@ -16,8 +16,9 @@ func init() {
}
type CassandraStore struct {
cluster *gocql.ClusterConfig
session *gocql.Session
cluster *gocql.ClusterConfig
session *gocql.Session
superLargeDirectoryHash map[string]string
}
func (store *CassandraStore) GetName() string {
@@ -30,10 +31,16 @@ func (store *CassandraStore) Initialize(configuration util.Configuration, prefix
configuration.GetStringSlice(prefix+"hosts"),
configuration.GetString(prefix+"username"),
configuration.GetString(prefix+"password"),
configuration.GetStringSlice(prefix+"superLargeDirectories"),
)
}
func (store *CassandraStore) initialize(keyspace string, hosts []string, username string, password string) (err error) {
func (store *CassandraStore) isSuperLargeDirectory(dir string) (dirHash string, isSuperLargeDirectory bool) {
dirHash, isSuperLargeDirectory = store.superLargeDirectoryHash[dir]
return
}
func (store *CassandraStore) initialize(keyspace string, hosts []string, username string, password string, superLargeDirectories []string) (err error) {
store.cluster = gocql.NewCluster(hosts...)
if username != "" && password != "" {
store.cluster.Authenticator = gocql.PasswordAuthenticator{Username: username, Password: password}
@@ -44,6 +51,19 @@ func (store *CassandraStore) initialize(keyspace string, hosts []string, usernam
if err != nil {
glog.V(0).Infof("Failed to open cassandra store, hosts %v, keyspace %s", hosts, keyspace)
}
// set directory hash
store.superLargeDirectoryHash = make(map[string]string)
existingHash := make(map[string]string)
for _, dir := range superLargeDirectories {
// adding dir hash to avoid duplicated names
dirHash := util.Md5String([]byte(dir))[:4]
store.superLargeDirectoryHash[dir] = dirHash
if existingDir, found := existingHash[dirHash]; found {
glog.Fatalf("directory %s has the same hash as %s", dir, existingDir)
}
existingHash[dirHash] = dir
}
return
}
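For reference, a minimal standalone sketch of the 4-character directory hash and the collision check above. It uses crypto/md5 directly and assumes util.Md5String behaves like a hex MD5 digest (if it encodes differently, only the characters change, not the idea); the package, directory values, and dirHash helper are illustrative only.

package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"log"
)

// dirHash mirrors the scheme above: hash the directory path and keep a short,
// stable 4-character prefix that stands in for the full directory name.
func dirHash(dir string) string {
	sum := md5.Sum([]byte(dir))
	return hex.EncodeToString(sum[:])[:4]
}

func main() {
	superLargeDirectories := []string{"/home/user_files", "/tmp/cache"} // hypothetical config values
	existing := make(map[string]string)
	for _, dir := range superLargeDirectories {
		h := dirHash(dir)
		if other, found := existing[h]; found {
			// the store calls glog.Fatalf here: two configured directories
			// must not collapse to the same 4-character prefix
			log.Fatalf("directory %s has the same hash as %s", dir, other)
		}
		existing[h] = dir
		fmt.Printf("%s -> %s\n", dir, h)
	}
}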
@@ -60,6 +80,10 @@ func (store *CassandraStore) RollbackTransaction(ctx context.Context) error {
func (store *CassandraStore) InsertEntry(ctx context.Context, entry *filer.Entry) (err error) {
dir, name := entry.FullPath.DirAndName()
if dirHash, ok := store.isSuperLargeDirectory(dir); ok {
dir, name = dirHash+name, ""
}
meta, err := entry.EncodeAttributesAndChunks()
if err != nil {
return fmt.Errorf("encode %s: %s", entry.FullPath, err)
@@ -86,6 +110,10 @@ func (store *CassandraStore) UpdateEntry(ctx context.Context, entry *filer.Entry
func (store *CassandraStore) FindEntry(ctx context.Context, fullpath util.FullPath) (entry *filer.Entry, err error) {
dir, name := fullpath.DirAndName()
if dirHash, ok := store.isSuperLargeDirectory(dir); ok {
dir, name = dirHash+name, ""
}
var data []byte
if err := store.session.Query(
"SELECT meta FROM filemeta WHERE directory=? AND name=?",
@@ -113,6 +141,9 @@ func (store *CassandraStore) FindEntry(ctx context.Context, fullpath util.FullPa
func (store *CassandraStore) DeleteEntry(ctx context.Context, fullpath util.FullPath) error {
dir, name := fullpath.DirAndName()
if dirHash, ok := store.isSuperLargeDirectory(dir); ok {
dir, name = dirHash+name, ""
}
if err := store.session.Query(
"DELETE FROM filemeta WHERE directory=? AND name=?",
@@ -124,6 +155,9 @@ func (store *CassandraStore) DeleteEntry(ctx context.Context, fullpath util.Full
}
func (store *CassandraStore) DeleteFolderChildren(ctx context.Context, fullpath util.FullPath) error {
if _, ok := store.isSuperLargeDirectory(string(fullpath)); ok {
return nil // filer.ErrUnsupportedSuperLargeDirectoryListing
}
if err := store.session.Query(
"DELETE FROM filemeta WHERE directory=?",
@@ -141,6 +175,10 @@ func (store *CassandraStore) ListDirectoryPrefixedEntries(ctx context.Context, f
func (store *CassandraStore) ListDirectoryEntries(ctx context.Context, fullpath util.FullPath, startFileName string, inclusive bool,
limit int) (entries []*filer.Entry, err error) {
if _, ok := store.isSuperLargeDirectory(string(fullpath)); ok {
return // nil, filer.ErrUnsupportedSuperLargeDirectoryListing
}
cqlStr := "SELECT NAME, meta FROM filemeta WHERE directory=? AND name>? ORDER BY NAME ASC LIMIT ?"
if inclusive {
cqlStr = "SELECT NAME, meta FROM filemeta WHERE directory=? AND name>=? ORDER BY NAME ASC LIMIT ?"
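To make the key scheme above concrete: for a directory configured as super large, children are no longer stored under (directory, name). The directory is replaced by its 4-character hash and the child name is folded into the partition key, so each child gets its own partition key rather than all sharing one, which is also why ListDirectoryEntries and DeleteFolderChildren return early instead of enumerating children. A hedged sketch; the paths, the hash value, and flattenKey are illustrative only:

// flattenKey mirrors InsertEntry/FindEntry/DeleteEntry above for a super large
// directory: the row is keyed by dirHash+name with an empty name.
func flattenKey(dirHash, name string) (directory, rowName string) {
	return dirHash + name, ""
}

// Example: with "/data/images" hashed to "a1b2" (hypothetical), the entry
// "/data/images/photo.jpg" is stored as directory = "a1b2photo.jpg", name = "".
// Children no longer share a directory value, so the store cannot list them.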

View File

@@ -1,10 +1,11 @@
package filer
import (
"os"
"github.com/chrislusf/seaweedfs/weed/glog"
"github.com/spf13/viper"
"os"
"reflect"
"strings"
)
var (
@@ -15,25 +16,67 @@ func (f *Filer) LoadConfiguration(config *viper.Viper) {
validateOneEnabledStore(config)
// load configuration for default filer store
hasDefaultStoreConfigured := false
for _, store := range Stores {
if config.GetBool(store.GetName() + ".enabled") {
store = reflect.New(reflect.ValueOf(store).Elem().Type()).Interface().(FilerStore)
if err := store.Initialize(config, store.GetName()+"."); err != nil {
glog.Fatalf("Failed to initialize store for %s: %+v",
store.GetName(), err)
glog.Fatalf("failed to initialize store for %s: %+v", store.GetName(), err)
}
f.SetStore(store)
glog.V(0).Infof("Configure filer for %s", store.GetName())
return
glog.V(0).Infof("configured filer store to %s", store.GetName())
hasDefaultStoreConfigured = true
break
}
}
println()
println("Supported filer stores are:")
for _, store := range Stores {
println(" " + store.GetName())
if !hasDefaultStoreConfigured {
println()
println("Supported filer stores are:")
for _, store := range Stores {
println(" " + store.GetName())
}
os.Exit(-1)
}
// load path-specific filer store here
// f.Store.AddPathSpecificStore(path, store)
storeNames := make(map[string]FilerStore)
for _, store := range Stores {
storeNames[store.GetName()] = store
}
allKeys := config.AllKeys()
for _, key := range allKeys {
if !strings.HasSuffix(key, ".enabled") {
continue
}
key = key[:len(key)-len(".enabled")]
if !strings.Contains(key, ".") {
continue
}
parts := strings.Split(key, ".")
storeName, storeId := parts[0], parts[1]
store, found := storeNames[storeName]
if !found {
continue
}
store = reflect.New(reflect.ValueOf(store).Elem().Type()).Interface().(FilerStore)
if err := store.Initialize(config, key+"."); err != nil {
glog.Fatalf("Failed to initialize store for %s: %+v", key, err)
}
location := config.GetString(key + ".location")
if location == "" {
glog.Errorf("path-specific filer store needs %s", key+".location")
os.Exit(-1)
}
f.Store.AddPathSpecificStore(location, storeId, store)
glog.V(0).Infof("configure filer %s for %s", store.GetName(), location)
}
os.Exit(-1)
}
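The path-specific store keys follow a "<storeName>.<storeId>.enabled" convention, with a companion "<storeName>.<storeId>.location" naming the mounted path. A small sketch of the key handling in the loop above; parsePathSpecificKey and the "important_docs" id are hypothetical, and the strings import already present in this file is assumed:

// parsePathSpecificKey mirrors the loop above: a configuration key such as
// "redis2.important_docs.enabled" selects store "redis2" under id "important_docs",
// while a plain default-store key like "redis2.enabled" is skipped.
func parsePathSpecificKey(key string) (storeName, storeId string, ok bool) {
	if !strings.HasSuffix(key, ".enabled") {
		return "", "", false
	}
	key = strings.TrimSuffix(key, ".enabled")
	parts := strings.Split(key, ".")
	if len(parts) < 2 {
		return "", "", false
	}
	return parts[0], parts[1], true
}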
func validateOneEnabledStore(config *viper.Viper) {

View File

@@ -3,19 +3,14 @@ package filer
import (
"context"
"errors"
"github.com/chrislusf/seaweedfs/weed/glog"
"strings"
"time"
"github.com/chrislusf/seaweedfs/weed/pb/filer_pb"
"github.com/chrislusf/seaweedfs/weed/stats"
"github.com/chrislusf/seaweedfs/weed/util"
)
var (
ErrUnsupportedListDirectoryPrefixed = errors.New("unsupported directory prefix listing")
ErrKvNotImplemented = errors.New("kv not implemented yet")
ErrKvNotFound = errors.New("kv: not found")
ErrUnsupportedListDirectoryPrefixed = errors.New("unsupported directory prefix listing")
ErrUnsupportedSuperLargeDirectoryListing = errors.New("unsupported super large directory listing")
ErrKvNotImplemented = errors.New("kv not implemented yet")
ErrKvNotFound = errors.New("kv: not found")
)
type FilerStore interface {
@@ -42,243 +37,3 @@ type FilerStore interface {
Shutdown()
}
type VirtualFilerStore interface {
FilerStore
DeleteHardLink(ctx context.Context, hardLinkId HardLinkId) error
DeleteOneEntry(ctx context.Context, entry *Entry) error
}
type FilerStoreWrapper struct {
ActualStore FilerStore
}
func NewFilerStoreWrapper(store FilerStore) *FilerStoreWrapper {
if innerStore, ok := store.(*FilerStoreWrapper); ok {
return innerStore
}
return &FilerStoreWrapper{
ActualStore: store,
}
}
func (fsw *FilerStoreWrapper) GetName() string {
return fsw.ActualStore.GetName()
}
func (fsw *FilerStoreWrapper) Initialize(configuration util.Configuration, prefix string) error {
return fsw.ActualStore.Initialize(configuration, prefix)
}
func (fsw *FilerStoreWrapper) InsertEntry(ctx context.Context, entry *Entry) error {
stats.FilerStoreCounter.WithLabelValues(fsw.ActualStore.GetName(), "insert").Inc()
start := time.Now()
defer func() {
stats.FilerStoreHistogram.WithLabelValues(fsw.ActualStore.GetName(), "insert").Observe(time.Since(start).Seconds())
}()
filer_pb.BeforeEntrySerialization(entry.Chunks)
if entry.Mime == "application/octet-stream" {
entry.Mime = ""
}
if err := fsw.handleUpdateToHardLinks(ctx, entry); err != nil {
return err
}
glog.V(4).Infof("InsertEntry %s", entry.FullPath)
return fsw.ActualStore.InsertEntry(ctx, entry)
}
func (fsw *FilerStoreWrapper) UpdateEntry(ctx context.Context, entry *Entry) error {
stats.FilerStoreCounter.WithLabelValues(fsw.ActualStore.GetName(), "update").Inc()
start := time.Now()
defer func() {
stats.FilerStoreHistogram.WithLabelValues(fsw.ActualStore.GetName(), "update").Observe(time.Since(start).Seconds())
}()
filer_pb.BeforeEntrySerialization(entry.Chunks)
if entry.Mime == "application/octet-stream" {
entry.Mime = ""
}
if err := fsw.handleUpdateToHardLinks(ctx, entry); err != nil {
return err
}
glog.V(4).Infof("UpdateEntry %s", entry.FullPath)
return fsw.ActualStore.UpdateEntry(ctx, entry)
}
func (fsw *FilerStoreWrapper) FindEntry(ctx context.Context, fp util.FullPath) (entry *Entry, err error) {
stats.FilerStoreCounter.WithLabelValues(fsw.ActualStore.GetName(), "find").Inc()
start := time.Now()
defer func() {
stats.FilerStoreHistogram.WithLabelValues(fsw.ActualStore.GetName(), "find").Observe(time.Since(start).Seconds())
}()
glog.V(4).Infof("FindEntry %s", fp)
entry, err = fsw.ActualStore.FindEntry(ctx, fp)
if err != nil {
return nil, err
}
fsw.maybeReadHardLink(ctx, entry)
filer_pb.AfterEntryDeserialization(entry.Chunks)
return
}
func (fsw *FilerStoreWrapper) DeleteEntry(ctx context.Context, fp util.FullPath) (err error) {
stats.FilerStoreCounter.WithLabelValues(fsw.ActualStore.GetName(), "delete").Inc()
start := time.Now()
defer func() {
stats.FilerStoreHistogram.WithLabelValues(fsw.ActualStore.GetName(), "delete").Observe(time.Since(start).Seconds())
}()
existingEntry, findErr := fsw.FindEntry(ctx, fp)
if findErr == filer_pb.ErrNotFound {
return nil
}
if len(existingEntry.HardLinkId) != 0 {
// remove hard link
glog.V(4).Infof("DeleteHardLink %s", existingEntry.FullPath)
if err = fsw.DeleteHardLink(ctx, existingEntry.HardLinkId); err != nil {
return err
}
}
glog.V(4).Infof("DeleteEntry %s", fp)
return fsw.ActualStore.DeleteEntry(ctx, fp)
}
func (fsw *FilerStoreWrapper) DeleteOneEntry(ctx context.Context, existingEntry *Entry) (err error) {
stats.FilerStoreCounter.WithLabelValues(fsw.ActualStore.GetName(), "delete").Inc()
start := time.Now()
defer func() {
stats.FilerStoreHistogram.WithLabelValues(fsw.ActualStore.GetName(), "delete").Observe(time.Since(start).Seconds())
}()
if len(existingEntry.HardLinkId) != 0 {
// remove hard link
glog.V(4).Infof("DeleteHardLink %s", existingEntry.FullPath)
if err = fsw.DeleteHardLink(ctx, existingEntry.HardLinkId); err != nil {
return err
}
}
glog.V(4).Infof("DeleteOneEntry %s", existingEntry.FullPath)
return fsw.ActualStore.DeleteEntry(ctx, existingEntry.FullPath)
}
func (fsw *FilerStoreWrapper) DeleteFolderChildren(ctx context.Context, fp util.FullPath) (err error) {
stats.FilerStoreCounter.WithLabelValues(fsw.ActualStore.GetName(), "deleteFolderChildren").Inc()
start := time.Now()
defer func() {
stats.FilerStoreHistogram.WithLabelValues(fsw.ActualStore.GetName(), "deleteFolderChildren").Observe(time.Since(start).Seconds())
}()
glog.V(4).Infof("DeleteFolderChildren %s", fp)
return fsw.ActualStore.DeleteFolderChildren(ctx, fp)
}
func (fsw *FilerStoreWrapper) ListDirectoryEntries(ctx context.Context, dirPath util.FullPath, startFileName string, includeStartFile bool, limit int) ([]*Entry, error) {
stats.FilerStoreCounter.WithLabelValues(fsw.ActualStore.GetName(), "list").Inc()
start := time.Now()
defer func() {
stats.FilerStoreHistogram.WithLabelValues(fsw.ActualStore.GetName(), "list").Observe(time.Since(start).Seconds())
}()
glog.V(4).Infof("ListDirectoryEntries %s from %s limit %d", dirPath, startFileName, limit)
entries, err := fsw.ActualStore.ListDirectoryEntries(ctx, dirPath, startFileName, includeStartFile, limit)
if err != nil {
return nil, err
}
for _, entry := range entries {
fsw.maybeReadHardLink(ctx, entry)
filer_pb.AfterEntryDeserialization(entry.Chunks)
}
return entries, err
}
func (fsw *FilerStoreWrapper) ListDirectoryPrefixedEntries(ctx context.Context, dirPath util.FullPath, startFileName string, includeStartFile bool, limit int, prefix string) ([]*Entry, error) {
stats.FilerStoreCounter.WithLabelValues(fsw.ActualStore.GetName(), "prefixList").Inc()
start := time.Now()
defer func() {
stats.FilerStoreHistogram.WithLabelValues(fsw.ActualStore.GetName(), "prefixList").Observe(time.Since(start).Seconds())
}()
glog.V(4).Infof("ListDirectoryPrefixedEntries %s from %s prefix %s limit %d", dirPath, startFileName, prefix, limit)
entries, err := fsw.ActualStore.ListDirectoryPrefixedEntries(ctx, dirPath, startFileName, includeStartFile, limit, prefix)
if err == ErrUnsupportedListDirectoryPrefixed {
entries, err = fsw.prefixFilterEntries(ctx, dirPath, startFileName, includeStartFile, limit, prefix)
}
if err != nil {
return nil, err
}
for _, entry := range entries {
fsw.maybeReadHardLink(ctx, entry)
filer_pb.AfterEntryDeserialization(entry.Chunks)
}
return entries, nil
}
func (fsw *FilerStoreWrapper) prefixFilterEntries(ctx context.Context, dirPath util.FullPath, startFileName string, includeStartFile bool, limit int, prefix string) (entries []*Entry, err error) {
entries, err = fsw.ActualStore.ListDirectoryEntries(ctx, dirPath, startFileName, includeStartFile, limit)
if err != nil {
return nil, err
}
if prefix == "" {
return
}
count := 0
var lastFileName string
notPrefixed := entries
entries = nil
for count < limit && len(notPrefixed) > 0 {
for _, entry := range notPrefixed {
lastFileName = entry.Name()
if strings.HasPrefix(entry.Name(), prefix) {
count++
entries = append(entries, entry)
if count >= limit {
break
}
}
}
if count < limit {
notPrefixed, err = fsw.ActualStore.ListDirectoryEntries(ctx, dirPath, lastFileName, false, limit)
if err != nil {
return
}
}
}
return
}
func (fsw *FilerStoreWrapper) BeginTransaction(ctx context.Context) (context.Context, error) {
return fsw.ActualStore.BeginTransaction(ctx)
}
func (fsw *FilerStoreWrapper) CommitTransaction(ctx context.Context) error {
return fsw.ActualStore.CommitTransaction(ctx)
}
func (fsw *FilerStoreWrapper) RollbackTransaction(ctx context.Context) error {
return fsw.ActualStore.RollbackTransaction(ctx)
}
func (fsw *FilerStoreWrapper) Shutdown() {
fsw.ActualStore.Shutdown()
}
func (fsw *FilerStoreWrapper) KvPut(ctx context.Context, key []byte, value []byte) (err error) {
return fsw.ActualStore.KvPut(ctx, key, value)
}
func (fsw *FilerStoreWrapper) KvGet(ctx context.Context, key []byte) (value []byte, err error) {
return fsw.ActualStore.KvGet(ctx, key)
}
func (fsw *FilerStoreWrapper) KvDelete(ctx context.Context, key []byte) (err error) {
return fsw.ActualStore.KvDelete(ctx, key)
}

View File

@@ -19,7 +19,8 @@ func (fsw *FilerStoreWrapper) handleUpdateToHardLinks(ctx context.Context, entry
// check what is existing entry
glog.V(4).Infof("handleUpdateToHardLinks FindEntry %s", entry.FullPath)
existingEntry, err := fsw.ActualStore.FindEntry(ctx, entry.FullPath)
actualStore := fsw.getActualStore(entry.FullPath)
existingEntry, err := actualStore.FindEntry(ctx, entry.FullPath)
if err != nil && err != filer_pb.ErrNotFound {
return fmt.Errorf("update existing entry %s: %v", entry.FullPath, err)
}

View File

@@ -0,0 +1,161 @@
package filer
import (
"context"
"github.com/chrislusf/seaweedfs/weed/util"
"strings"
)
var (
_ = FilerStore(&FilerStorePathTranlator{})
)
type FilerStorePathTranlator struct {
actualStore FilerStore
storeRoot string
}
func NewFilerStorePathTranlator(storeRoot string, store FilerStore) *FilerStorePathTranlator {
if innerStore, ok := store.(*FilerStorePathTranlator); ok {
return innerStore
}
if !strings.HasSuffix(storeRoot, "/") {
storeRoot += "/"
}
return &FilerStorePathTranlator{
actualStore: store,
storeRoot: storeRoot,
}
}
func (t *FilerStorePathTranlator) translatePath(fp util.FullPath) (newPath util.FullPath) {
newPath = fp
if t.storeRoot == "/" {
return
}
newPath = fp[len(t.storeRoot)-1:]
if newPath == "" {
newPath = "/"
}
return
}
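A quick trace of translatePath, since the slicing is easy to misread. translate and the "/data/" root are illustrative; storeRoot is assumed to have been normalized to end with "/" by the constructor above, and callers are expected to pass paths under the mounted root:

// translate copies the logic above with plain strings instead of util.FullPath.
func translate(storeRoot, fp string) string {
	if storeRoot == "/" {
		return fp
	}
	newPath := fp[len(storeRoot)-1:] // drop the root, keep the slash that followed it
	if newPath == "" {
		newPath = "/" // looking up the mount point itself maps to the inner root
	}
	return newPath
}

// With storeRoot = "/data/":
//   translate("/data/", "/data/www/index.html") == "/www/index.html"
//   translate("/data/", "/data")                == "/"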
func (t *FilerStorePathTranlator) changeEntryPath(entry *Entry) (previousPath util.FullPath) {
previousPath = entry.FullPath
if t.storeRoot == "/" {
return
}
entry.FullPath = t.translatePath(previousPath)
return
}
func (t *FilerStorePathTranlator) recoverEntryPath(entry *Entry, previousPath util.FullPath) {
entry.FullPath = previousPath
}
func (t *FilerStorePathTranlator) GetName() string {
return t.actualStore.GetName()
}
func (t *FilerStorePathTranlator) Initialize(configuration util.Configuration, prefix string) error {
return t.actualStore.Initialize(configuration, prefix)
}
func (t *FilerStorePathTranlator) InsertEntry(ctx context.Context, entry *Entry) error {
previousPath := t.changeEntryPath(entry)
defer t.recoverEntryPath(entry, previousPath)
return t.actualStore.InsertEntry(ctx, entry)
}
func (t *FilerStorePathTranlator) UpdateEntry(ctx context.Context, entry *Entry) error {
previousPath := t.changeEntryPath(entry)
defer t.recoverEntryPath(entry, previousPath)
return t.actualStore.UpdateEntry(ctx, entry)
}
func (t *FilerStorePathTranlator) FindEntry(ctx context.Context, fp util.FullPath) (entry *Entry, err error) {
if t.storeRoot == "/" {
return t.actualStore.FindEntry(ctx, fp)
}
newFullPath := t.translatePath(fp)
entry, err = t.actualStore.FindEntry(ctx, newFullPath)
if err == nil {
entry.FullPath = fp[:len(t.storeRoot)-1] + entry.FullPath
}
return
}
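FindEntry and the two listing methods reverse the translation before returning entries, re-attaching the mounted root to each inner path. restorePath below is a hypothetical equivalent of the fp[:len(t.storeRoot)-1] + entry.FullPath expression, valid when the looked-up path lies under the mounted root:

// restorePath maps an inner-store path back into the caller's namespace by
// prepending the mounted root without its trailing slash.
func restorePath(storeRoot string, innerPath util.FullPath) util.FullPath {
	return util.FullPath(storeRoot[:len(storeRoot)-1]) + innerPath
}

// restorePath("/data/", "/www/index.html") == "/data/www/index.html"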
func (t *FilerStorePathTranlator) DeleteEntry(ctx context.Context, fp util.FullPath) (err error) {
newFullPath := t.translatePath(fp)
return t.actualStore.DeleteEntry(ctx, newFullPath)
}
func (t *FilerStorePathTranlator) DeleteOneEntry(ctx context.Context, existingEntry *Entry) (err error) {
previousPath := t.changeEntryPath(existingEntry)
defer t.recoverEntryPath(existingEntry, previousPath)
return t.actualStore.DeleteEntry(ctx, existingEntry.FullPath)
}
func (t *FilerStorePathTranlator) DeleteFolderChildren(ctx context.Context, fp util.FullPath) (err error) {
newFullPath := t.translatePath(fp)
return t.actualStore.DeleteFolderChildren(ctx, newFullPath)
}
func (t *FilerStorePathTranlator) ListDirectoryEntries(ctx context.Context, dirPath util.FullPath, startFileName string, includeStartFile bool, limit int) ([]*Entry, error) {
newFullPath := t.translatePath(dirPath)
entries, err := t.actualStore.ListDirectoryEntries(ctx, newFullPath, startFileName, includeStartFile, limit)
if err != nil {
return nil, err
}
for _, entry := range entries {
entry.FullPath = dirPath[:len(t.storeRoot)-1] + entry.FullPath
}
return entries, err
}
func (t *FilerStorePathTranlator) ListDirectoryPrefixedEntries(ctx context.Context, dirPath util.FullPath, startFileName string, includeStartFile bool, limit int, prefix string) ([]*Entry, error) {
newFullPath := t.translatePath(dirPath)
entries, err := t.actualStore.ListDirectoryPrefixedEntries(ctx, newFullPath, startFileName, includeStartFile, limit, prefix)
if err != nil {
return nil, err
}
for _, entry := range entries {
entry.FullPath = dirPath[:len(t.storeRoot)-1] + entry.FullPath
}
return entries, nil
}
func (t *FilerStorePathTranlator) BeginTransaction(ctx context.Context) (context.Context, error) {
return t.actualStore.BeginTransaction(ctx)
}
func (t *FilerStorePathTranlator) CommitTransaction(ctx context.Context) error {
return t.actualStore.CommitTransaction(ctx)
}
func (t *FilerStorePathTranlator) RollbackTransaction(ctx context.Context) error {
return t.actualStore.RollbackTransaction(ctx)
}
func (t *FilerStorePathTranlator) Shutdown() {
t.actualStore.Shutdown()
}
func (t *FilerStorePathTranlator) KvPut(ctx context.Context, key []byte, value []byte) (err error) {
return t.actualStore.KvPut(ctx, key, value)
}
func (t *FilerStorePathTranlator) KvGet(ctx context.Context, key []byte) (value []byte, err error) {
return t.actualStore.KvGet(ctx, key)
}
func (t *FilerStorePathTranlator) KvDelete(ctx context.Context, key []byte) (err error) {
return t.actualStore.KvDelete(ctx, key)
}

View File

@@ -0,0 +1,299 @@
package filer
import (
"context"
"github.com/chrislusf/seaweedfs/weed/glog"
"github.com/viant/ptrie"
"strings"
"time"
"github.com/chrislusf/seaweedfs/weed/pb/filer_pb"
"github.com/chrislusf/seaweedfs/weed/stats"
"github.com/chrislusf/seaweedfs/weed/util"
)
var (
_ = VirtualFilerStore(&FilerStoreWrapper{})
)
type VirtualFilerStore interface {
FilerStore
DeleteHardLink(ctx context.Context, hardLinkId HardLinkId) error
DeleteOneEntry(ctx context.Context, entry *Entry) error
AddPathSpecificStore(path string, storeId string, store FilerStore)
}
type FilerStoreWrapper struct {
defaultStore FilerStore
pathToStore ptrie.Trie
storeIdToStore map[string]FilerStore
}
func NewFilerStoreWrapper(store FilerStore) *FilerStoreWrapper {
if innerStore, ok := store.(*FilerStoreWrapper); ok {
return innerStore
}
return &FilerStoreWrapper{
defaultStore: store,
pathToStore: ptrie.New(),
storeIdToStore: make(map[string]FilerStore),
}
}
func (fsw *FilerStoreWrapper) AddPathSpecificStore(path string, storeId string, store FilerStore) {
fsw.storeIdToStore[storeId] = NewFilerStorePathTranlator(path, store)
err := fsw.pathToStore.Put([]byte(path), storeId)
if err != nil {
glog.Fatalf("put path specific store: %v", err)
}
}
func (fsw *FilerStoreWrapper) getActualStore(path util.FullPath) (store FilerStore) {
store = fsw.defaultStore
if path == "/" {
return
}
var storeId string
fsw.pathToStore.MatchPrefix([]byte(path), func(key []byte, value interface{}) bool {
storeId = value.(string)
return false
})
if storeId != "" {
store = fsw.storeIdToStore[storeId]
}
return
}
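getActualStore routes each path to the store registered for a matching path prefix and falls back to the default store otherwise. A simplified, map-based stand-in for the ptrie lookup; pickStore is hypothetical and does not reproduce github.com/viant/ptrie matching order, only the routing intent (strings is already imported in this file):

// pickStore illustrates the routing above without the trie: return the store
// registered under a prefix of path, or the default store when none matches.
func pickStore(defaultStore FilerStore, prefixToStore map[string]FilerStore, path string) FilerStore {
	for prefix, store := range prefixToStore {
		if strings.HasPrefix(path, prefix) {
			return store
		}
	}
	return defaultStore
}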
func (fsw *FilerStoreWrapper) getDefaultStore() (store FilerStore) {
return fsw.defaultStore
}
func (fsw *FilerStoreWrapper) GetName() string {
return fsw.getDefaultStore().GetName()
}
func (fsw *FilerStoreWrapper) Initialize(configuration util.Configuration, prefix string) error {
return fsw.getDefaultStore().Initialize(configuration, prefix)
}
func (fsw *FilerStoreWrapper) InsertEntry(ctx context.Context, entry *Entry) error {
actualStore := fsw.getActualStore(entry.FullPath)
stats.FilerStoreCounter.WithLabelValues(actualStore.GetName(), "insert").Inc()
start := time.Now()
defer func() {
stats.FilerStoreHistogram.WithLabelValues(actualStore.GetName(), "insert").Observe(time.Since(start).Seconds())
}()
filer_pb.BeforeEntrySerialization(entry.Chunks)
if entry.Mime == "application/octet-stream" {
entry.Mime = ""
}
if err := fsw.handleUpdateToHardLinks(ctx, entry); err != nil {
return err
}
glog.V(4).Infof("InsertEntry %s", entry.FullPath)
return actualStore.InsertEntry(ctx, entry)
}
func (fsw *FilerStoreWrapper) UpdateEntry(ctx context.Context, entry *Entry) error {
actualStore := fsw.getActualStore(entry.FullPath)
stats.FilerStoreCounter.WithLabelValues(actualStore.GetName(), "update").Inc()
start := time.Now()
defer func() {
stats.FilerStoreHistogram.WithLabelValues(actualStore.GetName(), "update").Observe(time.Since(start).Seconds())
}()
filer_pb.BeforeEntrySerialization(entry.Chunks)
if entry.Mime == "application/octet-stream" {
entry.Mime = ""
}
if err := fsw.handleUpdateToHardLinks(ctx, entry); err != nil {
return err
}
glog.V(4).Infof("UpdateEntry %s", entry.FullPath)
return actualStore.UpdateEntry(ctx, entry)
}
func (fsw *FilerStoreWrapper) FindEntry(ctx context.Context, fp util.FullPath) (entry *Entry, err error) {
actualStore := fsw.getActualStore(fp)
stats.FilerStoreCounter.WithLabelValues(actualStore.GetName(), "find").Inc()
start := time.Now()
defer func() {
stats.FilerStoreHistogram.WithLabelValues(actualStore.GetName(), "find").Observe(time.Since(start).Seconds())
}()
glog.V(4).Infof("FindEntry %s", fp)
entry, err = actualStore.FindEntry(ctx, fp)
if err != nil {
return nil, err
}
fsw.maybeReadHardLink(ctx, entry)
filer_pb.AfterEntryDeserialization(entry.Chunks)
return
}
func (fsw *FilerStoreWrapper) DeleteEntry(ctx context.Context, fp util.FullPath) (err error) {
actualStore := fsw.getActualStore(fp)
stats.FilerStoreCounter.WithLabelValues(actualStore.GetName(), "delete").Inc()
start := time.Now()
defer func() {
stats.FilerStoreHistogram.WithLabelValues(actualStore.GetName(), "delete").Observe(time.Since(start).Seconds())
}()
existingEntry, findErr := fsw.FindEntry(ctx, fp)
if findErr == filer_pb.ErrNotFound {
return nil
}
if len(existingEntry.HardLinkId) != 0 {
// remove hard link
glog.V(4).Infof("DeleteHardLink %s", existingEntry.FullPath)
if err = fsw.DeleteHardLink(ctx, existingEntry.HardLinkId); err != nil {
return err
}
}
glog.V(4).Infof("DeleteEntry %s", fp)
return actualStore.DeleteEntry(ctx, fp)
}
func (fsw *FilerStoreWrapper) DeleteOneEntry(ctx context.Context, existingEntry *Entry) (err error) {
actualStore := fsw.getActualStore(existingEntry.FullPath)
stats.FilerStoreCounter.WithLabelValues(actualStore.GetName(), "delete").Inc()
start := time.Now()
defer func() {
stats.FilerStoreHistogram.WithLabelValues(actualStore.GetName(), "delete").Observe(time.Since(start).Seconds())
}()
if len(existingEntry.HardLinkId) != 0 {
// remove hard link
glog.V(4).Infof("DeleteHardLink %s", existingEntry.FullPath)
if err = fsw.DeleteHardLink(ctx, existingEntry.HardLinkId); err != nil {
return err
}
}
glog.V(4).Infof("DeleteOneEntry %s", existingEntry.FullPath)
return actualStore.DeleteEntry(ctx, existingEntry.FullPath)
}
func (fsw *FilerStoreWrapper) DeleteFolderChildren(ctx context.Context, fp util.FullPath) (err error) {
actualStore := fsw.getActualStore(fp + "/")
stats.FilerStoreCounter.WithLabelValues(actualStore.GetName(), "deleteFolderChildren").Inc()
start := time.Now()
defer func() {
stats.FilerStoreHistogram.WithLabelValues(actualStore.GetName(), "deleteFolderChildren").Observe(time.Since(start).Seconds())
}()
glog.V(4).Infof("DeleteFolderChildren %s", fp)
return actualStore.DeleteFolderChildren(ctx, fp)
}
func (fsw *FilerStoreWrapper) ListDirectoryEntries(ctx context.Context, dirPath util.FullPath, startFileName string, includeStartFile bool, limit int) ([]*Entry, error) {
actualStore := fsw.getActualStore(dirPath + "/")
stats.FilerStoreCounter.WithLabelValues(actualStore.GetName(), "list").Inc()
start := time.Now()
defer func() {
stats.FilerStoreHistogram.WithLabelValues(actualStore.GetName(), "list").Observe(time.Since(start).Seconds())
}()
glog.V(4).Infof("ListDirectoryEntries %s from %s limit %d", dirPath, startFileName, limit)
entries, err := actualStore.ListDirectoryEntries(ctx, dirPath, startFileName, includeStartFile, limit)
if err != nil {
return nil, err
}
for _, entry := range entries {
fsw.maybeReadHardLink(ctx, entry)
filer_pb.AfterEntryDeserialization(entry.Chunks)
}
return entries, err
}
func (fsw *FilerStoreWrapper) ListDirectoryPrefixedEntries(ctx context.Context, dirPath util.FullPath, startFileName string, includeStartFile bool, limit int, prefix string) ([]*Entry, error) {
actualStore := fsw.getActualStore(dirPath + "/")
stats.FilerStoreCounter.WithLabelValues(actualStore.GetName(), "prefixList").Inc()
start := time.Now()
defer func() {
stats.FilerStoreHistogram.WithLabelValues(actualStore.GetName(), "prefixList").Observe(time.Since(start).Seconds())
}()
glog.V(4).Infof("ListDirectoryPrefixedEntries %s from %s prefix %s limit %d", dirPath, startFileName, prefix, limit)
entries, err := actualStore.ListDirectoryPrefixedEntries(ctx, dirPath, startFileName, includeStartFile, limit, prefix)
if err == ErrUnsupportedListDirectoryPrefixed {
entries, err = fsw.prefixFilterEntries(ctx, dirPath, startFileName, includeStartFile, limit, prefix)
}
if err != nil {
return nil, err
}
for _, entry := range entries {
fsw.maybeReadHardLink(ctx, entry)
filer_pb.AfterEntryDeserialization(entry.Chunks)
}
return entries, nil
}
func (fsw *FilerStoreWrapper) prefixFilterEntries(ctx context.Context, dirPath util.FullPath, startFileName string, includeStartFile bool, limit int, prefix string) (entries []*Entry, err error) {
actualStore := fsw.getActualStore(dirPath + "/")
entries, err = actualStore.ListDirectoryEntries(ctx, dirPath, startFileName, includeStartFile, limit)
if err != nil {
return nil, err
}
if prefix == "" {
return
}
count := 0
var lastFileName string
notPrefixed := entries
entries = nil
for count < limit && len(notPrefixed) > 0 {
for _, entry := range notPrefixed {
lastFileName = entry.Name()
if strings.HasPrefix(entry.Name(), prefix) {
count++
entries = append(entries, entry)
if count >= limit {
break
}
}
}
if count < limit {
notPrefixed, err = actualStore.ListDirectoryEntries(ctx, dirPath, lastFileName, false, limit)
if err != nil {
return
}
}
}
return
}
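prefixFilterEntries is the fallback taken when the underlying store reports ErrUnsupportedListDirectoryPrefixed: it pages through unfiltered listings and keeps matching names on the client side, resuming each page after the last name seen. The per-page step in isolation; filterPage is hypothetical:

// filterPage mirrors the inner loop above: keep entries whose names carry the
// prefix, stop once limit is reached, and report the last name seen so the next
// page can resume after it (includeStartFile=false on the follow-up listing).
func filterPage(page []*Entry, prefix string, limit int, kept []*Entry) (out []*Entry, lastFileName string, done bool) {
	out = kept
	for _, entry := range page {
		lastFileName = entry.Name()
		if strings.HasPrefix(entry.Name(), prefix) {
			out = append(out, entry)
			if len(out) >= limit {
				return out, lastFileName, true
			}
		}
	}
	return out, lastFileName, false
}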
func (fsw *FilerStoreWrapper) BeginTransaction(ctx context.Context) (context.Context, error) {
return fsw.getDefaultStore().BeginTransaction(ctx)
}
func (fsw *FilerStoreWrapper) CommitTransaction(ctx context.Context) error {
return fsw.getDefaultStore().CommitTransaction(ctx)
}
func (fsw *FilerStoreWrapper) RollbackTransaction(ctx context.Context) error {
return fsw.getDefaultStore().RollbackTransaction(ctx)
}
func (fsw *FilerStoreWrapper) Shutdown() {
fsw.getDefaultStore().Shutdown()
}
func (fsw *FilerStoreWrapper) KvPut(ctx context.Context, key []byte, value []byte) (err error) {
return fsw.getDefaultStore().KvPut(ctx, key, value)
}
func (fsw *FilerStoreWrapper) KvGet(ctx context.Context, key []byte) (value []byte, err error) {
return fsw.getDefaultStore().KvGet(ctx, key)
}
func (fsw *FilerStoreWrapper) KvDelete(ctx context.Context, key []byte) (err error) {
return fsw.getDefaultStore().KvDelete(ctx, key)
}

View File

@@ -28,15 +28,17 @@ func (store *RedisCluster2Store) Initialize(configuration util.Configuration, pr
configuration.GetString(prefix+"password"),
configuration.GetBool(prefix+"useReadOnly"),
configuration.GetBool(prefix+"routeByLatency"),
configuration.GetStringSlice(prefix+"superLargeDirectories"),
)
}
func (store *RedisCluster2Store) initialize(addresses []string, password string, readOnly, routeByLatency bool) (err error) {
func (store *RedisCluster2Store) initialize(addresses []string, password string, readOnly, routeByLatency bool, superLargeDirectories []string) (err error) {
store.Client = redis.NewClusterClient(&redis.ClusterOptions{
Addrs: addresses,
Password: password,
ReadOnly: readOnly,
RouteByLatency: routeByLatency,
})
store.loadSuperLargeDirectories(superLargeDirectories)
return
}

View File

@@ -23,14 +23,16 @@ func (store *Redis2Store) Initialize(configuration util.Configuration, prefix st
configuration.GetString(prefix+"address"),
configuration.GetString(prefix+"password"),
configuration.GetInt(prefix+"database"),
configuration.GetStringSlice(prefix+"superLargeDirectories"),
)
}
func (store *Redis2Store) initialize(hostPort string, password string, database int) (err error) {
func (store *Redis2Store) initialize(hostPort string, password string, database int, superLargeDirectories []string) (err error) {
store.Client = redis.NewClient(&redis.Options{
Addr: hostPort,
Password: password,
DB: database,
})
store.loadSuperLargeDirectories(superLargeDirectories)
return
}

View File

@@ -18,7 +18,21 @@ const (
)
type UniversalRedis2Store struct {
Client redis.UniversalClient
Client redis.UniversalClient
superLargeDirectoryHash map[string]bool
}
func (store *UniversalRedis2Store) isSuperLargeDirectory(dir string) (isSuperLargeDirectory bool) {
_, isSuperLargeDirectory = store.superLargeDirectoryHash[dir]
return
}
func (store *UniversalRedis2Store) loadSuperLargeDirectories(superLargeDirectories []string) {
// set directory hash
store.superLargeDirectoryHash = make(map[string]bool)
for _, dir := range superLargeDirectories {
store.superLargeDirectoryHash[dir] = true
}
}
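Unlike the Cassandra store, the Redis 2 stores do not rewrite keys for super large directories. The membership map below matches the exact configured directory only (no hashing, no effect on subdirectories) and is used to skip the per-directory sorted set that backs listings, while the entry itself still appears to be written by full path earlier in InsertEntry. A small self-contained illustration; the package, directory value, and superLarge helper are hypothetical:

package main

import "fmt"

// superLarge mirrors loadSuperLargeDirectories above: a plain membership set
// keyed by the exact directory path string.
func superLarge(dirs []string) map[string]bool {
	m := make(map[string]bool)
	for _, dir := range dirs {
		m[dir] = true
	}
	return m
}

func main() {
	m := superLarge([]string{"/home/user_files"}) // hypothetical config value
	fmt.Println(m["/home/user_files"])     // true: the directory-listing index is skipped
	fmt.Println(m["/home/user_files/sub"]) // false: only the exact path matches
}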
func (store *UniversalRedis2Store) BeginTransaction(ctx context.Context) (context.Context, error) {
@@ -47,6 +61,10 @@ func (store *UniversalRedis2Store) InsertEntry(ctx context.Context, entry *filer
}
dir, name := entry.FullPath.DirAndName()
if store.isSuperLargeDirectory(dir) {
return nil
}
if name != "" {
if err = store.Client.ZAddNX(genDirectoryListKey(dir), redis.Z{Score: 0, Member: name}).Err(); err != nil {
return fmt.Errorf("persisting %s in parent dir: %v", entry.FullPath, err)
@@ -96,6 +114,9 @@ func (store *UniversalRedis2Store) DeleteEntry(ctx context.Context, fullpath uti
}
dir, name := fullpath.DirAndName()
if store.isSuperLargeDirectory(dir) {
return nil
}
if name != "" {
_, err = store.Client.ZRem(genDirectoryListKey(dir), name).Result()
if err != nil {
@@ -108,6 +129,10 @@ func (store *UniversalRedis2Store) DeleteEntry(ctx context.Context, fullpath uti
func (store *UniversalRedis2Store) DeleteFolderChildren(ctx context.Context, fullpath util.FullPath) (err error) {
if store.isSuperLargeDirectory(string(fullpath)) {
return nil
}
members, err := store.Client.ZRange(genDirectoryListKey(string(fullpath)), 0, -1).Result()
if err != nil {
return fmt.Errorf("DeleteFolderChildren %s : %v", fullpath, err)