From bc10296228fd5003e7c5cebbca6b2ee0e5a0fd09 Mon Sep 17 00:00:00 2001 From: Zijian Chen Date: Fri, 24 Jun 2022 13:04:23 -0700 Subject: [PATCH] Change dynamic client interface --- common/constants.go | 9 + common/dynamicconfig/clientInterface.go | 22 +- common/dynamicconfig/clientInterface_mock.go | 64 +- common/dynamicconfig/config.go | 89 +- common/dynamicconfig/config_benchmark_test.go | 2 +- common/dynamicconfig/config_test.go | 36 +- .../configstore/config_store_client.go | 103 +- .../configstore/config_store_client_test.go | 74 +- common/dynamicconfig/constants.go | 2587 ++++++++--------- common/dynamicconfig/file_based_client.go | 45 +- .../dynamicconfig/file_based_client_test.go | 57 +- common/dynamicconfig/inMemoryClient.go | 45 +- common/dynamicconfig/nopClient.go | 40 +- common/log/loggerimpl/logger_test.go | 2 +- common/persistence/serializer_test.go | 2 +- common/task/parallelTaskProcessor_test.go | 7 +- host/dynamicconfig.go | 38 +- service/frontend/adminHandler.go | 7 +- service/frontend/adminHandler_test.go | 13 +- service/frontend/workflowHandler_test.go | 8 +- service/history/config/config.go | 84 +- .../history/execution/history_builder_test.go | 2 +- .../history/task/priority_assigner_test.go | 53 +- service/history/testing/events_util.go | 3 +- service/worker/service.go | 2 +- tools/cli/adminConfigStoreCommands.go | 4 +- tools/cli/domainUtils.go | 2 +- 27 files changed, 1550 insertions(+), 1850 deletions(-) diff --git a/common/constants.go b/common/constants.go index 5b8d725b5af..019dbe6dc4c 100644 --- a/common/constants.go +++ b/common/constants.go @@ -235,14 +235,18 @@ const ( // NoPriority is the value returned if no priority is ever assigned to the task NoPriority = -1 +) +const ( // HighPriorityClass is the priority class for high priority tasks HighPriorityClass = iota << numBitsPerLevel // DefaultPriorityClass is the priority class for default priority tasks DefaultPriorityClass // LowPriorityClass is the priority class for low priority 
tasks LowPriorityClass +) +const ( // HighPrioritySubclass is the priority subclass for high priority tasks HighPrioritySubclass = iota // DefaultPrioritySubclass is the priority subclass for high priority tasks @@ -250,3 +254,8 @@ const ( // LowPrioritySubclass is the priority subclass for high priority tasks LowPrioritySubclass ) + +const ( + // DefaultHistoryMaxAutoResetPoints is the default maximum number for auto reset points + DefaultHistoryMaxAutoResetPoints = 20 +) diff --git a/common/dynamicconfig/clientInterface.go b/common/dynamicconfig/clientInterface.go index 63389953f46..3b8a8e4054d 100644 --- a/common/dynamicconfig/clientInterface.go +++ b/common/dynamicconfig/clientInterface.go @@ -38,19 +38,15 @@ const ( // Client allows fetching values from a dynamic configuration system NOTE: This does not have async // options right now. In the interest of keeping it minimal, we can add when requirement arises. type Client interface { - GetValue(name Key, defaultValue interface{}) (interface{}, error) - GetValueWithFilters(name Key, filters map[Filter]interface{}, defaultValue interface{}) (interface{}, error) - - GetIntValue(name IntKey, filters map[Filter]interface{}, defaultValue int) (int, error) - GetFloatValue(name FloatKey, filters map[Filter]interface{}, defaultValue float64) (float64, error) - GetBoolValue(name BoolKey, filters map[Filter]interface{}, defaultValue bool) (bool, error) - GetStringValue(name StringKey, filters map[Filter]interface{}, defaultValue string) (string, error) - GetMapValue( - name MapKey, filters map[Filter]interface{}, defaultValue map[string]interface{}, - ) (map[string]interface{}, error) - GetDurationValue( - name DurationKey, filters map[Filter]interface{}, defaultValue time.Duration, - ) (time.Duration, error) + GetValue(name Key) (interface{}, error) + GetValueWithFilters(name Key, filters map[Filter]interface{}) (interface{}, error) + + GetIntValue(name IntKey, filters map[Filter]interface{}) (int, error) + 
GetFloatValue(name FloatKey, filters map[Filter]interface{}) (float64, error) + GetBoolValue(name BoolKey, filters map[Filter]interface{}) (bool, error) + GetStringValue(name StringKey, filters map[Filter]interface{}) (string, error) + GetMapValue(name MapKey, filters map[Filter]interface{}) (map[string]interface{}, error) + GetDurationValue(name DurationKey, filters map[Filter]interface{}) (time.Duration, error) // UpdateValue takes value as map and updates by overriding. It doesn't support update with filters. UpdateValue(name Key, value interface{}) error RestoreValue(name Key, filters map[Filter]interface{}) error diff --git a/common/dynamicconfig/clientInterface_mock.go b/common/dynamicconfig/clientInterface_mock.go index 12d1198db06..99a69fef0d4 100644 --- a/common/dynamicconfig/clientInterface_mock.go +++ b/common/dynamicconfig/clientInterface_mock.go @@ -58,123 +58,123 @@ func (m *MockClient) EXPECT() *MockClientMockRecorder { } // GetBoolValue mocks base method. -func (m *MockClient) GetBoolValue(name Key, filters map[Filter]interface{}, defaultValue bool) (bool, error) { +func (m *MockClient) GetBoolValue(name BoolKey, filters map[Filter]interface{}) (bool, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBoolValue", name, filters, defaultValue) + ret := m.ctrl.Call(m, "GetBoolValue", name, filters) ret0, _ := ret[0].(bool) ret1, _ := ret[1].(error) return ret0, ret1 } // GetBoolValue indicates an expected call of GetBoolValue. 
-func (mr *MockClientMockRecorder) GetBoolValue(name, filters, defaultValue interface{}) *gomock.Call { +func (mr *MockClientMockRecorder) GetBoolValue(name, filters interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBoolValue", reflect.TypeOf((*MockClient)(nil).GetBoolValue), name, filters, defaultValue) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBoolValue", reflect.TypeOf((*MockClient)(nil).GetBoolValue), name, filters) } // GetDurationValue mocks base method. -func (m *MockClient) GetDurationValue(name Key, filters map[Filter]interface{}, defaultValue time.Duration) (time.Duration, error) { +func (m *MockClient) GetDurationValue(name DurationKey, filters map[Filter]interface{}) (time.Duration, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetDurationValue", name, filters, defaultValue) + ret := m.ctrl.Call(m, "GetDurationValue", name, filters) ret0, _ := ret[0].(time.Duration) ret1, _ := ret[1].(error) return ret0, ret1 } // GetDurationValue indicates an expected call of GetDurationValue. -func (mr *MockClientMockRecorder) GetDurationValue(name, filters, defaultValue interface{}) *gomock.Call { +func (mr *MockClientMockRecorder) GetDurationValue(name, filters interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDurationValue", reflect.TypeOf((*MockClient)(nil).GetDurationValue), name, filters, defaultValue) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDurationValue", reflect.TypeOf((*MockClient)(nil).GetDurationValue), name, filters) } // GetFloatValue mocks base method. 
-func (m *MockClient) GetFloatValue(name Key, filters map[Filter]interface{}, defaultValue float64) (float64, error) { +func (m *MockClient) GetFloatValue(name FloatKey, filters map[Filter]interface{}) (float64, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetFloatValue", name, filters, defaultValue) + ret := m.ctrl.Call(m, "GetFloatValue", name, filters) ret0, _ := ret[0].(float64) ret1, _ := ret[1].(error) return ret0, ret1 } // GetFloatValue indicates an expected call of GetFloatValue. -func (mr *MockClientMockRecorder) GetFloatValue(name, filters, defaultValue interface{}) *gomock.Call { +func (mr *MockClientMockRecorder) GetFloatValue(name, filters interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetFloatValue", reflect.TypeOf((*MockClient)(nil).GetFloatValue), name, filters, defaultValue) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetFloatValue", reflect.TypeOf((*MockClient)(nil).GetFloatValue), name, filters) } // GetIntValue mocks base method. -func (m *MockClient) GetIntValue(name Key, filters map[Filter]interface{}, defaultValue int) (int, error) { +func (m *MockClient) GetIntValue(name IntKey, filters map[Filter]interface{}) (int, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetIntValue", name, filters, defaultValue) + ret := m.ctrl.Call(m, "GetIntValue", name, filters) ret0, _ := ret[0].(int) ret1, _ := ret[1].(error) return ret0, ret1 } // GetIntValue indicates an expected call of GetIntValue. 
-func (mr *MockClientMockRecorder) GetIntValue(name, filters, defaultValue interface{}) *gomock.Call { +func (mr *MockClientMockRecorder) GetIntValue(name, filters interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetIntValue", reflect.TypeOf((*MockClient)(nil).GetIntValue), name, filters, defaultValue) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetIntValue", reflect.TypeOf((*MockClient)(nil).GetIntValue), name, filters) } // GetMapValue mocks base method. -func (m *MockClient) GetMapValue(name Key, filters map[Filter]interface{}, defaultValue map[string]interface{}) (map[string]interface{}, error) { +func (m *MockClient) GetMapValue(name MapKey, filters map[Filter]interface{}) (map[string]interface{}, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetMapValue", name, filters, defaultValue) + ret := m.ctrl.Call(m, "GetMapValue", name, filters) ret0, _ := ret[0].(map[string]interface{}) ret1, _ := ret[1].(error) return ret0, ret1 } // GetMapValue indicates an expected call of GetMapValue. -func (mr *MockClientMockRecorder) GetMapValue(name, filters, defaultValue interface{}) *gomock.Call { +func (mr *MockClientMockRecorder) GetMapValue(name, filters interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMapValue", reflect.TypeOf((*MockClient)(nil).GetMapValue), name, filters, defaultValue) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMapValue", reflect.TypeOf((*MockClient)(nil).GetMapValue), name, filters) } // GetStringValue mocks base method. 
-func (m *MockClient) GetStringValue(name Key, filters map[Filter]interface{}, defaultValue string) (string, error) { +func (m *MockClient) GetStringValue(name StringKey, filters map[Filter]interface{}) (string, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetStringValue", name, filters, defaultValue) + ret := m.ctrl.Call(m, "GetStringValue", name, filters) ret0, _ := ret[0].(string) ret1, _ := ret[1].(error) return ret0, ret1 } // GetStringValue indicates an expected call of GetStringValue. -func (mr *MockClientMockRecorder) GetStringValue(name, filters, defaultValue interface{}) *gomock.Call { +func (mr *MockClientMockRecorder) GetStringValue(name, filters interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetStringValue", reflect.TypeOf((*MockClient)(nil).GetStringValue), name, filters, defaultValue) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetStringValue", reflect.TypeOf((*MockClient)(nil).GetStringValue), name, filters) } // GetValue mocks base method. -func (m *MockClient) GetValue(name Key, defaultValue interface{}) (interface{}, error) { +func (m *MockClient) GetValue(name Key) (interface{}, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetValue", name, defaultValue) + ret := m.ctrl.Call(m, "GetValue", name) ret0, _ := ret[0].(interface{}) ret1, _ := ret[1].(error) return ret0, ret1 } // GetValue indicates an expected call of GetValue. -func (mr *MockClientMockRecorder) GetValue(name, defaultValue interface{}) *gomock.Call { +func (mr *MockClientMockRecorder) GetValue(name interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetValue", reflect.TypeOf((*MockClient)(nil).GetValue), name, defaultValue) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetValue", reflect.TypeOf((*MockClient)(nil).GetValue), name) } // GetValueWithFilters mocks base method. 
-func (m *MockClient) GetValueWithFilters(name Key, filters map[Filter]interface{}, defaultValue interface{}) (interface{}, error) { +func (m *MockClient) GetValueWithFilters(name Key, filters map[Filter]interface{}) (interface{}, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetValueWithFilters", name, filters, defaultValue) + ret := m.ctrl.Call(m, "GetValueWithFilters", name, filters) ret0, _ := ret[0].(interface{}) ret1, _ := ret[1].(error) return ret0, ret1 } // GetValueWithFilters indicates an expected call of GetValueWithFilters. -func (mr *MockClientMockRecorder) GetValueWithFilters(name, filters, defaultValue interface{}) *gomock.Call { +func (mr *MockClientMockRecorder) GetValueWithFilters(name, filters interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetValueWithFilters", reflect.TypeOf((*MockClient)(nil).GetValueWithFilters), name, filters, defaultValue) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetValueWithFilters", reflect.TypeOf((*MockClient)(nil).GetValueWithFilters), name, filters) } // ListValue mocks base method. 
diff --git a/common/dynamicconfig/config.go b/common/dynamicconfig/config.go index a5996cea86b..b7cc567e6f6 100644 --- a/common/dynamicconfig/config.go +++ b/common/dynamicconfig/config.go @@ -172,56 +172,50 @@ type DurationPropertyFnWithWorkflowTypeFilter func(domainName string, workflowTy // GetProperty gets a interface property and returns defaultValue if property is not found func (c *Collection) GetProperty(key Key) PropertyFn { - defaultValue := key.DefaultValue() return func() interface{} { - val, err := c.client.GetValue(key, defaultValue) + val, err := c.client.GetValue(key) if err != nil { c.logError(key, nil, err) } - c.logValue(key, nil, val, defaultValue, reflect.DeepEqual) + c.logValue(key, nil, val, key.DefaultValue(), reflect.DeepEqual) return val } } // GetIntProperty gets property and asserts that it's an integer func (c *Collection) GetIntProperty(key IntKey) IntPropertyFn { - defaultValue := key.DefaultInt() return func(opts ...FilterOption) int { filters := c.toFilterMap(opts...) 
val, err := c.client.GetIntValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, intCompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), intCompareEquals) return val } } // GetIntPropertyFilteredByDomain gets property with domain filter and asserts that it's an integer func (c *Collection) GetIntPropertyFilteredByDomain(key IntKey) IntPropertyFnWithDomainFilter { - defaultValue := key.DefaultInt() return func(domain string) int { filters := c.toFilterMap(DomainFilter(domain)) val, err := c.client.GetIntValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, intCompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), intCompareEquals) return val } } // GetIntPropertyFilteredByWorkflowType gets property with workflow type filter and asserts that it's an integer func (c *Collection) GetIntPropertyFilteredByWorkflowType(key IntKey) IntPropertyFnWithWorkflowTypeFilter { - defaultValue := key.DefaultInt() return func(domainName string, workflowType string) int { filters := c.toFilterMap( DomainFilter(domainName), @@ -230,19 +224,17 @@ func (c *Collection) GetIntPropertyFilteredByWorkflowType(key IntKey) IntPropert val, err := c.client.GetIntValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, intCompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), intCompareEquals) return val } } // GetDurationPropertyFilteredByWorkflowType gets property with workflow type filter and asserts that it's a duration func (c *Collection) GetDurationPropertyFilteredByWorkflowType(key DurationKey) DurationPropertyFnWithWorkflowTypeFilter { - defaultValue := key.DefaultDuration() return func(domainName string, workflowType string) time.Duration { filters := c.toFilterMap( DomainFilter(domainName), @@ -251,19 +243,17 @@ func (c 
*Collection) GetDurationPropertyFilteredByWorkflowType(key DurationKey) val, err := c.client.GetDurationValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, durationCompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), durationCompareEquals) return val } } // GetIntPropertyFilteredByTaskListInfo gets property with taskListInfo as filters and asserts that it's an integer func (c *Collection) GetIntPropertyFilteredByTaskListInfo(key IntKey) IntPropertyFnWithTaskListInfoFilters { - defaultValue := key.DefaultInt() return func(domain string, taskList string, taskType int) int { filters := c.toFilterMap( DomainFilter(domain), @@ -273,127 +263,113 @@ func (c *Collection) GetIntPropertyFilteredByTaskListInfo(key IntKey) IntPropert val, err := c.client.GetIntValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, intCompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), intCompareEquals) return val } } // GetIntPropertyFilteredByShardID gets property with shardID as filter and asserts that it's an integer func (c *Collection) GetIntPropertyFilteredByShardID(key IntKey) IntPropertyFnWithShardIDFilter { - defaultValue := key.DefaultInt() return func(shardID int) int { filters := c.toFilterMap(ShardIDFilter(shardID)) val, err := c.client.GetIntValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, intCompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), intCompareEquals) return val } } // GetFloat64Property gets property and asserts that it's a float64 func (c *Collection) GetFloat64Property(key FloatKey) FloatPropertyFn { - defaultValue := key.DefaultFloat() return func(opts ...FilterOption) float64 { filters := c.toFilterMap(opts...) 
val, err := c.client.GetFloatValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, float64CompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), float64CompareEquals) return val } } // GetFloat64PropertyFilteredByShardID gets property with shardID filter and asserts that it's a float64 func (c *Collection) GetFloat64PropertyFilteredByShardID(key FloatKey) FloatPropertyFnWithShardIDFilter { - defaultValue := key.DefaultFloat() return func(shardID int) float64 { filters := c.toFilterMap(ShardIDFilter(shardID)) val, err := c.client.GetFloatValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, float64CompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), float64CompareEquals) return val } } // GetDurationProperty gets property and asserts that it's a duration func (c *Collection) GetDurationProperty(key DurationKey) DurationPropertyFn { - defaultValue := key.DefaultDuration() return func(opts ...FilterOption) time.Duration { filters := c.toFilterMap(opts...) 
val, err := c.client.GetDurationValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, durationCompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), durationCompareEquals) return val } } // GetDurationPropertyFilteredByDomain gets property with domain filter and asserts that it's a duration func (c *Collection) GetDurationPropertyFilteredByDomain(key DurationKey) DurationPropertyFnWithDomainFilter { - defaultValue := key.DefaultDuration() return func(domain string) time.Duration { filters := c.toFilterMap(DomainFilter(domain)) val, err := c.client.GetDurationValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, durationCompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), durationCompareEquals) return val } } // GetDurationPropertyFilteredByDomainID gets property with domainID filter and asserts that it's a duration func (c *Collection) GetDurationPropertyFilteredByDomainID(key DurationKey) DurationPropertyFnWithDomainIDFilter { - defaultValue := key.DefaultDuration() return func(domainID string) time.Duration { filters := c.toFilterMap(DomainIDFilter(domainID)) val, err := c.client.GetDurationValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, durationCompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), durationCompareEquals) return val } } // GetDurationPropertyFilteredByTaskListInfo gets property with taskListInfo as filters and asserts that it's a duration func (c *Collection) GetDurationPropertyFilteredByTaskListInfo(key DurationKey) DurationPropertyFnWithTaskListInfoFilters { - defaultValue := key.DefaultDuration() return func(domain string, taskList string, taskType int) time.Duration { filters := c.toFilterMap( DomainFilter(domain), @@ -403,163 +379,145 @@ func (c *Collection) 
GetDurationPropertyFilteredByTaskListInfo(key DurationKey) val, err := c.client.GetDurationValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, durationCompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), durationCompareEquals) return val } } // GetDurationPropertyFilteredByShardID gets property with shardID id as filter and asserts that it's a duration func (c *Collection) GetDurationPropertyFilteredByShardID(key DurationKey) DurationPropertyFnWithShardIDFilter { - defaultValue := key.DefaultDuration() return func(shardID int) time.Duration { filters := c.toFilterMap(ShardIDFilter(shardID)) val, err := c.client.GetDurationValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, durationCompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), durationCompareEquals) return val } } // GetBoolProperty gets property and asserts that it's an bool func (c *Collection) GetBoolProperty(key BoolKey) BoolPropertyFn { - defaultValue := key.DefaultBool() return func(opts ...FilterOption) bool { filters := c.toFilterMap(opts...) val, err := c.client.GetBoolValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, boolCompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), boolCompareEquals) return val } } // GetStringProperty gets property and asserts that it's an string func (c *Collection) GetStringProperty(key StringKey) StringPropertyFn { - defaultValue := key.DefaultString() return func(opts ...FilterOption) string { filters := c.toFilterMap(opts...) 
val, err := c.client.GetStringValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, stringCompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), stringCompareEquals) return val } } // GetMapProperty gets property and asserts that it's a map func (c *Collection) GetMapProperty(key MapKey) MapPropertyFn { - defaultValue := key.DefaultMap() return func(opts ...FilterOption) map[string]interface{} { filters := c.toFilterMap(opts...) val, err := c.client.GetMapValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, reflect.DeepEqual) + c.logValue(key, filters, val, key.DefaultValue(), reflect.DeepEqual) return val } } // GetStringPropertyFilteredByDomain gets property with domain filter and asserts that it's a string func (c *Collection) GetStringPropertyFilteredByDomain(key StringKey) StringPropertyFnWithDomainFilter { - defaultValue := key.DefaultString() return func(domain string) string { filters := c.toFilterMap(DomainFilter(domain)) val, err := c.client.GetStringValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, stringCompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), stringCompareEquals) return val } } // GetBoolPropertyFilteredByDomain gets property with domain filter and asserts that it's a bool func (c *Collection) GetBoolPropertyFilteredByDomain(key BoolKey) BoolPropertyFnWithDomainFilter { - defaultValue := key.DefaultBool() return func(domain string) bool { filters := c.toFilterMap(DomainFilter(domain)) val, err := c.client.GetBoolValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, boolCompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), boolCompareEquals) return val } } // 
GetBoolPropertyFilteredByDomainID gets property with domainID filter and asserts that it's a bool func (c *Collection) GetBoolPropertyFilteredByDomainID(key BoolKey) BoolPropertyFnWithDomainIDFilter { - defaultValue := key.DefaultBool() return func(domainID string) bool { filters := c.toFilterMap(DomainIDFilter(domainID)) val, err := c.client.GetBoolValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, boolCompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), boolCompareEquals) return val } } // GetBoolPropertyFilteredByDomainIDAndWorkflowID gets property with domainID and workflowID filters and asserts that it's a bool func (c *Collection) GetBoolPropertyFilteredByDomainIDAndWorkflowID(key BoolKey) BoolPropertyFnWithDomainIDAndWorkflowIDFilter { - defaultValue := key.DefaultBool() return func(domainID string, workflowID string) bool { filters := c.toFilterMap(DomainIDFilter(domainID), WorkflowIDFilter(workflowID)) val, err := c.client.GetBoolValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, boolCompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), boolCompareEquals) return val } } // GetBoolPropertyFilteredByTaskListInfo gets property with taskListInfo as filters and asserts that it's an bool func (c *Collection) GetBoolPropertyFilteredByTaskListInfo(key BoolKey) BoolPropertyFnWithTaskListInfoFilters { - defaultValue := key.DefaultBool() return func(domain string, taskList string, taskType int) bool { filters := c.toFilterMap( DomainFilter(domain), @@ -569,12 +527,11 @@ func (c *Collection) GetBoolPropertyFilteredByTaskListInfo(key BoolKey) BoolProp val, err := c.client.GetBoolValue( key, filters, - defaultValue, ) if err != nil { c.logError(key, filters, err) } - c.logValue(key, filters, val, defaultValue, boolCompareEquals) + c.logValue(key, filters, val, key.DefaultValue(), 
boolCompareEquals) return val } } diff --git a/common/dynamicconfig/config_benchmark_test.go b/common/dynamicconfig/config_benchmark_test.go index b882d771378..3ff405a3f9f 100644 --- a/common/dynamicconfig/config_benchmark_test.go +++ b/common/dynamicconfig/config_benchmark_test.go @@ -34,6 +34,6 @@ func BenchmarkGetIntProperty(b *testing.B) { key := MatchingMaxTaskBatchSize for i := 0; i < b.N; i++ { size := cln.GetIntProperty(key) - assert.Equal(b, key.DefaultInt(), size()) + assert.Equal(b, 100, size()) } } diff --git a/common/dynamicconfig/config_test.go b/common/dynamicconfig/config_test.go index 75f3b04010b..924ef30e2fa 100644 --- a/common/dynamicconfig/config_test.go +++ b/common/dynamicconfig/config_test.go @@ -181,35 +181,35 @@ func (s *configSuite) TestUpdateConfig() { } func TestDynamicConfigKeyIsMapped(t *testing.T) { - for i := UnknownIntKey; i < LastIntKey; i++ { + for i := UnknownIntKey + 1; i < LastIntKey; i++ { key, ok := IntKeys[i] - require.True(t, ok) - require.NotEmpty(t, key) + require.True(t, ok, "missing IntKey: %d", i) + require.NotEmpty(t, key, "empty IntKey: %d", i) } - for i := UnknownBoolKey; i < LastBoolKey; i++ { + for i := UnknownBoolKey + 1; i < LastBoolKey; i++ { key, ok := BoolKeys[i] - require.True(t, ok) - require.NotEmpty(t, key) + require.True(t, ok, "missing BoolKey: %d", i) + require.NotEmpty(t, key, "empty BoolKey: %d", i) } - for i := UnknownFloatKey; i < LastFloatKey; i++ { + for i := UnknownFloatKey + 1; i < LastFloatKey; i++ { key, ok := FloatKeys[i] - require.True(t, ok) - require.NotEmpty(t, key) + require.True(t, ok, "missing FloatKey: %d", i) + require.NotEmpty(t, key, "empty FloatKey: %d", i) } - for i := UnknownStringKey; i < LastStringKey; i++ { + for i := UnknownStringKey + 1; i < LastStringKey; i++ { key, ok := StringKeys[i] - require.True(t, ok) - require.NotEmpty(t, key) + require.True(t, ok, "missing StringKey: %d", i) + require.NotEmpty(t, key, "empty StringKey: %d", i) } - for i := UnknownDurationKey; i < 
LastDurationKey; i++ { + for i := UnknownDurationKey + 1; i < LastDurationKey; i++ { key, ok := DurationKeys[i] - require.True(t, ok) - require.NotEmpty(t, key) + require.True(t, ok, "missing DurationKey: %d", i) + require.NotEmpty(t, key, "empty DurationKey: %d", i) } - for i := UnknownMapKey; i < LastMapKey; i++ { + for i := UnknownMapKey + 1; i < LastMapKey; i++ { key, ok := MapKeys[i] - require.True(t, ok) - require.NotEmpty(t, key) + require.True(t, ok, "missing MapKey: %d", i) + require.NotEmpty(t, key, "empty MapKey: %d", i) } } diff --git a/common/dynamicconfig/configstore/config_store_client.go b/common/dynamicconfig/configstore/config_store_client.go index c2786665b9d..4a2ab14ed0f 100644 --- a/common/dynamicconfig/configstore/config_store_client.go +++ b/common/dynamicconfig/configstore/config_store_client.go @@ -133,15 +133,16 @@ func (csc *configStoreClient) startUpdate() error { return nil } -func (csc *configStoreClient) GetValue(name dc.Key, defaultValue interface{}) (interface{}, error) { - return csc.getValueWithFilters(name, nil, defaultValue) +func (csc *configStoreClient) GetValue(name dc.Key) (interface{}, error) { + return csc.getValueWithFilters(name, nil, name.DefaultValue()) } -func (csc *configStoreClient) GetValueWithFilters(name dc.Key, filters map[dc.Filter]interface{}, defaultValue interface{}) (interface{}, error) { - return csc.getValueWithFilters(name, filters, defaultValue) +func (csc *configStoreClient) GetValueWithFilters(name dc.Key, filters map[dc.Filter]interface{}) (interface{}, error) { + return csc.getValueWithFilters(name, filters, name.DefaultValue()) } -func (csc *configStoreClient) GetIntValue(name dc.IntKey, filters map[dc.Filter]interface{}, defaultValue int) (int, error) { +func (csc *configStoreClient) GetIntValue(name dc.IntKey, filters map[dc.Filter]interface{}) (int, error) { + defaultValue := name.DefaultInt() val, err := csc.getValueWithFilters(name, filters, defaultValue) if err != nil { return defaultValue, 
err @@ -159,7 +160,8 @@ func (csc *configStoreClient) GetIntValue(name dc.IntKey, filters map[dc.Filter] return int(floatVal), nil } -func (csc *configStoreClient) GetFloatValue(name dc.FloatKey, filters map[dc.Filter]interface{}, defaultValue float64) (float64, error) { +func (csc *configStoreClient) GetFloatValue(name dc.FloatKey, filters map[dc.Filter]interface{}) (float64, error) { + defaultValue := name.DefaultFloat() val, err := csc.getValueWithFilters(name, filters, defaultValue) if err != nil { return defaultValue, err @@ -171,7 +173,8 @@ func (csc *configStoreClient) GetFloatValue(name dc.FloatKey, filters map[dc.Fil return defaultValue, errors.New("value type is not float64") } -func (csc *configStoreClient) GetBoolValue(name dc.BoolKey, filters map[dc.Filter]interface{}, defaultValue bool) (bool, error) { +func (csc *configStoreClient) GetBoolValue(name dc.BoolKey, filters map[dc.Filter]interface{}) (bool, error) { + defaultValue := name.DefaultBool() val, err := csc.getValueWithFilters(name, filters, defaultValue) if err != nil { return defaultValue, err @@ -183,7 +186,8 @@ func (csc *configStoreClient) GetBoolValue(name dc.BoolKey, filters map[dc.Filte return defaultValue, errors.New("value type is not bool") } -func (csc *configStoreClient) GetStringValue(name dc.StringKey, filters map[dc.Filter]interface{}, defaultValue string) (string, error) { +func (csc *configStoreClient) GetStringValue(name dc.StringKey, filters map[dc.Filter]interface{}) (string, error) { + defaultValue := name.DefaultString() val, err := csc.getValueWithFilters(name, filters, defaultValue) if err != nil { return defaultValue, err @@ -197,9 +201,8 @@ func (csc *configStoreClient) GetStringValue(name dc.StringKey, filters map[dc.F // Note that all number types (ex: ints) will be returned as float64. // It is the caller's responsibility to convert based on their context for value type. 
-func (csc *configStoreClient) GetMapValue( - name dc.MapKey, filters map[dc.Filter]interface{}, defaultValue map[string]interface{}, -) (map[string]interface{}, error) { +func (csc *configStoreClient) GetMapValue(name dc.MapKey, filters map[dc.Filter]interface{}) (map[string]interface{}, error) { + defaultValue := name.DefaultMap() val, err := csc.getValueWithFilters(name, filters, defaultValue) if err != nil { return defaultValue, err @@ -210,9 +213,8 @@ func (csc *configStoreClient) GetMapValue( return defaultValue, errors.New("value type is not map") } -func (csc *configStoreClient) GetDurationValue( - name dc.DurationKey, filters map[dc.Filter]interface{}, defaultValue time.Duration, -) (time.Duration, error) { +func (csc *configStoreClient) GetDurationValue(name dc.DurationKey, filters map[dc.Filter]interface{}) (time.Duration, error) { + defaultValue := name.DefaultDuration() val, err := csc.getValueWithFilters(name, filters, defaultValue) if err != nil { return defaultValue, err @@ -235,7 +237,11 @@ func (csc *configStoreClient) GetDurationValue( } func (csc *configStoreClient) UpdateValue(name dc.Key, value interface{}) error { - return csc.updateValue(name, value, csc.config.UpdateRetryAttempts) + dcValues, ok := value.([]*types.DynamicConfigValue) + if !ok && dcValues != nil { + return errors.New("invalid value") + } + return csc.updateValue(name, dcValues, csc.config.UpdateRetryAttempts) } func (csc *configStoreClient) RestoreValue(name dc.Key, filters map[dc.Filter]interface{}) error { @@ -271,7 +277,7 @@ func (csc *configStoreClient) RestoreValue(name dc.Key, filters map[dc.Filter]in } } - return csc.UpdateValue(name, newValues) + return csc.updateValue(name, newValues, csc.config.UpdateRetryAttempts) } func (csc *configStoreClient) ListValue(name dc.Key) ([]*types.DynamicConfigEntry, error) { @@ -309,10 +315,15 @@ func (csc *configStoreClient) ListValue(name dc.Key) ([]*types.DynamicConfigEntr return resList, nil } -func (csc *configStoreClient) 
updateValue(name dc.Key, value interface{}, retryAttempts int) error { +func (csc *configStoreClient) updateValue(name dc.Key, dcValues []*types.DynamicConfigValue, retryAttempts int) error { //since values are not unique, no way to know if you are trying to update a specific value //or if you want to add another of the same value with different filters. //UpdateValue will replace everything associated with dc key. + for _, dcValue := range dcValues { + if err := validateKeyDataBlobPair(name, dcValue.Value); err != nil { + return err + } + } loaded := csc.values.Load() var currentCached cacheEntry if loaded == nil { @@ -330,7 +341,7 @@ func (csc *configStoreClient) updateValue(name dc.Key, value interface{}, retryA existingEntry, entryExists := currentCached.dcEntries[keyName] - if value == nil || len(value.([]*types.DynamicConfigValue)) == 0 { + if dcValues == nil || len(dcValues) == 0 { newEntries = make([]*types.DynamicConfigEntry, 0, len(currentCached.dcEntries)) for _, entry := range currentCached.dcEntries { @@ -341,11 +352,6 @@ func (csc *configStoreClient) updateValue(name dc.Key, value interface{}, retryA } } } else { - dcValues, ok := value.([]*types.DynamicConfigValue) - if !ok { - return errors.New("invalid value") - } - if entryExists { newEntries = make([]*types.DynamicConfigEntry, 0, len(currentCached.dcEntries)) } else { @@ -400,7 +406,7 @@ func (csc *configStoreClient) updateValue(name dc.Key, value interface{}, retryA if err != nil { return err } - return csc.updateValue(name, value, retryAttempts-1) + return csc.updateValue(name, dcValues, retryAttempts-1) } if retryAttempts == 0 { @@ -600,3 +606,52 @@ func convertFromDataBlob(blob *types.DataBlob) (interface{}, error) { return nil, errors.New("unsupported blob encoding") } } + +func validateKeyDataBlobPair(key dc.Key, blob *types.DataBlob) error { + value, err := convertFromDataBlob(blob) + if err != nil { + return err + } + err = fmt.Errorf("key value pair mismatch, key type: %T, value type: 
%T", key, value) + switch key.(type) { + case dc.IntKey: + if _, ok := value.(int); !ok { + floatVal, ok := value.(float64) + if !ok { // int can be decoded as float64 + return err + } + if floatVal != math.Trunc(floatVal) { + return errors.New("value type is not int") + } + } + case dc.BoolKey: + if _, ok := value.(bool); !ok { + return err + } + case dc.FloatKey: + if _, ok := value.(float64); !ok { + return err + } + case dc.StringKey: + if _, ok := value.(string); !ok { + return err + } + case dc.DurationKey: + if _, ok := value.(time.Duration); !ok { + durationStr, ok := value.(string) + if !ok { + return err + } + if _, err = time.ParseDuration(durationStr); err != nil { + return errors.New("value string encoding cannot be parsed into duration") + } + } + case dc.MapKey: + if _, ok := value.(map[string]interface{}); !ok { + return err + } + default: + return fmt.Errorf("unknown key type: %T", key) + } + return nil +} diff --git a/common/dynamicconfig/configstore/config_store_client_test.go b/common/dynamicconfig/configstore/config_store_client_test.go index b18bd9716eb..8bac3426149 100644 --- a/common/dynamicconfig/configstore/config_store_client_test.go +++ b/common/dynamicconfig/configstore/config_store_client_test.go @@ -328,31 +328,31 @@ func defaultTestSetup(s *configStoreClientSuite) { func (s *configStoreClientSuite) TestGetValue() { defaultTestSetup(s) - v, err := s.client.GetValue(dc.TestGetBoolPropertyKey, true) + v, err := s.client.GetValue(dc.TestGetBoolPropertyKey) s.NoError(err) s.Equal(false, v) } func (s *configStoreClientSuite) TestGetValue_NonExistKey() { defaultTestSetup(s) - v, err := s.client.GetValue(dc.LastIntKey, 191231) + v, err := s.client.GetValue(dc.MaxRetentionDays) s.Error(err) - s.Equal(v, 191231) - v, err = s.client.GetValue(dc.LastBoolKey, true) + s.Equal(dc.MaxRetentionDays.DefaultInt(), v) + v, err = s.client.GetValue(dc.EnableVisibilitySampling) s.Error(err) - s.Equal(v, true) - v, err = s.client.GetValue(dc.LastFloatKey, 
123120) + s.Equal(dc.EnableVisibilitySampling.DefaultBool(), v) + v, err = s.client.GetValue(dc.FrontendErrorInjectionRate) s.Error(err) - s.Equal(v, 123120) - v, err = s.client.GetValue(dc.LastStringKey, "asdfasdf") + s.Equal(dc.FrontendErrorInjectionRate.DefaultFloat(), v) + v, err = s.client.GetValue(dc.AdvancedVisibilityWritingMode) s.Error(err) - s.Equal(v, "asdfasdf") - v, err = s.client.GetValue(dc.LastDurationKey, time.Duration(1231237897)) + s.Equal(dc.AdvancedVisibilityWritingMode.DefaultString(), v) + v, err = s.client.GetValue(dc.FrontendShutdownDrainDuration) s.Error(err) - s.Equal(v, time.Duration(1231237897)) - v, err = s.client.GetValue(dc.LastMapKey, map[string]interface{}{"asdfas": 1231}) + s.Equal(dc.FrontendShutdownDrainDuration.DefaultDuration(), v) + v, err = s.client.GetValue(dc.RequiredDomainDataKeys) s.Error(err) - s.Equal(v, map[string]interface{}{"asdfas": 1231}) + s.Equal(dc.RequiredDomainDataKeys.DefaultMap(), v) } func (s *configStoreClientSuite) TestGetValueWithFilters() { @@ -362,14 +362,14 @@ func (s *configStoreClientSuite) TestGetValueWithFilters() { dc.DomainName: "global-samples-domain", } - v, err := s.client.GetValueWithFilters(dc.TestGetBoolPropertyKey, filters, false) + v, err := s.client.GetValueWithFilters(dc.TestGetBoolPropertyKey, filters) s.NoError(err) s.Equal(true, v) filters = map[dc.Filter]interface{}{ dc.DomainName: "non-exist-domain", } - v, err = s.client.GetValueWithFilters(dc.TestGetBoolPropertyKey, filters, true) + v, err = s.client.GetValueWithFilters(dc.TestGetBoolPropertyKey, filters) s.NoError(err) s.Equal(false, v) @@ -377,7 +377,7 @@ func (s *configStoreClientSuite) TestGetValueWithFilters() { dc.DomainName: "samples-domain", dc.TaskListName: "non-exist-tasklist", } - v, err = s.client.GetValueWithFilters(dc.TestGetBoolPropertyKey, filters, false) + v, err = s.client.GetValueWithFilters(dc.TestGetBoolPropertyKey, filters) s.NoError(err) s.Equal(true, v) } @@ -388,14 +388,14 @@ func (s 
*configStoreClientSuite) TestGetValueWithFilters_UnknownFilter() { dc.DomainName: "global-samples-domain1", dc.UnknownFilter: "unknown-filter1", } - v, err := s.client.GetValueWithFilters(dc.TestGetBoolPropertyKey, filters, false) + v, err := s.client.GetValueWithFilters(dc.TestGetBoolPropertyKey, filters) s.NoError(err) s.Equal(false, v) } func (s *configStoreClientSuite) TestGetIntValue() { defaultTestSetup(s) - v, err := s.client.GetIntValue(dc.TestGetIntPropertyKey, nil, 1) + v, err := s.client.GetIntValue(dc.TestGetIntPropertyKey, nil) s.NoError(err) s.Equal(1000, v) } @@ -405,25 +405,24 @@ func (s *configStoreClientSuite) TestGetIntValue_FilterNotMatch() { filters := map[dc.Filter]interface{}{ dc.DomainName: "samples-domain", } - v, err := s.client.GetIntValue(dc.TestGetIntPropertyKey, filters, 500) + v, err := s.client.GetIntValue(dc.TestGetIntPropertyKey, filters) s.NoError(err) s.Equal(1000, v) } func (s *configStoreClientSuite) TestGetIntValue_WrongType() { defaultTestSetup(s) - defaultValue := 2000 filters := map[dc.Filter]interface{}{ dc.DomainName: "global-samples-domain", } - v, err := s.client.GetIntValue(dc.TestGetIntPropertyKey, filters, defaultValue) + v, err := s.client.GetIntValue(dc.TestGetIntPropertyKey, filters) s.Error(err) - s.Equal(defaultValue, v) + s.Equal(dc.TestGetIntPropertyKey.DefaultInt(), v) } func (s *configStoreClientSuite) TestGetFloatValue() { defaultTestSetup(s) - v, err := s.client.GetFloatValue(dc.TestGetFloat64PropertyKey, nil, 1) + v, err := s.client.GetFloatValue(dc.TestGetFloat64PropertyKey, nil) s.NoError(err) s.Equal(12.0, v) } @@ -433,15 +432,14 @@ func (s *configStoreClientSuite) TestGetFloatValue_WrongType() { filters := map[dc.Filter]interface{}{ dc.DomainName: "samples-domain", } - defaultValue := 1.0 - v, err := s.client.GetFloatValue(dc.TestGetFloat64PropertyKey, filters, defaultValue) + v, err := s.client.GetFloatValue(dc.TestGetFloat64PropertyKey, filters) s.Error(err) - s.Equal(defaultValue, v) + 
s.Equal(dc.TestGetFloat64PropertyKey.DefaultFloat(), v) } func (s *configStoreClientSuite) TestGetBoolValue() { defaultTestSetup(s) - v, err := s.client.GetBoolValue(dc.TestGetBoolPropertyKey, nil, true) + v, err := s.client.GetBoolValue(dc.TestGetBoolPropertyKey, nil) s.NoError(err) s.Equal(false, v) } @@ -451,15 +449,14 @@ func (s *configStoreClientSuite) TestGetStringValue() { filters := map[dc.Filter]interface{}{ dc.TaskListName: "random tasklist", } - v, err := s.client.GetStringValue(dc.TestGetStringPropertyKey, filters, "defaultString") + v, err := s.client.GetStringValue(dc.TestGetStringPropertyKey, filters) s.NoError(err) s.Equal("constrained-string", v) } func (s *configStoreClientSuite) TestGetMapValue() { defaultTestSetup(s) - var defaultVal map[string]interface{} - v, err := s.client.GetMapValue(dc.TestGetMapPropertyKey, nil, defaultVal) + v, err := s.client.GetMapValue(dc.TestGetMapPropertyKey, nil) s.NoError(err) expectedVal := map[string]interface{}{ "key1": "1", @@ -477,18 +474,17 @@ func (s *configStoreClientSuite) TestGetMapValue() { func (s *configStoreClientSuite) TestGetMapValue_WrongType() { defaultTestSetup(s) - var defaultVal map[string]interface{} filters := map[dc.Filter]interface{}{ dc.TaskListName: "random tasklist", } - v, err := s.client.GetMapValue(dc.TestGetMapPropertyKey, filters, defaultVal) + v, err := s.client.GetMapValue(dc.TestGetMapPropertyKey, filters) s.Error(err) - s.Equal(defaultVal, v) + s.Equal(dc.TestGetMapPropertyKey.DefaultMap(), v) } func (s *configStoreClientSuite) TestGetDurationValue() { defaultTestSetup(s) - v, err := s.client.GetDurationValue(dc.TestGetDurationPropertyKey, nil, time.Second) + v, err := s.client.GetDurationValue(dc.TestGetDurationPropertyKey, nil) s.NoError(err) s.Equal(time.Minute, v) } @@ -498,9 +494,9 @@ func (s *configStoreClientSuite) TestGetDurationValue_NotStringRepresentation() filters := map[dc.Filter]interface{}{ dc.DomainName: "samples-domain", } - v, err := 
s.client.GetDurationValue(dc.TestGetDurationPropertyKey, filters, time.Second) + v, err := s.client.GetDurationValue(dc.TestGetDurationPropertyKey, filters) s.Error(err) - s.Equal(time.Second, v) + s.Equal(dc.TestGetDurationPropertyKey.DefaultDuration(), v) } func (s *configStoreClientSuite) TestGetDurationValue_ParseFailed() { @@ -509,9 +505,9 @@ func (s *configStoreClientSuite) TestGetDurationValue_ParseFailed() { dc.DomainName: "samples-domain", dc.TaskListName: "longIdleTimeTaskList", } - v, err := s.client.GetDurationValue(dc.TestGetDurationPropertyKey, filters, time.Second) + v, err := s.client.GetDurationValue(dc.TestGetDurationPropertyKey, filters) s.Error(err) - s.Equal(time.Second, v) + s.Equal(dc.TestGetDurationPropertyKey.DefaultDuration(), v) } func (s *configStoreClientSuite) TestValidateConfig_InvalidConfig() { @@ -731,7 +727,7 @@ func (s *configStoreClientSuite) TestUpdateValue_NoRetrySuccess() { err = s.client.update() s.NoError(err) - v, err := s.client.GetValue(dc.TestGetBoolPropertyKey, false) + v, err := s.client.GetValue(dc.TestGetBoolPropertyKey) s.NoError(err) s.Equal(true, v) } diff --git a/common/dynamicconfig/constants.go b/common/dynamicconfig/constants.go index 719ee4f0aba..b2afbeb32be 100644 --- a/common/dynamicconfig/constants.go +++ b/common/dynamicconfig/constants.go @@ -30,38 +30,40 @@ import ( ) type ( - DynamicBase struct { - KeyName string - Description string - } // DynamicInt defines the properties for a dynamic config with int value type DynamicInt struct { - DynamicBase + KeyName string + Description string DefaultValue int } DynamicBool struct { - DynamicBase + KeyName string + Description string DefaultValue bool } DynamicFloat struct { - DynamicBase + KeyName string + Description string DefaultValue float64 } DynamicString struct { - DynamicBase + KeyName string + Description string DefaultValue string } DynamicDuration struct { - DynamicBase + KeyName string + Description string DefaultValue time.Duration } DynamicMap 
struct { - DynamicBase + KeyName string + Description string DefaultValue map[string]interface{} } @@ -87,6 +89,39 @@ func GetKeyFromKeyName(keyName string) (Key, error) { return keyVal, nil } +func ValidateKeyValuePair(key Key, value interface{}) error { + err := fmt.Errorf("key value pair mismatch, key type: %T, value type: %T", key, value) + switch key.(type) { + case IntKey: + if _, ok := value.(int); !ok { + return err + } + case BoolKey: + if _, ok := value.(bool); !ok { + return err + } + case FloatKey: + if _, ok := value.(float64); !ok { + return err + } + case StringKey: + if _, ok := value.(string); !ok { + return err + } + case DurationKey: + if _, ok := value.(time.Duration); !ok { + return err + } + case MapKey: + if _, ok := value.(map[string]interface{}); !ok { + return err + } + default: + return fmt.Errorf("unknown key type: %T", key) + } + return nil +} + func (k IntKey) String() string { return IntKeys[k].KeyName } @@ -2375,2510 +2410,2154 @@ const ( var IntKeys = map[IntKey]DynamicInt{ TestGetIntPropertyKey: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "testGetIntPropertyKey", - Description: "", - }, + + KeyName: "testGetIntPropertyKey", + Description: "", DefaultValue: 0, }, TestGetIntPropertyFilteredByDomainKey: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "testGetIntPropertyFilteredByDomainKey", - Description: "", - }, + + KeyName: "testGetIntPropertyFilteredByDomainKey", + Description: "", DefaultValue: 0, }, TestGetIntPropertyFilteredByTaskListInfoKey: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "testGetIntPropertyFilteredByTaskListInfoKey", - Description: "", - }, + + KeyName: "testGetIntPropertyFilteredByTaskListInfoKey", + Description: "", DefaultValue: 0, }, TransactionSizeLimit: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "system.transactionSizeLimit", - Description: "TransactionSizeLimit is the largest allowed transaction size to persistence", - }, + + KeyName: "system.transactionSizeLimit", + Description: 
"TransactionSizeLimit is the largest allowed transaction size to persistence", DefaultValue: 14680064, }, MaxRetentionDays: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "system.maxRetentionDays", - Description: "MaxRetentionDays is the maximum allowed retention days for domain", - }, + + KeyName: "system.maxRetentionDays", + Description: "MaxRetentionDays is the maximum allowed retention days for domain", DefaultValue: 30, }, MinRetentionDays: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "system.minRetentionDays", - Description: "MinRetentionDays is the minimal allowed retention days for domain", - }, + + KeyName: "system.minRetentionDays", + Description: "MinRetentionDays is the minimal allowed retention days for domain", DefaultValue: 1, }, MaxDecisionStartToCloseSeconds: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "system.maxDecisionStartToCloseSeconds", - Description: "MaxDecisionStartToCloseSeconds is the maximum allowed value for decision start to close timeout in seconds", - }, + + KeyName: "system.maxDecisionStartToCloseSeconds", + Description: "MaxDecisionStartToCloseSeconds is the maximum allowed value for decision start to close timeout in seconds", DefaultValue: 240, }, GRPCMaxSizeInByte: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "system.grpcMaxSizeInByte", - Description: "GRPCMaxSizeInByte is the key for config GRPC response size", - }, + + KeyName: "system.grpcMaxSizeInByte", + Description: "GRPCMaxSizeInByte is the key for config GRPC response size", DefaultValue: 4 * 1024 * 1024, }, BlobSizeLimitError: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "limit.blobSize.error", - Description: "BlobSizeLimitError is the per event blob size limit", - }, + + KeyName: "limit.blobSize.error", + Description: "BlobSizeLimitError is the per event blob size limit", DefaultValue: 2 * 1024 * 1024, }, BlobSizeLimitWarn: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "limit.blobSize.warn", - Description: "BlobSizeLimitWarn is the 
per event blob size limit for warning", - }, + + KeyName: "limit.blobSize.warn", + Description: "BlobSizeLimitWarn is the per event blob size limit for warning", DefaultValue: 256 * 1024, }, HistorySizeLimitError: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "limit.historySize.error", - Description: "HistorySizeLimitError is the per workflow execution history size limit", - }, + + KeyName: "limit.historySize.error", + Description: "HistorySizeLimitError is the per workflow execution history size limit", DefaultValue: 200 * 1024 * 1024, }, HistorySizeLimitWarn: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "limit.historySize.warn", - Description: "HistorySizeLimitWarn is the per workflow execution history size limit for warning", - }, + + KeyName: "limit.historySize.warn", + Description: "HistorySizeLimitWarn is the per workflow execution history size limit for warning", DefaultValue: 50 * 1024 * 1024, }, HistoryCountLimitError: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "limit.historyCount.error", - Description: "HistoryCountLimitError is the per workflow execution history event count limit", - }, + + KeyName: "limit.historyCount.error", + Description: "HistoryCountLimitError is the per workflow execution history event count limit", DefaultValue: 200 * 1024, }, HistoryCountLimitWarn: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "limit.historyCount.warn", - Description: "HistoryCountLimitWarn is the per workflow execution history event count limit for warning", - }, + + KeyName: "limit.historyCount.warn", + Description: "HistoryCountLimitWarn is the per workflow execution history event count limit for warning", DefaultValue: 50 * 1024, }, DomainNameMaxLength: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "limit.domainNameLength", - Description: "DomainNameMaxLength is the length limit for domain name", - }, + + KeyName: "limit.domainNameLength", + Description: "DomainNameMaxLength is the length limit for domain name", DefaultValue: 
1000, }, IdentityMaxLength: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "limit.identityLength", - Description: "IdentityMaxLength is the length limit for identity", - }, + + KeyName: "limit.identityLength", + Description: "IdentityMaxLength is the length limit for identity", DefaultValue: 1000, }, WorkflowIDMaxLength: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "limit.workflowIDLength", - Description: "WorkflowIDMaxLength is the length limit for workflowID", - }, + + KeyName: "limit.workflowIDLength", + Description: "WorkflowIDMaxLength is the length limit for workflowID", DefaultValue: 1000, }, SignalNameMaxLength: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "limit.signalNameLength", - Description: "SignalNameMaxLength is the length limit for signal name", - }, + + KeyName: "limit.signalNameLength", + Description: "SignalNameMaxLength is the length limit for signal name", DefaultValue: 1000, }, WorkflowTypeMaxLength: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "limit.workflowTypeLength", - Description: "WorkflowTypeMaxLength is the length limit for workflow type", - }, + + KeyName: "limit.workflowTypeLength", + Description: "WorkflowTypeMaxLength is the length limit for workflow type", DefaultValue: 1000, }, RequestIDMaxLength: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "limit.requestIDLength", - Description: "RequestIDMaxLength is the length limit for requestID", - }, + + KeyName: "limit.requestIDLength", + Description: "RequestIDMaxLength is the length limit for requestID", DefaultValue: 1000, }, TaskListNameMaxLength: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "limit.taskListNameLength", - Description: "TaskListNameMaxLength is the length limit for task list name", - }, + + KeyName: "limit.taskListNameLength", + Description: "TaskListNameMaxLength is the length limit for task list name", DefaultValue: 1000, }, ActivityIDMaxLength: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "limit.activityIDLength", - 
Description: "ActivityIDMaxLength is the length limit for activityID", - }, + + KeyName: "limit.activityIDLength", + Description: "ActivityIDMaxLength is the length limit for activityID", DefaultValue: 1000, }, ActivityTypeMaxLength: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "limit.activityTypeLength", - Description: "ActivityTypeMaxLength is the length limit for activity type", - }, + + KeyName: "limit.activityTypeLength", + Description: "ActivityTypeMaxLength is the length limit for activity type", DefaultValue: 1000, }, MarkerNameMaxLength: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "limit.markerNameLength", - Description: "MarkerNameMaxLength is the length limit for marker name", - }, + + KeyName: "limit.markerNameLength", + Description: "MarkerNameMaxLength is the length limit for marker name", DefaultValue: 1000, }, TimerIDMaxLength: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "limit.timerIDLength", - Description: "TimerIDMaxLength is the length limit for timerID", - }, + + KeyName: "limit.timerIDLength", + Description: "TimerIDMaxLength is the length limit for timerID", DefaultValue: 1000, }, MaxIDLengthWarnLimit: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "limit.maxIDWarnLength", - Description: "MaxIDLengthWarnLimit is the warn length limit for various IDs, including: Domain, TaskList, WorkflowID, ActivityID, TimerID, WorkflowType, ActivityType, SignalName, MarkerName, ErrorReason/FailureReason/CancelCause, Identity, RequestID", - }, + + KeyName: "limit.maxIDWarnLength", + Description: "MaxIDLengthWarnLimit is the warn length limit for various IDs, including: Domain, TaskList, WorkflowID, ActivityID, TimerID, WorkflowType, ActivityType, SignalName, MarkerName, ErrorReason/FailureReason/CancelCause, Identity, RequestID", DefaultValue: 128, }, FrontendPersistenceMaxQPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.persistenceMaxQPS", - Description: "FrontendPersistenceMaxQPS is the max qps frontend host can 
query DB", - }, + + KeyName: "frontend.persistenceMaxQPS", + Description: "FrontendPersistenceMaxQPS is the max qps frontend host can query DB", DefaultValue: 2000, }, FrontendPersistenceGlobalMaxQPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.persistenceGlobalMaxQPS", - Description: "FrontendPersistenceGlobalMaxQPS is the max qps frontend cluster can query DB", - }, + + KeyName: "frontend.persistenceGlobalMaxQPS", + Description: "FrontendPersistenceGlobalMaxQPS is the max qps frontend cluster can query DB", DefaultValue: 0, }, FrontendVisibilityMaxPageSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.visibilityMaxPageSize", - Description: "FrontendVisibilityMaxPageSize is default max size for ListWorkflowExecutions in one page", - }, + + KeyName: "frontend.visibilityMaxPageSize", + Description: "FrontendVisibilityMaxPageSize is default max size for ListWorkflowExecutions in one page", DefaultValue: 1000, }, FrontendVisibilityListMaxQPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.visibilityListMaxQPS", - Description: "FrontendVisibilityListMaxQPS is max qps frontend can list open/close workflows", - }, + + KeyName: "frontend.visibilityListMaxQPS", + Description: "FrontendVisibilityListMaxQPS is max qps frontend can list open/close workflows", DefaultValue: 10, }, FrontendESVisibilityListMaxQPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.esVisibilityListMaxQPS", - Description: "FrontendESVisibilityListMaxQPS is max qps frontend can list open/close workflows from ElasticSearch", - }, + + KeyName: "frontend.esVisibilityListMaxQPS", + Description: "FrontendESVisibilityListMaxQPS is max qps frontend can list open/close workflows from ElasticSearch", DefaultValue: 30, }, FrontendESIndexMaxResultWindow: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.esIndexMaxResultWindow", - Description: "FrontendESIndexMaxResultWindow is ElasticSearch index setting max_result_window", - }, + + KeyName: 
"frontend.esIndexMaxResultWindow", + Description: "FrontendESIndexMaxResultWindow is ElasticSearch index setting max_result_window", DefaultValue: 10000, }, FrontendHistoryMaxPageSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.historyMaxPageSize", - Description: "FrontendHistoryMaxPageSize is default max size for GetWorkflowExecutionHistory in one page", - }, + + KeyName: "frontend.historyMaxPageSize", + Description: "FrontendHistoryMaxPageSize is default max size for GetWorkflowExecutionHistory in one page", DefaultValue: 1000, }, FrontendUserRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.rps", - Description: "FrontendUserRPS is workflow rate limit per second", - }, + + KeyName: "frontend.rps", + Description: "FrontendUserRPS is workflow rate limit per second", DefaultValue: 1200, }, FrontendWorkerRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.workerrps", - Description: "FrontendWorkerRPS is background-processing workflow rate limit per second", - }, + + KeyName: "frontend.workerrps", + Description: "FrontendWorkerRPS is background-processing workflow rate limit per second", DefaultValue: UnlimitedRPS, }, FrontendMaxDomainUserRPSPerInstance: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.domainrps", - Description: "FrontendMaxDomainUserRPSPerInstance is workflow domain rate limit per second", - }, + + KeyName: "frontend.domainrps", + Description: "FrontendMaxDomainUserRPSPerInstance is workflow domain rate limit per second", DefaultValue: 1200, }, FrontendMaxDomainWorkerRPSPerInstance: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.domainworkerrps", - Description: "FrontendMaxDomainWorkerRPSPerInstance is background-processing workflow domain rate limit per second", - }, + + KeyName: "frontend.domainworkerrps", + Description: "FrontendMaxDomainWorkerRPSPerInstance is background-processing workflow domain rate limit per second", DefaultValue: UnlimitedRPS, }, 
FrontendGlobalDomainUserRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.globalDomainrps", - Description: "FrontendGlobalDomainUserRPS is workflow domain rate limit per second for the whole Cadence cluster", - }, + + KeyName: "frontend.globalDomainrps", + Description: "FrontendGlobalDomainUserRPS is workflow domain rate limit per second for the whole Cadence cluster", DefaultValue: 0, }, FrontendGlobalDomainWorkerRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.globalDomainWorkerrps", - Description: "FrontendGlobalDomainWorkerRPS is background-processing workflow domain rate limit per second for the whole Cadence cluster", - }, + + KeyName: "frontend.globalDomainWorkerrps", + Description: "FrontendGlobalDomainWorkerRPS is background-processing workflow domain rate limit per second for the whole Cadence cluster", DefaultValue: UnlimitedRPS, }, FrontendDecisionResultCountLimit: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.decisionResultCountLimit", - Description: "FrontendDecisionResultCountLimit is max number of decisions per RespondDecisionTaskCompleted request", - }, + + KeyName: "frontend.decisionResultCountLimit", + Description: "FrontendDecisionResultCountLimit is max number of decisions per RespondDecisionTaskCompleted request", DefaultValue: 0, }, FrontendHistoryMgrNumConns: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.historyMgrNumConns", - Description: "FrontendHistoryMgrNumConns is for persistence cluster.NumConns", - }, + + KeyName: "frontend.historyMgrNumConns", + Description: "FrontendHistoryMgrNumConns is for persistence cluster.NumConns", DefaultValue: 10, }, FrontendThrottledLogRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.throttledLogRPS", - Description: "FrontendThrottledLogRPS is the rate limit on number of log messages emitted per second for throttled logger", - }, + + KeyName: "frontend.throttledLogRPS", + Description: "FrontendThrottledLogRPS is the rate limit 
on number of log messages emitted per second for throttled logger", DefaultValue: 20, }, FrontendMaxBadBinaries: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.maxBadBinaries", - Description: "FrontendMaxBadBinaries is the max number of bad binaries in domain config", - }, + + KeyName: "frontend.maxBadBinaries", + Description: "FrontendMaxBadBinaries is the max number of bad binaries in domain config", DefaultValue: 10, }, SearchAttributesNumberOfKeysLimit: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.searchAttributesNumberOfKeysLimit", - Description: "SearchAttributesNumberOfKeysLimit is the limit of number of keys", - }, + + KeyName: "frontend.searchAttributesNumberOfKeysLimit", + Description: "SearchAttributesNumberOfKeysLimit is the limit of number of keys", DefaultValue: 100, }, SearchAttributesSizeOfValueLimit: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.searchAttributesSizeOfValueLimit", - Description: "SearchAttributesSizeOfValueLimit is the size limit of each value", - }, + + KeyName: "frontend.searchAttributesSizeOfValueLimit", + Description: "SearchAttributesSizeOfValueLimit is the size limit of each value", DefaultValue: 2048, }, SearchAttributesTotalSizeLimit: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.searchAttributesTotalSizeLimit", - Description: "SearchAttributesTotalSizeLimit is the size limit of the whole map", - }, + + KeyName: "frontend.searchAttributesTotalSizeLimit", + Description: "SearchAttributesTotalSizeLimit is the size limit of the whole map", DefaultValue: 40 * 1024, }, VisibilityArchivalQueryMaxPageSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.visibilityArchivalQueryMaxPageSize", - Description: "VisibilityArchivalQueryMaxPageSize is the maximum page size for a visibility archival query", - }, + + KeyName: "frontend.visibilityArchivalQueryMaxPageSize", + Description: "VisibilityArchivalQueryMaxPageSize is the maximum page size for a visibility 
archival query", DefaultValue: 10000, }, MatchingUserRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "matching.rps", - Description: "MatchingUserRPS is request rate per second for each matching host", - }, + + KeyName: "matching.rps", + Description: "MatchingUserRPS is request rate per second for each matching host", DefaultValue: 1200, }, MatchingWorkerRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "matching.workerrps", - Description: "MatchingWorkerRPS is background-processing request rate per second for each matching host", - }, + + KeyName: "matching.workerrps", + Description: "MatchingWorkerRPS is background-processing request rate per second for each matching host", DefaultValue: UnlimitedRPS, }, MatchingDomainUserRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "matching.domainrps", - Description: "MatchingDomainUserRPS is request rate per domain per second for each matching host", - }, + + KeyName: "matching.domainrps", + Description: "MatchingDomainUserRPS is request rate per domain per second for each matching host", DefaultValue: 0, }, MatchingDomainWorkerRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "matching.domainworkerrps", - Description: "MatchingDomainWorkerRPS is background-processing request rate per domain per second for each matching host", - }, + + KeyName: "matching.domainworkerrps", + Description: "MatchingDomainWorkerRPS is background-processing request rate per domain per second for each matching host", DefaultValue: UnlimitedRPS, }, MatchingPersistenceMaxQPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "matching.persistenceMaxQPS", - Description: "MatchingPersistenceMaxQPS is the max qps matching host can query DB", - }, + + KeyName: "matching.persistenceMaxQPS", + Description: "MatchingPersistenceMaxQPS is the max qps matching host can query DB", DefaultValue: 3000, }, MatchingPersistenceGlobalMaxQPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "matching.persistenceGlobalMaxQPS", - 
Description: "MatchingPersistenceGlobalMaxQPS is the max qps matching cluster can query DB", - }, + + KeyName: "matching.persistenceGlobalMaxQPS", + Description: "MatchingPersistenceGlobalMaxQPS is the max qps matching cluster can query DB", DefaultValue: 0, }, MatchingMinTaskThrottlingBurstSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "matching.minTaskThrottlingBurstSize", - Description: "MatchingMinTaskThrottlingBurstSize is the minimum burst size for task list throttling", - }, + + KeyName: "matching.minTaskThrottlingBurstSize", + Description: "MatchingMinTaskThrottlingBurstSize is the minimum burst size for task list throttling", DefaultValue: 1, }, MatchingGetTasksBatchSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "matching.getTasksBatchSize", - Description: "MatchingGetTasksBatchSize is the maximum batch size to fetch from the task buffer", - }, + + KeyName: "matching.getTasksBatchSize", + Description: "MatchingGetTasksBatchSize is the maximum batch size to fetch from the task buffer", DefaultValue: 1000, }, MatchingOutstandingTaskAppendsThreshold: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "matching.outstandingTaskAppendsThreshold", - Description: "MatchingOutstandingTaskAppendsThreshold is the threshold for outstanding task appends", - }, + + KeyName: "matching.outstandingTaskAppendsThreshold", + Description: "MatchingOutstandingTaskAppendsThreshold is the threshold for outstanding task appends", DefaultValue: 250, }, MatchingMaxTaskBatchSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "matching.maxTaskBatchSize", - Description: "MatchingMaxTaskBatchSize is max batch size for task writer", - }, + + KeyName: "matching.maxTaskBatchSize", + Description: "MatchingMaxTaskBatchSize is max batch size for task writer", DefaultValue: 100, }, MatchingMaxTaskDeleteBatchSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "matching.maxTaskDeleteBatchSize", - Description: "MatchingMaxTaskDeleteBatchSize is the max batch size for 
range deletion of tasks", - }, + + KeyName: "matching.maxTaskDeleteBatchSize", + Description: "MatchingMaxTaskDeleteBatchSize is the max batch size for range deletion of tasks", DefaultValue: 100, }, MatchingThrottledLogRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "matching.throttledLogRPS", - Description: "MatchingThrottledLogRPS is the rate limit on number of log messages emitted per second for throttled logger", - }, + + KeyName: "matching.throttledLogRPS", + Description: "MatchingThrottledLogRPS is the rate limit on number of log messages emitted per second for throttled logger", DefaultValue: 20, }, MatchingNumTasklistWritePartitions: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "matching.numTasklistWritePartitions", - Description: "MatchingNumTasklistWritePartitions is the number of write partitions for a task list", - }, + + KeyName: "matching.numTasklistWritePartitions", + Description: "MatchingNumTasklistWritePartitions is the number of write partitions for a task list", DefaultValue: 1, }, MatchingNumTasklistReadPartitions: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "matching.numTasklistReadPartitions", - Description: "MatchingNumTasklistReadPartitions is the number of read partitions for a task list", - }, + + KeyName: "matching.numTasklistReadPartitions", + Description: "MatchingNumTasklistReadPartitions is the number of read partitions for a task list", DefaultValue: 1, }, MatchingForwarderMaxOutstandingPolls: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "matching.forwarderMaxOutstandingPolls", - Description: "MatchingForwarderMaxOutstandingPolls is the max number of inflight polls from the forwarder", - }, + + KeyName: "matching.forwarderMaxOutstandingPolls", + Description: "MatchingForwarderMaxOutstandingPolls is the max number of inflight polls from the forwarder", DefaultValue: 1, }, MatchingForwarderMaxOutstandingTasks: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "matching.forwarderMaxOutstandingTasks", - 
Description: "MatchingForwarderMaxOutstandingTasks is the max number of inflight addTask/queryTask from the forwarder", - }, + + KeyName: "matching.forwarderMaxOutstandingTasks", + Description: "MatchingForwarderMaxOutstandingTasks is the max number of inflight addTask/queryTask from the forwarder", DefaultValue: 1, }, MatchingForwarderMaxRatePerSecond: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "matching.forwarderMaxRatePerSecond", - Description: "MatchingForwarderMaxRatePerSecond is the max rate at which add/query can be forwarded", - }, + + KeyName: "matching.forwarderMaxRatePerSecond", + Description: "MatchingForwarderMaxRatePerSecond is the max rate at which add/query can be forwarded", DefaultValue: 10, }, MatchingForwarderMaxChildrenPerNode: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "matching.forwarderMaxChildrenPerNode", - Description: "MatchingForwarderMaxChildrenPerNode is the max number of children per node in the task list partition tree", - }, + + KeyName: "matching.forwarderMaxChildrenPerNode", + Description: "MatchingForwarderMaxChildrenPerNode is the max number of children per node in the task list partition tree", DefaultValue: 20, }, HistoryRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.rps", - Description: "HistoryRPS is request rate per second for each history host", - }, + + KeyName: "history.rps", + Description: "HistoryRPS is request rate per second for each history host", DefaultValue: 3000, }, HistoryPersistenceMaxQPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.persistenceMaxQPS", - Description: "HistoryPersistenceMaxQPS is the max qps history host can query DB", - }, + + KeyName: "history.persistenceMaxQPS", + Description: "HistoryPersistenceMaxQPS is the max qps history host can query DB", DefaultValue: 9000, }, HistoryPersistenceGlobalMaxQPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.persistenceGlobalMaxQPS", - Description: "HistoryPersistenceGlobalMaxQPS is the max 
qps history cluster can query DB", - }, + + KeyName: "history.persistenceGlobalMaxQPS", + Description: "HistoryPersistenceGlobalMaxQPS is the max qps history cluster can query DB", DefaultValue: 0, }, HistoryVisibilityOpenMaxQPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.historyVisibilityOpenMaxQPS", - Description: "HistoryVisibilityOpenMaxQPS is max qps one history host can write visibility open_executions", - }, + + KeyName: "history.historyVisibilityOpenMaxQPS", + Description: "HistoryVisibilityOpenMaxQPS is max qps one history host can write visibility open_executions", DefaultValue: 300, }, HistoryVisibilityClosedMaxQPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.historyVisibilityClosedMaxQPS", - Description: "HistoryVisibilityClosedMaxQPS is max qps one history host can write visibility closed_executions", - }, + + KeyName: "history.historyVisibilityClosedMaxQPS", + Description: "HistoryVisibilityClosedMaxQPS is max qps one history host can write visibility closed_executions", DefaultValue: 300, }, HistoryCacheInitialSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.cacheInitialSize", - Description: "HistoryCacheInitialSize is initial size of history cache", - }, + + KeyName: "history.cacheInitialSize", + Description: "HistoryCacheInitialSize is initial size of history cache", DefaultValue: 128, }, HistoryCacheMaxSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.cacheMaxSize", - Description: "HistoryCacheMaxSize is max size of history cache", - }, + + KeyName: "history.cacheMaxSize", + Description: "HistoryCacheMaxSize is max size of history cache", DefaultValue: 512, }, EventsCacheInitialCount: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.eventsCacheInitialSize", - Description: "EventsCacheInitialCount is initial count of events cache", - }, + + KeyName: "history.eventsCacheInitialSize", + Description: "EventsCacheInitialCount is initial count of events cache", DefaultValue: 
128, }, EventsCacheMaxCount: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.eventsCacheMaxSize", - Description: "EventsCacheMaxCount is max count of events cache", - }, + + KeyName: "history.eventsCacheMaxSize", + Description: "EventsCacheMaxCount is max count of events cache", DefaultValue: 512, }, EventsCacheMaxSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.eventsCacheMaxSizeInBytes", - Description: "EventsCacheMaxSize is max size of events cache in bytes", - }, + + KeyName: "history.eventsCacheMaxSizeInBytes", + Description: "EventsCacheMaxSize is max size of events cache in bytes", DefaultValue: 0, }, EventsCacheGlobalInitialCount: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.eventsCacheGlobalInitialSize", - Description: "EventsCacheGlobalInitialCount is initial count of global events cache", - }, + + KeyName: "history.eventsCacheGlobalInitialSize", + Description: "EventsCacheGlobalInitialCount is initial count of global events cache", DefaultValue: 4096, }, EventsCacheGlobalMaxCount: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.eventsCacheGlobalMaxSize", - Description: "EventsCacheGlobalMaxCount is max count of global events cache", - }, + + KeyName: "history.eventsCacheGlobalMaxSize", + Description: "EventsCacheGlobalMaxCount is max count of global events cache", DefaultValue: 131072, }, AcquireShardConcurrency: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.acquireShardConcurrency", - Description: "AcquireShardConcurrency is number of goroutines that can be used to acquire shards in the shard controller.", - }, + + KeyName: "history.acquireShardConcurrency", + Description: "AcquireShardConcurrency is number of goroutines that can be used to acquire shards in the shard controller.", DefaultValue: 1, }, TaskProcessRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.taskProcessRPS", - Description: "TaskProcessRPS is the task processing rate per second for each domain", - }, 
+ + KeyName: "history.taskProcessRPS", + Description: "TaskProcessRPS is the task processing rate per second for each domain", DefaultValue: 1000, }, TaskSchedulerType: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.taskSchedulerType", - Description: "TaskSchedulerType is the task scheduler type for priority task processor", - }, + + KeyName: "history.taskSchedulerType", + Description: "TaskSchedulerType is the task scheduler type for priority task processor", DefaultValue: 2, }, TaskSchedulerWorkerCount: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.taskSchedulerWorkerCount", - Description: "TaskSchedulerWorkerCount is the number of workers per host in task scheduler", - }, + + KeyName: "history.taskSchedulerWorkerCount", + Description: "TaskSchedulerWorkerCount is the number of workers per host in task scheduler", DefaultValue: 200, }, TaskSchedulerShardWorkerCount: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.taskSchedulerShardWorkerCount", - Description: "TaskSchedulerShardWorkerCount is the number of worker per shard in task scheduler", - }, + + KeyName: "history.taskSchedulerShardWorkerCount", + Description: "TaskSchedulerShardWorkerCount is the number of worker per shard in task scheduler", DefaultValue: 0, }, TaskSchedulerQueueSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.taskSchedulerQueueSize", - Description: "TaskSchedulerQueueSize is the size of task channel for host level task scheduler", - }, + + KeyName: "history.taskSchedulerQueueSize", + Description: "TaskSchedulerQueueSize is the size of task channel for host level task scheduler", DefaultValue: 10000, }, TaskSchedulerShardQueueSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.taskSchedulerShardQueueSize", - Description: "TaskSchedulerShardQueueSize is the size of task channel for shard level task scheduler", - }, + + KeyName: "history.taskSchedulerShardQueueSize", + Description: "TaskSchedulerShardQueueSize is the size 
of task channel for shard level task scheduler", DefaultValue: 200, }, TaskSchedulerDispatcherCount: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.taskSchedulerDispatcherCount", - Description: "TaskSchedulerDispatcherCount is the number of task dispatcher in task scheduler (only applies to host level task scheduler)", - }, + + KeyName: "history.taskSchedulerDispatcherCount", + Description: "TaskSchedulerDispatcherCount is the number of task dispatcher in task scheduler (only applies to host level task scheduler)", DefaultValue: 1, }, TaskCriticalRetryCount: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.taskCriticalRetryCount", - Description: "TaskCriticalRetryCount is the critical retry count for background tasks, when task attempt exceeds this threshold:- task attempt metrics and additional error logs will be emitted- task priority will be lowered", - }, + + KeyName: "history.taskCriticalRetryCount", + Description: "TaskCriticalRetryCount is the critical retry count for background tasks, when task attempt exceeds this threshold:- task attempt metrics and additional error logs will be emitted- task priority will be lowered", DefaultValue: 50, }, QueueProcessorSplitMaxLevel: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.queueProcessorSplitMaxLevel", - Description: "QueueProcessorSplitMaxLevel is the max processing queue level", - }, - DefaultValue: 2, + + KeyName: "history.queueProcessorSplitMaxLevel", + Description: "QueueProcessorSplitMaxLevel is the max processing queue level", + DefaultValue: 2, // 3 levels, start from 0 }, TimerTaskBatchSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.timerTaskBatchSize", - Description: "TimerTaskBatchSize is batch size for timer processor to process tasks", - }, + + KeyName: "history.timerTaskBatchSize", + Description: "TimerTaskBatchSize is batch size for timer processor to process tasks", DefaultValue: 100, }, TimerTaskDeleteBatchSize: DynamicInt{ - DynamicBase: 
DynamicBase{ - KeyName: "history.timerTaskDeleteBatchSize", - Description: "TimerTaskDeleteBatchSize is batch size for timer processor to delete timer tasks", - }, + + KeyName: "history.timerTaskDeleteBatchSize", + Description: "TimerTaskDeleteBatchSize is batch size for timer processor to delete timer tasks", DefaultValue: 4000, }, TimerProcessorGetFailureRetryCount: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.timerProcessorGetFailureRetryCount", - Description: "TimerProcessorGetFailureRetryCount is retry count for timer processor get failure operation", - }, + + KeyName: "history.timerProcessorGetFailureRetryCount", + Description: "TimerProcessorGetFailureRetryCount is retry count for timer processor get failure operation", DefaultValue: 5, }, TimerProcessorCompleteTimerFailureRetryCount: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.timerProcessorCompleteTimerFailureRetryCount", - Description: "TimerProcessorCompleteTimerFailureRetryCount is retry count for timer processor complete timer operation", - }, + + KeyName: "history.timerProcessorCompleteTimerFailureRetryCount", + Description: "TimerProcessorCompleteTimerFailureRetryCount is retry count for timer processor complete timer operation", DefaultValue: 10, }, TimerProcessorFailoverMaxPollRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.timerProcessorFailoverMaxPollRPS", - Description: "TimerProcessorFailoverMaxPollRPS is max poll rate per second for timer processor", - }, + + KeyName: "history.timerProcessorFailoverMaxPollRPS", + Description: "TimerProcessorFailoverMaxPollRPS is max poll rate per second for timer processor", DefaultValue: 1, }, TimerProcessorMaxPollRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.timerProcessorMaxPollRPS", - Description: "TimerProcessorMaxPollRPS is max poll rate per second for timer processor", - }, + + KeyName: "history.timerProcessorMaxPollRPS", + Description: "TimerProcessorMaxPollRPS is max poll rate per 
second for timer processor", DefaultValue: 20, }, TimerProcessorMaxRedispatchQueueSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.timerProcessorMaxRedispatchQueueSize", - Description: "TimerProcessorMaxRedispatchQueueSize is the threshold of the number of tasks in the redispatch queue for timer processor", - }, + + KeyName: "history.timerProcessorMaxRedispatchQueueSize", + Description: "TimerProcessorMaxRedispatchQueueSize is the threshold of the number of tasks in the redispatch queue for timer processor", DefaultValue: 10000, }, TimerProcessorHistoryArchivalSizeLimit: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.timerProcessorHistoryArchivalSizeLimit", - Description: "TimerProcessorHistoryArchivalSizeLimit is the max history size for inline archival", - }, + + KeyName: "history.timerProcessorHistoryArchivalSizeLimit", + Description: "TimerProcessorHistoryArchivalSizeLimit is the max history size for inline archival", DefaultValue: 500 * 1024, }, TransferTaskBatchSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.transferTaskBatchSize", - Description: "TransferTaskBatchSize is batch size for transferQueueProcessor", - }, + + KeyName: "history.transferTaskBatchSize", + Description: "TransferTaskBatchSize is batch size for transferQueueProcessor", DefaultValue: 100, }, TransferTaskDeleteBatchSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.transferTaskDeleteBatchSize", - Description: "TransferTaskDeleteBatchSize is batch size for transferQueueProcessor to delete transfer tasks", - }, + + KeyName: "history.transferTaskDeleteBatchSize", + Description: "TransferTaskDeleteBatchSize is batch size for transferQueueProcessor to delete transfer tasks", DefaultValue: 4000, }, TransferProcessorFailoverMaxPollRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.transferProcessorFailoverMaxPollRPS", - Description: "TransferProcessorFailoverMaxPollRPS is max poll rate per second for 
transferQueueProcessor", - }, + + KeyName: "history.transferProcessorFailoverMaxPollRPS", + Description: "TransferProcessorFailoverMaxPollRPS is max poll rate per second for transferQueueProcessor", DefaultValue: 1, }, TransferProcessorMaxPollRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.transferProcessorMaxPollRPS", - Description: "TransferProcessorMaxPollRPS is max poll rate per second for transferQueueProcessor", - }, + + KeyName: "history.transferProcessorMaxPollRPS", + Description: "TransferProcessorMaxPollRPS is max poll rate per second for transferQueueProcessor", DefaultValue: 20, }, TransferProcessorCompleteTransferFailureRetryCount: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.transferProcessorCompleteTransferFailureRetryCount", - Description: "TransferProcessorCompleteTransferFailureRetryCount is times of retry for failure", - }, + + KeyName: "history.transferProcessorCompleteTransferFailureRetryCount", + Description: "TransferProcessorCompleteTransferFailureRetryCount is times of retry for failure", DefaultValue: 10, }, TransferProcessorMaxRedispatchQueueSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.transferProcessorMaxRedispatchQueueSize", - Description: "TransferProcessorMaxRedispatchQueueSize is the threshold of the number of tasks in the redispatch queue for transferQueueProcessor", - }, + + KeyName: "history.transferProcessorMaxRedispatchQueueSize", + Description: "TransferProcessorMaxRedispatchQueueSize is the threshold of the number of tasks in the redispatch queue for transferQueueProcessor", DefaultValue: 10000, }, CrossClusterTaskBatchSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterTaskBatchSize", - Description: "CrossClusterTaskBatchSize is the batch size for loading cross cluster tasks from persistence in crossClusterQueueProcessor", - }, + + KeyName: "history.crossClusterTaskBatchSize", + Description: "CrossClusterTaskBatchSize is the batch size for loading 
cross cluster tasks from persistence in crossClusterQueueProcessor", DefaultValue: 100, }, CrossClusterTaskDeleteBatchSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterTaskDeleteBatchSize", - Description: "CrossClusterTaskDeleteBatchSize is the batch size for deleting cross cluster tasks from persistence in crossClusterQueueProcessor", - }, + + KeyName: "history.crossClusterTaskDeleteBatchSize", + Description: "CrossClusterTaskDeleteBatchSize is the batch size for deleting cross cluster tasks from persistence in crossClusterQueueProcessor", DefaultValue: 4000, }, CrossClusterTaskFetchBatchSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterTaskFetchBatchSize", - Description: "CrossClusterTaskFetchBatchSize is batch size for dispatching cross cluster tasks to target cluster in crossClusterQueueProcessor", - }, + + KeyName: "history.crossClusterTaskFetchBatchSize", + Description: "CrossClusterTaskFetchBatchSize is batch size for dispatching cross cluster tasks to target cluster in crossClusterQueueProcessor", DefaultValue: 100, }, CrossClusterSourceProcessorMaxPollRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterSourceProcessorMaxPollRPS", - Description: "CrossClusterSourceProcessorMaxPollRPS is max poll rate per second for crossClusterQueueProcessor", - }, + + KeyName: "history.crossClusterSourceProcessorMaxPollRPS", + Description: "CrossClusterSourceProcessorMaxPollRPS is max poll rate per second for crossClusterQueueProcessor", DefaultValue: 20, }, CrossClusterSourceProcessorCompleteTaskFailureRetryCount: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterSourceProcessorCompleteTaskFailureRetryCount", - Description: "CrossClusterSourceProcessorCompleteTaskFailureRetryCount is times of retry for failure", - }, + + KeyName: "history.crossClusterSourceProcessorCompleteTaskFailureRetryCount", + Description: "CrossClusterSourceProcessorCompleteTaskFailureRetryCount is 
times of retry for failure", DefaultValue: 10, }, CrossClusterSourceProcessorMaxRedispatchQueueSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterSourceProcessorMaxRedispatchQueueSize", - Description: "CrossClusterSourceProcessorMaxRedispatchQueueSize is the threshold of the number of tasks in the redispatch queue for crossClusterQueueProcessor", - }, + + KeyName: "history.crossClusterSourceProcessorMaxRedispatchQueueSize", + Description: "CrossClusterSourceProcessorMaxRedispatchQueueSize is the threshold of the number of tasks in the redispatch queue for crossClusterQueueProcessor", DefaultValue: 10000, }, CrossClusterSourceProcessorMaxPendingTaskSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterSourceProcessorMaxPendingTaskSize", - Description: "CrossClusterSourceProcessorMaxPendingTaskSize is the threshold of the number of ready for polling tasks in crossClusterQueueProcessor, task loading will be stopped when the number is reached", - }, + + KeyName: "history.crossClusterSourceProcessorMaxPendingTaskSize", + Description: "CrossClusterSourceProcessorMaxPendingTaskSize is the threshold of the number of ready for polling tasks in crossClusterQueueProcessor, task loading will be stopped when the number is reached", DefaultValue: 500, }, CrossClusterTargetProcessorMaxPendingTasks: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterTargetProcessorMaxPendingTasks", - Description: "CrossClusterTargetProcessorMaxPendingTasks is the max number of pending tasks in cross cluster task processor", - }, + + KeyName: "history.crossClusterTargetProcessorMaxPendingTasks", + Description: "CrossClusterTargetProcessorMaxPendingTasks is the max number of pending tasks in cross cluster task processor", DefaultValue: 200, }, CrossClusterTargetProcessorMaxRetryCount: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterTargetProcessorMaxRetryCount", - Description: 
"CrossClusterTargetProcessorMaxRetryCount is the max number of retries when executing a cross-cluster task in target cluster", - }, + + KeyName: "history.crossClusterTargetProcessorMaxRetryCount", + Description: "CrossClusterTargetProcessorMaxRetryCount is the max number of retries when executing a cross-cluster task in target cluster", DefaultValue: 20, }, CrossClusterFetcherParallelism: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterFetcherParallelism", - Description: "CrossClusterFetcherParallelism is the number of go routines each cross cluster fetcher use, note there's one cross cluster task fetcher per host per source cluster", - }, + + KeyName: "history.crossClusterFetcherParallelism", + Description: "CrossClusterFetcherParallelism is the number of go routines each cross cluster fetcher use, note there's one cross cluster task fetcher per host per source cluster", DefaultValue: 1, }, ReplicatorTaskBatchSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.replicatorTaskBatchSize", - Description: "ReplicatorTaskBatchSize is batch size for ReplicatorProcessor", - }, + + KeyName: "history.replicatorTaskBatchSize", + Description: "ReplicatorTaskBatchSize is batch size for ReplicatorProcessor", DefaultValue: 25, }, ReplicatorTaskDeleteBatchSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.replicatorTaskDeleteBatchSize", - Description: "ReplicatorTaskDeleteBatchSize is batch size for ReplicatorProcessor to delete replication tasks", - }, + + KeyName: "history.replicatorTaskDeleteBatchSize", + Description: "ReplicatorTaskDeleteBatchSize is batch size for ReplicatorProcessor to delete replication tasks", DefaultValue: 4000, }, ReplicatorReadTaskMaxRetryCount: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.replicatorReadTaskMaxRetryCount", - Description: "ReplicatorReadTaskMaxRetryCount is the number of read replication task retry time", - }, + + KeyName: "history.replicatorReadTaskMaxRetryCount", + 
Description: "ReplicatorReadTaskMaxRetryCount is the number of read replication task retry time", DefaultValue: 3, }, ExecutionMgrNumConns: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.executionMgrNumConns", - Description: "ExecutionMgrNumConns is persistence connections number for ExecutionManager", - }, + + KeyName: "history.executionMgrNumConns", + Description: "ExecutionMgrNumConns is persistence connections number for ExecutionManager", DefaultValue: 50, }, HistoryMgrNumConns: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.historyMgrNumConns", - Description: "HistoryMgrNumConns is persistence connections number for HistoryManager", - }, + + KeyName: "history.historyMgrNumConns", + Description: "HistoryMgrNumConns is persistence connections number for HistoryManager", DefaultValue: 50, }, MaximumBufferedEventsBatch: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.maximumBufferedEventsBatch", - Description: "MaximumBufferedEventsBatch is max number of buffer event in mutable state", - }, + + KeyName: "history.maximumBufferedEventsBatch", + Description: "MaximumBufferedEventsBatch is max number of buffer event in mutable state", DefaultValue: 100, }, MaximumSignalsPerExecution: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.maximumSignalsPerExecution", - Description: "MaximumSignalsPerExecution is max number of signals supported by single execution", - }, - DefaultValue: 10000, + + KeyName: "history.maximumSignalsPerExecution", + Description: "MaximumSignalsPerExecution is max number of signals supported by single execution", + DefaultValue: 10000, // 10K signals should big enough given workflow execution has 200K history lengh limit. 
It needs to be non-zero to protect continueAsNew from infinit loop }, NumArchiveSystemWorkflows: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.numArchiveSystemWorkflows", - Description: "NumArchiveSystemWorkflows is key for number of archive system workflows running in total", - }, + + KeyName: "history.numArchiveSystemWorkflows", + Description: "NumArchiveSystemWorkflows is key for number of archive system workflows running in total", DefaultValue: 1000, }, ArchiveRequestRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.archiveRequestRPS", - Description: "ArchiveRequestRPS is the rate limit on the number of archive request per second", - }, - DefaultValue: 300, + + KeyName: "history.archiveRequestRPS", + Description: "ArchiveRequestRPS is the rate limit on the number of archive request per second", + DefaultValue: 300, // should be much smaller than frontend RPS }, ArchiveInlineHistoryRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.archiveInlineHistoryRPS", - Description: "ArchiveInlineHistoryRPS is the (per instance) rate limit on the number of inline history archival attempts per second", - }, + + KeyName: "history.archiveInlineHistoryRPS", + Description: "ArchiveInlineHistoryRPS is the (per instance) rate limit on the number of inline history archival attempts per second", DefaultValue: 1000, }, ArchiveInlineHistoryGlobalRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.archiveInlineHistoryGlobalRPS", - Description: "ArchiveInlineHistoryGlobalRPS is the global rate limit on the number of inline history archival attempts per second", - }, + + KeyName: "history.archiveInlineHistoryGlobalRPS", + Description: "ArchiveInlineHistoryGlobalRPS is the global rate limit on the number of inline history archival attempts per second", DefaultValue: 10000, }, ArchiveInlineVisibilityRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.archiveInlineVisibilityRPS", - Description: 
"ArchiveInlineVisibilityRPS is the (per instance) rate limit on the number of inline visibility archival attempts per second", - }, + + KeyName: "history.archiveInlineVisibilityRPS", + Description: "ArchiveInlineVisibilityRPS is the (per instance) rate limit on the number of inline visibility archival attempts per second", DefaultValue: 1000, }, ArchiveInlineVisibilityGlobalRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.archiveInlineVisibilityGlobalRPS", - Description: "ArchiveInlineVisibilityGlobalRPS is the global rate limit on the number of inline visibility archival attempts per second", - }, + + KeyName: "history.archiveInlineVisibilityGlobalRPS", + Description: "ArchiveInlineVisibilityGlobalRPS is the global rate limit on the number of inline visibility archival attempts per second", DefaultValue: 10000, }, HistoryMaxAutoResetPoints: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.historyMaxAutoResetPoints", - Description: "HistoryMaxAutoResetPoints is the key for max number of auto reset points stored in mutableState", - }, + + KeyName: "history.historyMaxAutoResetPoints", + Description: "HistoryMaxAutoResetPoints is the key for max number of auto reset points stored in mutableState", DefaultValue: 20, }, ParentClosePolicyThreshold: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.parentClosePolicyThreshold", - Description: "ParentClosePolicyThreshold is decides that parent close policy will be processed by sys workers(if enabled) ifthe number of children greater than or equal to this threshold", - }, + + KeyName: "history.parentClosePolicyThreshold", + Description: "ParentClosePolicyThreshold is decides that parent close policy will be processed by sys workers(if enabled) ifthe number of children greater than or equal to this threshold", DefaultValue: 10, }, NumParentClosePolicySystemWorkflows: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.numParentClosePolicySystemWorkflows", - Description: 
"NumParentClosePolicySystemWorkflows is key for number of parentClosePolicy system workflows running in total", - }, + + KeyName: "history.numParentClosePolicySystemWorkflows", + Description: "NumParentClosePolicySystemWorkflows is key for number of parentClosePolicy system workflows running in total", DefaultValue: 10, }, HistoryThrottledLogRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.throttledLogRPS", - Description: "HistoryThrottledLogRPS is the rate limit on number of log messages emitted per second for throttled logger", - }, + + KeyName: "history.throttledLogRPS", + Description: "HistoryThrottledLogRPS is the rate limit on number of log messages emitted per second for throttled logger", DefaultValue: 4, }, DecisionRetryCriticalAttempts: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.decisionRetryCriticalAttempts", - Description: "DecisionRetryCriticalAttempts is decision attempt threshold for logging and emiting metrics", - }, + + KeyName: "history.decisionRetryCriticalAttempts", + Description: "DecisionRetryCriticalAttempts is decision attempt threshold for logging and emiting metrics", DefaultValue: 10, }, DecisionRetryMaxAttempts: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.decisionRetryMaxAttempts", - Description: "DecisionRetryMaxAttempts is the max limit for decision retry attempts. 0 indicates infinite number of attempts.", - }, + + KeyName: "history.decisionRetryMaxAttempts", + Description: "DecisionRetryMaxAttempts is the max limit for decision retry attempts. 
0 indicates infinite number of attempts.", DefaultValue: 1000, }, NormalDecisionScheduleToStartMaxAttempts: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.normalDecisionScheduleToStartMaxAttempts", - Description: "NormalDecisionScheduleToStartMaxAttempts is the maximum decision attempt for creating a scheduleToStart timeout timer for normal (non-sticky) decision", - }, + + KeyName: "history.normalDecisionScheduleToStartMaxAttempts", + Description: "NormalDecisionScheduleToStartMaxAttempts is the maximum decision attempt for creating a scheduleToStart timeout timer for normal (non-sticky) decision", DefaultValue: 0, }, MaxBufferedQueryCount: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.MaxBufferedQueryCount", - Description: "MaxBufferedQueryCount indicates the maximum number of queries which can be buffered at a given time for a single workflow", - }, + + KeyName: "history.MaxBufferedQueryCount", + Description: "MaxBufferedQueryCount indicates the maximum number of queries which can be buffered at a given time for a single workflow", DefaultValue: 1, }, MutableStateChecksumGenProbability: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.mutableStateChecksumGenProbability", - Description: "MutableStateChecksumGenProbability is the probability [0-100] that checksum will be generated for mutable state", - }, + + KeyName: "history.mutableStateChecksumGenProbability", + Description: "MutableStateChecksumGenProbability is the probability [0-100] that checksum will be generated for mutable state", DefaultValue: 0, }, MutableStateChecksumVerifyProbability: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.mutableStateChecksumVerifyProbability", - Description: "MutableStateChecksumVerifyProbability is the probability [0-100] that checksum will be verified for mutable state", - }, + + KeyName: "history.mutableStateChecksumVerifyProbability", + Description: "MutableStateChecksumVerifyProbability is the probability [0-100] that 
checksum will be verified for mutable state", DefaultValue: 0, }, MaxActivityCountDispatchByDomain: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.maxActivityCountDispatchByDomain", - Description: "MaxActivityCountDispatchByDomain max # of activity tasks to dispatch to matching before creating transfer tasks. This is an performance optimization to skip activity scheduling efforts.", - }, + + KeyName: "history.maxActivityCountDispatchByDomain", + Description: "MaxActivityCountDispatchByDomain max # of activity tasks to dispatch to matching before creating transfer tasks. This is an performance optimization to skip activity scheduling efforts.", DefaultValue: 0, }, ReplicationTaskFetcherParallelism: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.ReplicationTaskFetcherParallelism", - Description: "ReplicationTaskFetcherParallelism determines how many go routines we spin up for fetching tasks", - }, + + KeyName: "history.ReplicationTaskFetcherParallelism", + Description: "ReplicationTaskFetcherParallelism determines how many go routines we spin up for fetching tasks", DefaultValue: 1, }, ReplicationTaskProcessorErrorRetryMaxAttempts: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.ReplicationTaskProcessorErrorRetryMaxAttempts", - Description: "ReplicationTaskProcessorErrorRetryMaxAttempts is the max retry attempts for applying replication tasks", - }, + + KeyName: "history.ReplicationTaskProcessorErrorRetryMaxAttempts", + Description: "ReplicationTaskProcessorErrorRetryMaxAttempts is the max retry attempts for applying replication tasks", DefaultValue: 10, }, ReplicationTaskProcessorReadHistoryBatchSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "history.ReplicationTaskProcessorReadHistoryBatchSize", - Description: "ReplicationTaskProcessorReadHistoryBatchSize is the batch size to read history events", - }, + + KeyName: "history.ReplicationTaskProcessorReadHistoryBatchSize", + Description: 
"ReplicationTaskProcessorReadHistoryBatchSize is the batch size to read history events", DefaultValue: 5, }, WorkerPersistenceMaxQPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.persistenceMaxQPS", - Description: "WorkerPersistenceMaxQPS is the max qps worker host can query DB", - }, + + KeyName: "worker.persistenceMaxQPS", + Description: "WorkerPersistenceMaxQPS is the max qps worker host can query DB", DefaultValue: 500, }, WorkerPersistenceGlobalMaxQPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.persistenceGlobalMaxQPS", - Description: "WorkerPersistenceGlobalMaxQPS is the max qps worker cluster can query DB", - }, + + KeyName: "worker.persistenceGlobalMaxQPS", + Description: "WorkerPersistenceGlobalMaxQPS is the max qps worker cluster can query DB", DefaultValue: 0, }, WorkerIndexerConcurrency: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.indexerConcurrency", - Description: "WorkerIndexerConcurrency is the max concurrent messages to be processed at any given time", - }, + + KeyName: "worker.indexerConcurrency", + Description: "WorkerIndexerConcurrency is the max concurrent messages to be processed at any given time", DefaultValue: 1000, }, WorkerESProcessorNumOfWorkers: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.ESProcessorNumOfWorkers", - Description: "WorkerESProcessorNumOfWorkers is num of workers for esProcessor", - }, + + KeyName: "worker.ESProcessorNumOfWorkers", + Description: "WorkerESProcessorNumOfWorkers is num of workers for esProcessor", DefaultValue: 1, }, WorkerESProcessorBulkActions: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.ESProcessorBulkActions", - Description: "WorkerESProcessorBulkActions is max number of requests in bulk for esProcessor", - }, + + KeyName: "worker.ESProcessorBulkActions", + Description: "WorkerESProcessorBulkActions is max number of requests in bulk for esProcessor", DefaultValue: 1000, }, WorkerESProcessorBulkSize: DynamicInt{ - DynamicBase: 
DynamicBase{ - KeyName: "worker.ESProcessorBulkSize", - Description: "WorkerESProcessorBulkSize is max total size of bulk in bytes for esProcessor", - }, - DefaultValue: 2 << 24, + + KeyName: "worker.ESProcessorBulkSize", + Description: "WorkerESProcessorBulkSize is max total size of bulk in bytes for esProcessor", + DefaultValue: 2 << 24, // 16MB }, WorkerArchiverConcurrency: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.ArchiverConcurrency", - Description: "WorkerArchiverConcurrency is controls the number of coroutines handling archival work per archival workflow", - }, + + KeyName: "worker.ArchiverConcurrency", + Description: "WorkerArchiverConcurrency is controls the number of coroutines handling archival work per archival workflow", DefaultValue: 50, }, WorkerArchivalsPerIteration: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.ArchivalsPerIteration", - Description: "WorkerArchivalsPerIteration is controls the number of archivals handled in each iteration of archival workflow", - }, + + KeyName: "worker.ArchivalsPerIteration", + Description: "WorkerArchivalsPerIteration is controls the number of archivals handled in each iteration of archival workflow", DefaultValue: 1000, }, WorkerThrottledLogRPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.throttledLogRPS", - Description: "WorkerThrottledLogRPS is the rate limit on number of log messages emitted per second for throttled logger", - }, + + KeyName: "worker.throttledLogRPS", + Description: "WorkerThrottledLogRPS is the rate limit on number of log messages emitted per second for throttled logger", DefaultValue: 20, }, ScannerPersistenceMaxQPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.scannerPersistenceMaxQPS", - Description: "ScannerPersistenceMaxQPS is the maximum rate of persistence calls from worker.Scanner", - }, + + KeyName: "worker.scannerPersistenceMaxQPS", + Description: "ScannerPersistenceMaxQPS is the maximum rate of persistence calls from 
worker.Scanner", DefaultValue: 5, }, ScannerGetOrphanTasksPageSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.scannerGetOrphanTasksPageSize", - Description: "ScannerGetOrphanTasksPageSize is the maximum number of orphans to delete in one batch", - }, + + KeyName: "worker.scannerGetOrphanTasksPageSize", + Description: "ScannerGetOrphanTasksPageSize is the maximum number of orphans to delete in one batch", DefaultValue: 1000, }, ScannerBatchSizeForTasklistHandler: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.scannerBatchSizeForTasklistHandler", - Description: "ScannerBatchSizeForTasklistHandler is for: 1. max number of tasks to query per call(get tasks for tasklist) in the scavenger handler. 2. The scavenger then uses the return to decide if a tasklist can be deleted. It's better to keep it a relatively high number to let it be more efficient.", - }, + + KeyName: "worker.scannerBatchSizeForTasklistHandler", + Description: "ScannerBatchSizeForTasklistHandler is for: 1. max number of tasks to query per call(get tasks for tasklist) in the scavenger handler. 2. The scavenger then uses the return to decide if a tasklist can be deleted. 
It's better to keep it a relatively high number to let it be more efficient.", DefaultValue: 1000, }, ScannerMaxTasksProcessedPerTasklistJob: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.scannerMaxTasksProcessedPerTasklistJob", - Description: "ScannerMaxTasksProcessedPerTasklistJob is the number of tasks to process for a tasklist in each workflow run", - }, + + KeyName: "worker.scannerMaxTasksProcessedPerTasklistJob", + Description: "ScannerMaxTasksProcessedPerTasklistJob is the number of tasks to process for a tasklist in each workflow run", DefaultValue: 256, }, ConcreteExecutionsScannerConcurrency: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.executionsScannerConcurrency", - Description: "ConcreteExecutionsScannerConcurrency is indicates the concurrency of concrete execution scanner", - }, + + KeyName: "worker.executionsScannerConcurrency", + Description: "ConcreteExecutionsScannerConcurrency is indicates the concurrency of concrete execution scanner", DefaultValue: 25, }, ConcreteExecutionsScannerBlobstoreFlushThreshold: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.executionsScannerBlobstoreFlushThreshold", - Description: "ConcreteExecutionsScannerBlobstoreFlushThreshold is indicates the flush threshold of blobstore in concrete execution scanner", - }, + + KeyName: "worker.executionsScannerBlobstoreFlushThreshold", + Description: "ConcreteExecutionsScannerBlobstoreFlushThreshold is indicates the flush threshold of blobstore in concrete execution scanner", DefaultValue: 100, }, ConcreteExecutionsScannerActivityBatchSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.executionsScannerActivityBatchSize", - Description: "ConcreteExecutionsScannerActivityBatchSize is indicates the batch size of scanner activities", - }, + + KeyName: "worker.executionsScannerActivityBatchSize", + Description: "ConcreteExecutionsScannerActivityBatchSize is indicates the batch size of scanner activities", DefaultValue: 25, }, 
ConcreteExecutionsScannerPersistencePageSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.executionsScannerPersistencePageSize", - Description: "ConcreteExecutionsScannerPersistencePageSize is indicates the page size of execution persistence fetches in concrete execution scanner", - }, + + KeyName: "worker.executionsScannerPersistencePageSize", + Description: "ConcreteExecutionsScannerPersistencePageSize is indicates the page size of execution persistence fetches in concrete execution scanner", DefaultValue: 1000, }, CurrentExecutionsScannerConcurrency: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.currentExecutionsConcurrency", - Description: "CurrentExecutionsScannerConcurrency is indicates the concurrency of current executions scanner", - }, + + KeyName: "worker.currentExecutionsConcurrency", + Description: "CurrentExecutionsScannerConcurrency is indicates the concurrency of current executions scanner", DefaultValue: 25, }, CurrentExecutionsScannerBlobstoreFlushThreshold: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.currentExecutionsBlobstoreFlushThreshold", - Description: "CurrentExecutionsScannerBlobstoreFlushThreshold is indicates the flush threshold of blobstore in current executions scanner", - }, + + KeyName: "worker.currentExecutionsBlobstoreFlushThreshold", + Description: "CurrentExecutionsScannerBlobstoreFlushThreshold is indicates the flush threshold of blobstore in current executions scanner", DefaultValue: 100, }, CurrentExecutionsScannerActivityBatchSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.currentExecutionsActivityBatchSize", - Description: "CurrentExecutionsScannerActivityBatchSize is indicates the batch size of scanner activities", - }, + + KeyName: "worker.currentExecutionsActivityBatchSize", + Description: "CurrentExecutionsScannerActivityBatchSize is indicates the batch size of scanner activities", DefaultValue: 25, }, CurrentExecutionsScannerPersistencePageSize: DynamicInt{ - 
DynamicBase: DynamicBase{ - KeyName: "worker.currentExecutionsPersistencePageSize", - Description: "CurrentExecutionsScannerPersistencePageSize is indicates the page size of execution persistence fetches in current executions scanner", - }, + + KeyName: "worker.currentExecutionsPersistencePageSize", + Description: "CurrentExecutionsScannerPersistencePageSize is indicates the page size of execution persistence fetches in current executions scanner", DefaultValue: 1000, }, TimersScannerConcurrency: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.timersScannerConcurrency", - Description: "TimersScannerConcurrency is the concurrency of timers scanner", - }, + + KeyName: "worker.timersScannerConcurrency", + Description: "TimersScannerConcurrency is the concurrency of timers scanner", DefaultValue: 5, }, TimersScannerPersistencePageSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.timersScannerPersistencePageSize", - Description: "TimersScannerPersistencePageSize is the page size of timers persistence fetches in timers scanner", - }, + + KeyName: "worker.timersScannerPersistencePageSize", + Description: "TimersScannerPersistencePageSize is the page size of timers persistence fetches in timers scanner", DefaultValue: 1000, }, TimersScannerBlobstoreFlushThreshold: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.timersScannerConcurrency", - Description: "TimersScannerBlobstoreFlushThreshold is threshold to flush blob store", - }, + + KeyName: "worker.timersScannerBlobstoreFlushThreshold", + Description: "TimersScannerBlobstoreFlushThreshold is threshold to flush blob store", DefaultValue: 100, }, TimersScannerActivityBatchSize: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.timersScannerBlobstoreFlushThreshold", - Description: "TimersScannerActivityBatchSize is TimersScannerActivityBatchSize", - }, + + KeyName: "worker.timersScannerActivityBatchSize", + Description: "TimersScannerActivityBatchSize is 
TimersScannerActivityBatchSize", DefaultValue: 25, }, TimersScannerPeriodStart: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.timersScannerPeriodStart", - Description: "TimersScannerPeriodStart is interval start for fetching scheduled timers", - }, + + KeyName: "worker.timersScannerPeriodStart", + Description: "TimersScannerPeriodStart is interval start for fetching scheduled timers", DefaultValue: 24, }, TimersScannerPeriodEnd: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.timersScannerPeriodEnd", - Description: "TimersScannerPeriodEnd is interval end for fetching scheduled timers", - }, + + KeyName: "worker.timersScannerPeriodEnd", + Description: "TimersScannerPeriodEnd is interval end for fetching scheduled timers", DefaultValue: 3, }, ESAnalyzerMaxNumDomains: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.ESAnalyzerMaxNumDomains", - Description: "ESAnalyzerMaxNumDomains defines how many domains to check", - }, + + KeyName: "worker.ESAnalyzerMaxNumDomains", + Description: "ESAnalyzerMaxNumDomains defines how many domains to check", DefaultValue: 500, }, ESAnalyzerMaxNumWorkflowTypes: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.ESAnalyzerMaxNumWorkflowTypes", - Description: "ESAnalyzerMaxNumWorkflowTypes defines how many workflow types to check per domain", - }, + + KeyName: "worker.ESAnalyzerMaxNumWorkflowTypes", + Description: "ESAnalyzerMaxNumWorkflowTypes defines how many workflow types to check per domain", DefaultValue: 100, }, ESAnalyzerNumWorkflowsToRefresh: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.ESAnalyzerNumWorkflowsToRefresh", - Description: "ESAnalyzerNumWorkflowsToRefresh controls how many workflows per workflow type should be refreshed per workflow type", - }, + + KeyName: "worker.ESAnalyzerNumWorkflowsToRefresh", + Description: "ESAnalyzerNumWorkflowsToRefresh controls how many workflows per workflow type should be refreshed per workflow type", DefaultValue: 100, }, 
ESAnalyzerMinNumWorkflowsForAvg: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "worker.ESAnalyzerMinNumWorkflowsForAvg", - Description: "ESAnalyzerMinNumWorkflowsForAvg controls how many workflows to have at least to rely on workflow run time avg per type", - }, + + KeyName: "worker.ESAnalyzerMinNumWorkflowsForAvg", + Description: "ESAnalyzerMinNumWorkflowsForAvg controls how many workflows to have at least to rely on workflow run time avg per type", DefaultValue: 100, }, VisibilityArchivalQueryMaxRangeInDays: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.visibilityArchivalQueryMaxRangeInDays", - Description: "VisibilityArchivalQueryMaxRangeInDays is the maximum number of days for a visibility archival query", - }, + + KeyName: "frontend.visibilityArchivalQueryMaxRangeInDays", + Description: "VisibilityArchivalQueryMaxRangeInDays is the maximum number of days for a visibility archival query", DefaultValue: 60, }, VisibilityArchivalQueryMaxQPS: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "frontend.visibilityArchivalQueryMaxQPS", - Description: "VisibilityArchivalQueryMaxQPS is the timeout for a visibility archival query", - }, + + KeyName: "frontend.visibilityArchivalQueryMaxQPS", + Description: "VisibilityArchivalQueryMaxQPS is the timeout for a visibility archival query", DefaultValue: 1, }, WorkflowDeletionJitterRange: DynamicInt{ - DynamicBase: DynamicBase{ - KeyName: "system.workflowDeletionJitterRange", - Description: "WorkflowDeletionJitterRange defines the duration in minutes for workflow close tasks jittering", - }, + + KeyName: "system.workflowDeletionJitterRange", + Description: "WorkflowDeletionJitterRange defines the duration in minutes for workflow close tasks jittering", DefaultValue: 1, }, } var BoolKeys = map[BoolKey]DynamicBool{ TestGetBoolPropertyKey: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "testGetBoolPropertyKey", - Description: "", - }, + + KeyName: "testGetBoolPropertyKey", + Description: "", 
DefaultValue: false, }, TestGetBoolPropertyFilteredByDomainIDKey: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "testGetBoolPropertyFilteredByDomainIDKey", - Description: "", - }, + + KeyName: "testGetBoolPropertyFilteredByDomainIDKey", + Description: "", DefaultValue: false, }, TestGetBoolPropertyFilteredByTaskListInfoKey: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "testGetBoolPropertyFilteredByTaskListInfoKey", - Description: "", - }, + + KeyName: "testGetBoolPropertyFilteredByTaskListInfoKey", + Description: "", DefaultValue: false, }, EnableVisibilitySampling: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.enableVisibilitySampling", - Description: "EnableVisibilitySampling is key for enable visibility sampling for basic(DB based) visibility", - }, + + KeyName: "system.enableVisibilitySampling", + Description: "EnableVisibilitySampling is key for enable visibility sampling for basic(DB based) visibility", DefaultValue: false, // ... }, EnableReadFromClosedExecutionV2: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.enableReadFromClosedExecutionV2", - Description: "EnableReadFromClosedExecutionV2 is key for enable read from cadence_visibility.closed_executions_v2", - }, + + KeyName: "system.enableReadFromClosedExecutionV2", + Description: "EnableReadFromClosedExecutionV2 is key for enable read from cadence_visibility.closed_executions_v2", DefaultValue: false, }, EnableReadVisibilityFromES: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.enableReadVisibilityFromES", - Description: "EnableReadVisibilityFromES is key for enable read from elastic search or db visibility, usually using with AdvancedVisibilityWritingMode for seamless migration from db visibility to advanced visibility", - }, + + KeyName: "system.enableReadVisibilityFromES", + Description: "EnableReadVisibilityFromES is key for enable read from elastic search or db visibility, usually using with AdvancedVisibilityWritingMode for seamless migration 
from db visibility to advanced visibility", DefaultValue: true, }, EmitShardDiffLog: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "history.emitShardDiffLog", - Description: "EmitShardDiffLog is whether emit the shard diff log", - }, + + KeyName: "history.emitShardDiffLog", + Description: "EmitShardDiffLog is whether emit the shard diff log", DefaultValue: false, }, DisableListVisibilityByFilter: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "frontend.disableListVisibilityByFilter", - Description: "DisableListVisibilityByFilter is config to disable list open/close workflow using filter", - }, + + KeyName: "frontend.disableListVisibilityByFilter", + Description: "DisableListVisibilityByFilter is config to disable list open/close workflow using filter", DefaultValue: false, }, EnableReadFromHistoryArchival: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.enableReadFromHistoryArchival", - Description: "EnableReadFromHistoryArchival is key for enabling reading history from archival store", - }, + + KeyName: "system.enableReadFromHistoryArchival", + Description: "EnableReadFromHistoryArchival is key for enabling reading history from archival store", DefaultValue: true, }, EnableReadFromVisibilityArchival: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.enableReadFromVisibilityArchival", - Description: "EnableReadFromVisibilityArchival is key for enabling reading visibility from archival store to override the value from static config.", - }, + + KeyName: "system.enableReadFromVisibilityArchival", + Description: "EnableReadFromVisibilityArchival is key for enabling reading visibility from archival store to override the value from static config.", DefaultValue: true, }, EnableDomainNotActiveAutoForwarding: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.enableDomainNotActiveAutoForwarding", - Description: "EnableDomainNotActiveAutoForwarding decides requests form which domain will be forwarded to active cluster if domain is 
not active in current cluster. Only when selected-api-forwarding or all-domain-apis-forwarding is the policy in ClusterRedirectionPolicy(in static config). If the policy is noop(default) this flag is not doing anything.", - }, + + KeyName: "system.enableDomainNotActiveAutoForwarding", + Description: "EnableDomainNotActiveAutoForwarding decides requests form which domain will be forwarded to active cluster if domain is not active in current cluster. Only when selected-api-forwarding or all-domain-apis-forwarding is the policy in ClusterRedirectionPolicy(in static config). If the policy is noop(default) this flag is not doing anything.", DefaultValue: true, }, EnableGracefulFailover: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.enableGracefulFailover", - Description: "EnableGracefulFailover is whether enabling graceful failover", - }, + + KeyName: "system.enableGracefulFailover", + Description: "EnableGracefulFailover is whether enabling graceful failover", DefaultValue: true, }, DisallowQuery: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.disallowQuery", - Description: "DisallowQuery is the key to disallow query for a domain", - }, + + KeyName: "system.disallowQuery", + Description: "DisallowQuery is the key to disallow query for a domain", DefaultValue: false, }, EnableDebugMode: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.enableDebugMode", - Description: "EnableDebugMode is for enabling debugging components, logs and metrics", - }, + + KeyName: "system.enableDebugMode", + Description: "EnableDebugMode is for enabling debugging components, logs and metrics", DefaultValue: false, }, EnableGRPCOutbound: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.enableGRPCOutbound", - Description: "EnableGRPCOutbound is the key for enabling outbound GRPC traffic", - }, + + KeyName: "system.enableGRPCOutbound", + Description: "EnableGRPCOutbound is the key for enabling outbound GRPC traffic", DefaultValue: true, }, 
EnableSQLAsyncTransaction: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.enableSQLAsyncTransaction", - Description: "EnableSQLAsyncTransaction is the key for enabling async transaction", - }, + + KeyName: "system.enableSQLAsyncTransaction", + Description: "EnableSQLAsyncTransaction is the key for enabling async transaction", DefaultValue: false, }, EnableClientVersionCheck: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "frontend.enableClientVersionCheck", - Description: "EnableClientVersionCheck is enables client version check for frontend", - }, + + KeyName: "frontend.enableClientVersionCheck", + Description: "EnableClientVersionCheck is enables client version check for frontend", DefaultValue: false, }, SendRawWorkflowHistory: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "frontend.sendRawWorkflowHistory", - Description: "SendRawWorkflowHistory is whether to enable raw history retrieving", - }, + + KeyName: "frontend.sendRawWorkflowHistory", + Description: "SendRawWorkflowHistory is whether to enable raw history retrieving", DefaultValue: false, }, FrontendEmitSignalNameMetricsTag: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "frontend.emitSignalNameMetricsTag", - Description: "FrontendEmitSignalNameMetricsTag enables emitting signal name tag in metrics in frontend client", - }, + + KeyName: "frontend.emitSignalNameMetricsTag", + Description: "FrontendEmitSignalNameMetricsTag enables emitting signal name tag in metrics in frontend client", DefaultValue: false, }, MatchingEnableSyncMatch: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "matching.enableSyncMatch", - Description: "MatchingEnableSyncMatch is to enable sync match", - }, + + KeyName: "matching.enableSyncMatch", + Description: "MatchingEnableSyncMatch is to enable sync match", DefaultValue: true, }, MatchingEnableTaskInfoLogByDomainID: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "MatchingEnableTaskInfoLogByDomainID is enables info level logs for 
decision/activity task based on the request domainID", - Description: "matching.enableTaskInfoLogByDomainID", - }, + + KeyName: "matching.enableTaskInfoLogByDomainID", + Description: "MatchingEnableTaskInfoLogByDomainID is enables info level logs for decision/activity task based on the request domainID", DefaultValue: false, }, EventsCacheGlobalEnable: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "history.eventsCacheGlobalEnable", - Description: "EventsCacheGlobalEnable is enables global cache over all history shards", - }, + + KeyName: "history.eventsCacheGlobalEnable", + Description: "EventsCacheGlobalEnable is enables global cache over all history shards", DefaultValue: false, }, QueueProcessorEnableSplit: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "history.queueProcessorEnableSplit", - Description: "QueueProcessorEnableSplit is indicates whether processing queue split policy should be enabled", - }, + + KeyName: "history.queueProcessorEnableSplit", + Description: "QueueProcessorEnableSplit is indicates whether processing queue split policy should be enabled", DefaultValue: false, }, QueueProcessorEnableRandomSplitByDomainID: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "history.queueProcessorEnableRandomSplitByDomainID", - Description: "QueueProcessorEnableRandomSplitByDomainID is indicates whether random queue split policy should be enabled for a domain", - }, + + KeyName: "history.queueProcessorEnableRandomSplitByDomainID", + Description: "QueueProcessorEnableRandomSplitByDomainID is indicates whether random queue split policy should be enabled for a domain", DefaultValue: false, }, QueueProcessorEnablePendingTaskSplitByDomainID: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "history.queueProcessorEnablePendingTaskSplitByDomainID", - Description: "ueueProcessorEnablePendingTaskSplitByDomainID is indicates whether pending task split policy should be enabled", - }, + + KeyName: 
"history.queueProcessorEnablePendingTaskSplitByDomainID", + Description: "ueueProcessorEnablePendingTaskSplitByDomainID is indicates whether pending task split policy should be enabled", DefaultValue: false, }, QueueProcessorEnableStuckTaskSplitByDomainID: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "history.queueProcessorEnableStuckTaskSplitByDomainID", - Description: "QueueProcessorEnableStuckTaskSplitByDomainID is indicates whether stuck task split policy should be enabled", - }, + + KeyName: "history.queueProcessorEnableStuckTaskSplitByDomainID", + Description: "QueueProcessorEnableStuckTaskSplitByDomainID is indicates whether stuck task split policy should be enabled", DefaultValue: false, }, QueueProcessorEnablePersistQueueStates: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "history.queueProcessorEnablePersistQueueStates", - Description: "QueueProcessorEnablePersistQueueStates is indicates whether processing queue states should be persisted", - }, + + KeyName: "history.queueProcessorEnablePersistQueueStates", + Description: "QueueProcessorEnablePersistQueueStates is indicates whether processing queue states should be persisted", DefaultValue: true, }, QueueProcessorEnableLoadQueueStates: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "history.queueProcessorEnableLoadQueueStates", - Description: "QueueProcessorEnableLoadQueueStates is indicates whether processing queue states should be loaded", - }, + + KeyName: "history.queueProcessorEnableLoadQueueStates", + Description: "QueueProcessorEnableLoadQueueStates is indicates whether processing queue states should be loaded", DefaultValue: true, }, TransferProcessorEnableValidator: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "history.transferProcessorEnableValidator", - Description: "TransferProcessorEnableValidator is whether validator should be enabled for transferQueueProcessor", - }, + + KeyName: "history.transferProcessorEnableValidator", + Description: 
"TransferProcessorEnableValidator is whether validator should be enabled for transferQueueProcessor", DefaultValue: false, }, EnableAdminProtection: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "history.enableAdminProtection", - Description: "EnableAdminProtection is whether to enable admin checking", - }, + + KeyName: "history.enableAdminProtection", + Description: "EnableAdminProtection is whether to enable admin checking", DefaultValue: false, }, EnableParentClosePolicy: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "history.enableParentClosePolicy", - Description: "EnableParentClosePolicy is whether to ParentClosePolicy", - }, + + KeyName: "history.enableParentClosePolicy", + Description: "EnableParentClosePolicy is whether to ParentClosePolicy", DefaultValue: true, }, EnableDropStuckTaskByDomainID: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "history.DropStuckTaskByDomain", - Description: "EnableDropStuckTaskByDomainID is whether stuck timer/transfer task should be dropped for a domain", - }, + + KeyName: "history.DropStuckTaskByDomain", + Description: "EnableDropStuckTaskByDomainID is whether stuck timer/transfer task should be dropped for a domain", DefaultValue: false, }, EnableConsistentQuery: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "history.EnableConsistentQuery", - Description: "EnableConsistentQuery indicates if consistent query is enabled for the cluster", - }, + + KeyName: "history.EnableConsistentQuery", + Description: "EnableConsistentQuery indicates if consistent query is enabled for the cluster", DefaultValue: true, }, EnableConsistentQueryByDomain: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "history.EnableConsistentQueryByDomain", - Description: "EnableConsistentQueryByDomain indicates if consistent query is enabled for a domain", - }, + + KeyName: "history.EnableConsistentQueryByDomain", + Description: "EnableConsistentQueryByDomain indicates if consistent query is enabled for a domain", DefaultValue: 
false, }, EnableCrossClusterOperations: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "history.enableCrossClusterOperations", - Description: "EnableCrossClusterOperations indicates if cross cluster operations can be scheduled for a domain", - }, + + KeyName: "history.enableCrossClusterOperations", + Description: "EnableCrossClusterOperations indicates if cross cluster operations can be scheduled for a domain", DefaultValue: false, }, EnableHistoryCorruptionCheck: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "history.enableHistoryCorruptionCheck", - Description: "EnableHistoryCorruptionCheck enables additional sanity check for corrupted history. This allows early catches of DB corruptions but potiantally increased latency.", - }, + + KeyName: "history.enableHistoryCorruptionCheck", + Description: "EnableHistoryCorruptionCheck enables additional sanity check for corrupted history. This allows early catches of DB corruptions but potiantally increased latency.", DefaultValue: false, }, EnableActivityLocalDispatchByDomain: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "history.enableActivityLocalDispatchByDomain", - Description: "EnableActivityLocalDispatchByDomain is allows worker to dispatch activity tasks through local tunnel after decisions are made. This is an performance optimization to skip activity scheduling efforts", - }, + + KeyName: "history.enableActivityLocalDispatchByDomain", + Description: "EnableActivityLocalDispatchByDomain is allows worker to dispatch activity tasks through local tunnel after decisions are made. 
This is an performance optimization to skip activity scheduling efforts", DefaultValue: true, }, HistoryEnableTaskInfoLogByDomainID: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "history.enableTaskInfoLogByDomainID", - Description: "HistoryEnableTaskInfoLogByDomainID is enables info level logs for decision/activity task based on the request domainID", - }, + + KeyName: "history.enableTaskInfoLogByDomainID", + Description: "HistoryEnableTaskInfoLogByDomainID is enables info level logs for decision/activity task based on the request domainID", DefaultValue: false, }, EnableReplicationTaskGeneration: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "history.enableReplicationTaskGeneration", - Description: "EnableReplicationTaskGeneration is the flag to control replication generation", - }, + + KeyName: "history.enableReplicationTaskGeneration", + Description: "EnableReplicationTaskGeneration is the flag to control replication generation", DefaultValue: true, }, AllowArchivingIncompleteHistory: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.AllowArchivingIncompleteHistory", - Description: "AllowArchivingIncompleteHistory will continue on when seeing some error like history mutated(usually caused by database consistency issues)", - }, + + KeyName: "worker.AllowArchivingIncompleteHistory", + Description: "AllowArchivingIncompleteHistory will continue on when seeing some error like history mutated(usually caused by database consistency issues)", DefaultValue: false, }, EnableCleaningOrphanTaskInTasklistScavenger: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.enableCleaningOrphanTaskInTasklistScavenger", - Description: "EnableCleaningOrphanTaskInTasklistScavenger indicates if enabling the scanner to clean up orphan tasks", - }, + + KeyName: "worker.enableCleaningOrphanTaskInTasklistScavenger", + Description: "EnableCleaningOrphanTaskInTasklistScavenger indicates if enabling the scanner to clean up orphan tasks", DefaultValue: false, }, 
TaskListScannerEnabled: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.taskListScannerEnabled", - Description: "TaskListScannerEnabled is indicates if task list scanner should be started as part of worker.Scanner", - }, + + KeyName: "worker.taskListScannerEnabled", + Description: "TaskListScannerEnabled is indicates if task list scanner should be started as part of worker.Scanner", DefaultValue: true, }, HistoryScannerEnabled: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.historyScannerEnabled", - Description: "HistoryScannerEnabled is indicates if history scanner should be started as part of worker.Scanner", - }, + + KeyName: "worker.historyScannerEnabled", + Description: "HistoryScannerEnabled is indicates if history scanner should be started as part of worker.Scanner", DefaultValue: false, }, ConcreteExecutionsScannerEnabled: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.executionsScannerEnabled", - Description: "ConcreteExecutionsScannerEnabled is indicates if executions scanner should be started as part of worker.Scanner", - }, + + KeyName: "worker.executionsScannerEnabled", + Description: "ConcreteExecutionsScannerEnabled is indicates if executions scanner should be started as part of worker.Scanner", DefaultValue: false, }, ConcreteExecutionsScannerInvariantCollectionMutableState: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.executionsScannerInvariantCollectionMutableState", - Description: "ConcreteExecutionsScannerInvariantCollectionMutableState is indicates if mutable state invariant checks should be run", - }, + + KeyName: "worker.executionsScannerInvariantCollectionMutableState", + Description: "ConcreteExecutionsScannerInvariantCollectionMutableState is indicates if mutable state invariant checks should be run", DefaultValue: true, }, ConcreteExecutionsScannerInvariantCollectionHistory: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.executionsScannerInvariantCollectionHistory", - 
Description: "ConcreteExecutionsScannerInvariantCollectionHistory is indicates if history invariant checks should be run", - }, + + KeyName: "worker.executionsScannerInvariantCollectionHistory", + Description: "ConcreteExecutionsScannerInvariantCollectionHistory is indicates if history invariant checks should be run", DefaultValue: true, }, CurrentExecutionsScannerEnabled: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.currentExecutionsScannerEnabled", - Description: "CurrentExecutionsScannerEnabled is indicates if current executions scanner should be started as part of worker.Scanner", - }, + + KeyName: "worker.currentExecutionsScannerEnabled", + Description: "CurrentExecutionsScannerEnabled is indicates if current executions scanner should be started as part of worker.Scanner", DefaultValue: false, }, CurrentExecutionsScannerInvariantCollectionHistory: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.currentExecutionsScannerInvariantCollectionHistory", - Description: "CurrentExecutionsScannerInvariantCollectionHistory is indicates if history invariant checks should be run", - }, + + KeyName: "worker.currentExecutionsScannerInvariantCollectionHistory", + Description: "CurrentExecutionsScannerInvariantCollectionHistory is indicates if history invariant checks should be run", DefaultValue: true, }, CurrentExecutionsScannerInvariantCollectionMutableState: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.currentExecutionsInvariantCollectionMutableState", - Description: "CurrentExecutionsScannerInvariantCollectionMutableState is indicates if mutable state invariant checks should be run", - }, + + KeyName: "worker.currentExecutionsInvariantCollectionMutableState", + Description: "CurrentExecutionsScannerInvariantCollectionMutableState is indicates if mutable state invariant checks should be run", DefaultValue: true, }, EnableBatcher: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.enableBatcher", - Description: 
"EnableBatcher is decides whether start batcher in our worker", - }, + + KeyName: "worker.enableBatcher", + Description: "EnableBatcher is decides whether start batcher in our worker", DefaultValue: true, }, EnableParentClosePolicyWorker: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.enableParentClosePolicyWorker", - Description: "EnableParentClosePolicyWorker decides whether or not enable system workers for processing parent close policy task", - }, + + KeyName: "system.enableParentClosePolicyWorker", + Description: "EnableParentClosePolicyWorker decides whether or not enable system workers for processing parent close policy task", DefaultValue: true, }, EnableESAnalyzer: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.enableESAnalyzer", - Description: "EnableESAnalyzer decides whether to enable system workers for processing ElasticSearch Analyzer", - }, + + KeyName: "system.enableESAnalyzer", + Description: "EnableESAnalyzer decides whether to enable system workers for processing ElasticSearch Analyzer", DefaultValue: false, }, EnableWatchDog: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.EnableWatchDog", - Description: "EnableWatchDog decides whether to enable watchdog system worker", - }, + + KeyName: "system.EnableWatchDog", + Description: "EnableWatchDog decides whether to enable watchdog system worker", DefaultValue: false, }, EnableStickyQuery: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.enableStickyQuery", - Description: "EnableStickyQuery is indicates if sticky query should be enabled per domain", - }, + + KeyName: "system.enableStickyQuery", + Description: "EnableStickyQuery is indicates if sticky query should be enabled per domain", DefaultValue: true, }, EnableFailoverManager: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.enableFailoverManager", - Description: "EnableFailoverManager is indicates if failover manager is enabled", - }, + + KeyName: "system.enableFailoverManager", + 
Description: "EnableFailoverManager is indicates if failover manager is enabled", DefaultValue: true, }, EnableWorkflowShadower: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.enableWorkflowShadower", - Description: "EnableWorkflowShadower indicates if workflow shadower is enabled", - }, + + KeyName: "system.enableWorkflowShadower", + Description: "EnableWorkflowShadower indicates if workflow shadower is enabled", DefaultValue: true, }, ConcreteExecutionFixerDomainAllow: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.concreteExecutionFixerDomainAllow", - Description: "ConcreteExecutionFixerDomainAllow is which domains are allowed to be fixed by concrete fixer workflow", - }, + + KeyName: "worker.concreteExecutionFixerDomainAllow", + Description: "ConcreteExecutionFixerDomainAllow is which domains are allowed to be fixed by concrete fixer workflow", DefaultValue: false, }, CurrentExecutionFixerDomainAllow: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.currentExecutionFixerDomainAllow", - Description: "CurrentExecutionFixerDomainAllow is which domains are allowed to be fixed by current fixer workflow", - }, + + KeyName: "worker.currentExecutionFixerDomainAllow", + Description: "CurrentExecutionFixerDomainAllow is which domains are allowed to be fixed by current fixer workflow", DefaultValue: false, }, TimersScannerEnabled: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.timersScannerEnabled", - Description: "TimersScannerEnabled is if timers scanner should be started as part of worker.Scanner", - }, + + KeyName: "worker.timersScannerEnabled", + Description: "TimersScannerEnabled is if timers scanner should be started as part of worker.Scanner", DefaultValue: false, }, TimersFixerEnabled: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.timersFixerEnabled", - Description: "TimersFixerEnabled is if timers fixer should be started as part of worker.Scanner", - }, + + KeyName: "worker.timersFixerEnabled", + 
Description: "TimersFixerEnabled is if timers fixer should be started as part of worker.Scanner", DefaultValue: false, }, TimersFixerDomainAllow: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.timersFixerDomainAllow", - Description: "TimersFixerDomainAllow is which domains are allowed to be fixed by timer fixer workflow", - }, + + KeyName: "worker.timersFixerDomainAllow", + Description: "TimersFixerDomainAllow is which domains are allowed to be fixed by timer fixer workflow", DefaultValue: false, }, ConcreteExecutionFixerEnabled: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.concreteExecutionFixerEnabled", - Description: "ConcreteExecutionFixerEnabled is if concrete execution fixer workflow is enabled", - }, + + KeyName: "worker.concreteExecutionFixerEnabled", + Description: "ConcreteExecutionFixerEnabled is if concrete execution fixer workflow is enabled", DefaultValue: false, }, CurrentExecutionFixerEnabled: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.currentExecutionFixerEnabled", - Description: "CurrentExecutionFixerEnabled is if current execution fixer workflow is enabled", - }, + + KeyName: "worker.currentExecutionFixerEnabled", + Description: "CurrentExecutionFixerEnabled is if current execution fixer workflow is enabled", DefaultValue: false, }, EnableAuthorization: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.enableAuthorization", - Description: "EnableAuthorization is the key to enable authorization for a domain, only for extension binary:", - }, + + KeyName: "system.enableAuthorization", + Description: "EnableAuthorization is the key to enable authorization for a domain, only for extension binary:", DefaultValue: false, }, EnableServiceAuthorization: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.enableServiceAuthorization", - Description: "EnableServiceAuthorization is the key to enable authorization for a service, only for extension binary:", - }, + + KeyName: 
"system.enableServiceAuthorization", + Description: "EnableServiceAuthorization is the key to enable authorization for a service, only for extension binary:", DefaultValue: false, }, EnableServiceAuthorizationLogOnly: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.enableServiceAuthorizationLogOnly", - Description: "EnableServiceAuthorizationLogOnly is the key to enable authorization logging for a service, only for extension binary:", - }, + + KeyName: "system.enableServiceAuthorizationLogOnly", + Description: "EnableServiceAuthorizationLogOnly is the key to enable authorization logging for a service, only for extension binary:", DefaultValue: false, }, ESAnalyzerPause: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.ESAnalyzerPause", - Description: "ESAnalyzerPause defines if we want to dynamically pause the analyzer workflow", - }, + + KeyName: "worker.ESAnalyzerPause", + Description: "ESAnalyzerPause defines if we want to dynamically pause the analyzer workflow", DefaultValue: false, }, EnableArchivalCompression: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.EnableArchivalCompression", - Description: "EnableArchivalCompression indicates whether blobs are compressed before they are archived", - }, + + KeyName: "worker.EnableArchivalCompression", + Description: "EnableArchivalCompression indicates whether blobs are compressed before they are archived", DefaultValue: false, }, ESAnalyzerEnableAvgDurationBasedChecks: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "worker.ESAnalyzerEnableAvgDurationBasedChecks", - Description: "ESAnalyzerEnableAvgDurationBasedChecks controls if we want to enable avg duration based task refreshes", - }, + + KeyName: "worker.ESAnalyzerEnableAvgDurationBasedChecks", + Description: "ESAnalyzerEnableAvgDurationBasedChecks controls if we want to enable avg duration based task refreshes", DefaultValue: false, }, CorruptWorkflowWatchdogPause: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: 
"worker.CorruptWorkflowWatchdogPause", - Description: "CorruptWorkflowWatchdogPause defines if we want to dynamically pause the watchdog workflow", - }, + + KeyName: "worker.CorruptWorkflowWatchdogPause", + Description: "CorruptWorkflowWatchdogPause defines if we want to dynamically pause the watchdog workflow", DefaultValue: false, }, Lockdown: DynamicBool{ - DynamicBase: DynamicBase{ - KeyName: "system.Lockdown", - Description: "Lockdown defines if we want to allow failovers of domains to this cluster", - }, + + KeyName: "system.Lockdown", + Description: "Lockdown defines if we want to allow failovers of domains to this cluster", DefaultValue: false, }, } var FloatKeys = map[FloatKey]DynamicFloat{ TestGetFloat64PropertyKey: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "testGetFloat64PropertyKey", - Description: "", - }, + + KeyName: "testGetFloat64PropertyKey", + Description: "", DefaultValue: 0, }, PersistenceErrorInjectionRate: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "system.persistenceErrorInjectionRate", - Description: "PersistenceErrorInjectionRate is rate for injecting random error in persistence", - }, + + KeyName: "system.persistenceErrorInjectionRate", + Description: "PersistenceErrorInjectionRate is rate for injecting random error in persistence", DefaultValue: 0, }, AdminErrorInjectionRate: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "admin.errorInjectionRate", - Description: "dminErrorInjectionRate is the rate for injecting random error in admin client", - }, + + KeyName: "admin.errorInjectionRate", + Description: "AdminErrorInjectionRate is the rate for injecting random error in admin client", DefaultValue: 0, }, DomainFailoverRefreshTimerJitterCoefficient: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "frontend.domainFailoverRefreshTimerJitterCoefficient", - Description: "DomainFailoverRefreshTimerJitterCoefficient is the jitter for domain failover refresh timer jitter", - }, + + KeyName:
"frontend.domainFailoverRefreshTimerJitterCoefficient", + Description: "DomainFailoverRefreshTimerJitterCoefficient is the jitter for domain failover refresh timer jitter", DefaultValue: 0.1, }, FrontendErrorInjectionRate: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "frontend.errorInjectionRate", - Description: "FrontendErrorInjectionRate is rate for injecting random error in frontend client", - }, + + KeyName: "frontend.errorInjectionRate", + Description: "FrontendErrorInjectionRate is rate for injecting random error in frontend client", DefaultValue: 0, }, MatchingErrorInjectionRate: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "matching.errorInjectionRate", - Description: "MatchingErrorInjectionRate is rate for injecting random error in matching client", - }, + + KeyName: "matching.errorInjectionRate", + Description: "MatchingErrorInjectionRate is rate for injecting random error in matching client", DefaultValue: 0, }, TaskRedispatchIntervalJitterCoefficient: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.taskRedispatchIntervalJitterCoefficient", - Description: "TaskRedispatchIntervalJitterCoefficient is the task redispatch interval jitter coefficient", - }, + + KeyName: "history.taskRedispatchIntervalJitterCoefficient", + Description: "TaskRedispatchIntervalJitterCoefficient is the task redispatch interval jitter coefficient", DefaultValue: 0.15, }, QueueProcessorRandomSplitProbability: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.queueProcessorRandomSplitProbability", - Description: "QueueProcessorRandomSplitProbability is the probability for a domain to be split to a new processing queue", - }, + + KeyName: "history.queueProcessorRandomSplitProbability", + Description: "QueueProcessorRandomSplitProbability is the probability for a domain to be split to a new processing queue", DefaultValue: 0.01, }, QueueProcessorPollBackoffIntervalJitterCoefficient: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: 
"history.queueProcessorPollBackoffIntervalJitterCoefficient", - Description: "QueueProcessorPollBackoffIntervalJitterCoefficient is backoff interval jitter coefficient", - }, + + KeyName: "history.queueProcessorPollBackoffIntervalJitterCoefficient", + Description: "QueueProcessorPollBackoffIntervalJitterCoefficient is backoff interval jitter coefficient", DefaultValue: 0.15, }, TimerProcessorUpdateAckIntervalJitterCoefficient: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.timerProcessorUpdateAckIntervalJitterCoefficient", - Description: "TimerProcessorUpdateAckIntervalJitterCoefficient is the update interval jitter coefficient", - }, + + KeyName: "history.timerProcessorUpdateAckIntervalJitterCoefficient", + Description: "TimerProcessorUpdateAckIntervalJitterCoefficient is the update interval jitter coefficient", DefaultValue: 0.15, }, TimerProcessorMaxPollIntervalJitterCoefficient: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.timerProcessorMaxPollIntervalJitterCoefficient", - Description: "TimerProcessorMaxPollIntervalJitterCoefficient is the max poll interval jitter coefficient", - }, + + KeyName: "history.timerProcessorMaxPollIntervalJitterCoefficient", + Description: "TimerProcessorMaxPollIntervalJitterCoefficient is the max poll interval jitter coefficient", DefaultValue: 0.15, }, TimerProcessorSplitQueueIntervalJitterCoefficient: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.timerProcessorSplitQueueIntervalJitterCoefficient", - Description: "TimerProcessorSplitQueueIntervalJitterCoefficient is the split processing queue interval jitter coefficient", - }, + + KeyName: "history.timerProcessorSplitQueueIntervalJitterCoefficient", + Description: "TimerProcessorSplitQueueIntervalJitterCoefficient is the split processing queue interval jitter coefficient", DefaultValue: 0.15, }, TransferProcessorMaxPollIntervalJitterCoefficient: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: 
"history.transferProcessorMaxPollIntervalJitterCoefficient", - Description: "TransferProcessorMaxPollIntervalJitterCoefficient is the max poll interval jitter coefficient", - }, + + KeyName: "history.transferProcessorMaxPollIntervalJitterCoefficient", + Description: "TransferProcessorMaxPollIntervalJitterCoefficient is the max poll interval jitter coefficient", DefaultValue: 0.15, }, TransferProcessorSplitQueueIntervalJitterCoefficient: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.transferProcessorSplitQueueIntervalJitterCoefficient", - Description: "TransferProcessorSplitQueueIntervalJitterCoefficient is the split processing queue interval jitter coefficient", - }, + + KeyName: "history.transferProcessorSplitQueueIntervalJitterCoefficient", + Description: "TransferProcessorSplitQueueIntervalJitterCoefficient is the split processing queue interval jitter coefficient", DefaultValue: 0.15, }, TransferProcessorUpdateAckIntervalJitterCoefficient: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.transferProcessorUpdateAckIntervalJitterCoefficient", - Description: "TransferProcessorUpdateAckIntervalJitterCoefficient is the update interval jitter coefficient", - }, + + KeyName: "history.transferProcessorUpdateAckIntervalJitterCoefficient", + Description: "TransferProcessorUpdateAckIntervalJitterCoefficient is the update interval jitter coefficient", DefaultValue: 0.15, }, CrossClusterSourceProcessorMaxPollIntervalJitterCoefficient: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterSourceProcessorMaxPollIntervalJitterCoefficient", - Description: "CrossClusterSourceProcessorMaxPollIntervalJitterCoefficient is the max poll interval jitter coefficient", - }, + + KeyName: "history.crossClusterSourceProcessorMaxPollIntervalJitterCoefficient", + Description: "CrossClusterSourceProcessorMaxPollIntervalJitterCoefficient is the max poll interval jitter coefficient", DefaultValue: 0.15, }, 
CrossClusterSourceProcessorUpdateAckIntervalJitterCoefficient: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterSourceProcessorUpdateAckIntervalJitterCoefficient", - Description: "CrossClusterSourceProcessorUpdateAckIntervalJitterCoefficient is the update interval jitter coefficient", - }, + + KeyName: "history.crossClusterSourceProcessorUpdateAckIntervalJitterCoefficient", + Description: "CrossClusterSourceProcessorUpdateAckIntervalJitterCoefficient is the update interval jitter coefficient", DefaultValue: 0.15, }, CrossClusterTargetProcessorJitterCoefficient: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterTargetProcessorJitterCoefficient", - Description: "CrossClusterTargetProcessorJitterCoefficient is the jitter coefficient used in cross cluster task processor", - }, + + KeyName: "history.crossClusterTargetProcessorJitterCoefficient", + Description: "CrossClusterTargetProcessorJitterCoefficient is the jitter coefficient used in cross cluster task processor", DefaultValue: 0.15, }, CrossClusterFetcherJitterCoefficient: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterFetcherJitterCoefficient", - Description: "CrossClusterFetcherJitterCoefficient is the jitter coefficient used in cross cluster task fetcher", - }, + + KeyName: "history.crossClusterFetcherJitterCoefficient", + Description: "CrossClusterFetcherJitterCoefficient is the jitter coefficient used in cross cluster task fetcher", DefaultValue: 0.15, }, ReplicationTaskProcessorCleanupJitterCoefficient: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.ReplicationTaskProcessorCleanupJitterCoefficient", - Description: "ReplicationTaskProcessorCleanupJitterCoefficient is the jitter for cleanup timer", - }, + + KeyName: "history.ReplicationTaskProcessorCleanupJitterCoefficient", + Description: "ReplicationTaskProcessorCleanupJitterCoefficient is the jitter for cleanup timer", DefaultValue: 0.15, }, 
ReplicationTaskProcessorStartWaitJitterCoefficient: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.ReplicationTaskProcessorStartWaitJitterCoefficient", - Description: "ReplicationTaskProcessorStartWaitJitterCoefficient is the jitter for batch start wait timer", - }, + + KeyName: "history.ReplicationTaskProcessorStartWaitJitterCoefficient", + Description: "ReplicationTaskProcessorStartWaitJitterCoefficient is the jitter for batch start wait timer", DefaultValue: 0.9, }, ReplicationTaskProcessorHostQPS: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.ReplicationTaskProcessorHostQPS", - Description: "ReplicationTaskProcessorHostQPS is the qps of task processing rate limiter on host level", - }, + + KeyName: "history.ReplicationTaskProcessorHostQPS", + Description: "ReplicationTaskProcessorHostQPS is the qps of task processing rate limiter on host level", DefaultValue: 1500, }, ReplicationTaskProcessorShardQPS: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.ReplicationTaskProcessorShardQPS", - Description: "ReplicationTaskProcessorShardQPS is the qps of task processing rate limiter on shard level", - }, + + KeyName: "history.ReplicationTaskProcessorShardQPS", + Description: "ReplicationTaskProcessorShardQPS is the qps of task processing rate limiter on shard level", DefaultValue: 5, }, ReplicationTaskGenerationQPS: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.ReplicationTaskGenerationQPS", - Description: "ReplicationTaskGenerationQPS is the wait time between each replication task generation qps", - }, + + KeyName: "history.ReplicationTaskGenerationQPS", + Description: "ReplicationTaskGenerationQPS is the wait time between each replication task generation qps", DefaultValue: 100, }, MutableStateChecksumInvalidateBefore: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.mutableStateChecksumInvalidateBefore", - Description: "MutableStateChecksumInvalidateBefore is the epoch timestamp before which all 
checksums are to be discarded", - }, + + KeyName: "history.mutableStateChecksumInvalidateBefore", + Description: "MutableStateChecksumInvalidateBefore is the epoch timestamp before which all checksums are to be discarded", DefaultValue: 0, }, NotifyFailoverMarkerTimerJitterCoefficient: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.NotifyFailoverMarkerTimerJitterCoefficient", - Description: "NotifyFailoverMarkerTimerJitterCoefficient is the jitter for failover marker notifier timer", - }, + + KeyName: "history.NotifyFailoverMarkerTimerJitterCoefficient", + Description: "NotifyFailoverMarkerTimerJitterCoefficient is the jitter for failover marker notifier timer", DefaultValue: 0.15, }, HistoryErrorInjectionRate: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.errorInjectionRate", - Description: "HistoryErrorInjectionRate is rate for injecting random error in history client", - }, + + KeyName: "history.errorInjectionRate", + Description: "HistoryErrorInjectionRate is rate for injecting random error in history client", DefaultValue: 0, }, ReplicationTaskFetcherTimerJitterCoefficient: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "history.ReplicationTaskFetcherTimerJitterCoefficient", - Description: "ReplicationTaskFetcherTimerJitterCoefficient is the jitter for fetcher timer", - }, + + KeyName: "history.ReplicationTaskFetcherTimerJitterCoefficient", + Description: "ReplicationTaskFetcherTimerJitterCoefficient is the jitter for fetcher timer", DefaultValue: 0.15, }, WorkerDeterministicConstructionCheckProbability: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "worker.DeterministicConstructionCheckProbability", - Description: "WorkerDeterministicConstructionCheckProbability controls the probability of running a deterministic construction check for any given archival", - }, + + KeyName: "worker.DeterministicConstructionCheckProbability", + Description: "WorkerDeterministicConstructionCheckProbability controls the probability of 
running a deterministic construction check for any given archival", DefaultValue: 0.002, }, WorkerBlobIntegrityCheckProbability: DynamicFloat{ - DynamicBase: DynamicBase{ - KeyName: "worker.BlobIntegrityCheckProbability", - Description: "WorkerBlobIntegrityCheckProbability controls the probability of running an integrity check for any given archival", - }, + + KeyName: "worker.BlobIntegrityCheckProbability", + Description: "WorkerBlobIntegrityCheckProbability controls the probability of running an integrity check for any given archival", DefaultValue: 0.002, }, } var StringKeys = map[StringKey]DynamicString{ TestGetStringPropertyKey: DynamicString{ - DynamicBase: DynamicBase{ - KeyName: "testGetStringPropertyKey", - Description: "", - }, + + KeyName: "testGetStringPropertyKey", + Description: "", DefaultValue: "", }, AdvancedVisibilityWritingMode: DynamicString{ - DynamicBase: DynamicBase{ - KeyName: "system.advancedVisibilityWritingMode", - Description: "AdvancedVisibilityWritingMode is key for how to write to advanced visibility. The most useful option is dual, which can be used for seamless migration from db visibility to advanced visibility, usually using with EnableReadVisibilityFromES", - }, + + KeyName: "system.advancedVisibilityWritingMode", + Description: "AdvancedVisibilityWritingMode is key for how to write to advanced visibility. 
The most useful option is dual, which can be used for seamless migration from db visibility to advanced visibility, usually using with EnableReadVisibilityFromES", DefaultValue: "on", }, HistoryArchivalStatus: DynamicString{ - DynamicBase: DynamicBase{ - KeyName: "system.historyArchivalStatus", - Description: "HistoryArchivalStatus is key for the status of history archival to override the value from static config.", - }, + + KeyName: "system.historyArchivalStatus", + Description: "HistoryArchivalStatus is key for the status of history archival to override the value from static config.", DefaultValue: "enabled", }, VisibilityArchivalStatus: DynamicString{ - DynamicBase: DynamicBase{ - KeyName: "system.visibilityArchivalStatus", - Description: "VisibilityArchivalStatus is key for the status of visibility archival to override the value from static config.", - }, + + KeyName: "system.visibilityArchivalStatus", + Description: "VisibilityArchivalStatus is key for the status of visibility archival to override the value from static config.", DefaultValue: "enabled", }, DefaultEventEncoding: DynamicString{ - DynamicBase: DynamicBase{ - KeyName: "history.defaultEventEncoding", - Description: "DefaultEventEncoding is the encoding type for history events", - }, + + KeyName: "history.defaultEventEncoding", + Description: "DefaultEventEncoding is the encoding type for history events", DefaultValue: string(common.EncodingTypeThriftRW), }, AdminOperationToken: DynamicString{ - DynamicBase: DynamicBase{ - KeyName: "history.adminOperationToken", - Description: "AdminOperationToken is the token to pass admin checking", - }, + + KeyName: "history.adminOperationToken", + Description: "AdminOperationToken is the token to pass admin checking", DefaultValue: "CadenceTeamONLY", }, ESAnalyzerLimitToTypes: DynamicString{ - DynamicBase: DynamicBase{ - KeyName: "worker.ESAnalyzerLimitToTypes", - Description: "ESAnalyzerLimitToTypes controls if we want to limit ESAnalyzer only to some workflow 
types", - }, + + KeyName: "worker.ESAnalyzerLimitToTypes", + Description: "ESAnalyzerLimitToTypes controls if we want to limit ESAnalyzer only to some workflow types", DefaultValue: "", }, ESAnalyzerLimitToDomains: DynamicString{ - DynamicBase: DynamicBase{ - KeyName: "worker.ESAnalyzerLimitToDomains", - Description: "ESAnalyzerLimitToDomains controls if we want to limit ESAnalyzer only to some domains", - }, + + KeyName: "worker.ESAnalyzerLimitToDomains", + Description: "ESAnalyzerLimitToDomains controls if we want to limit ESAnalyzer only to some domains", DefaultValue: "", }, ESAnalyzerWorkflowDurationWarnThresholds: DynamicString{ - DynamicBase: DynamicBase{ - KeyName: "worker.ESAnalyzerWorkflowDurationWarnThresholds", - Description: "ESAnalyzerWorkflowDurationWarnThresholds defines the warning execution thresholds for workflow types", - }, + + KeyName: "worker.ESAnalyzerWorkflowDurationWarnThresholds", + Description: "ESAnalyzerWorkflowDurationWarnThresholds defines the warning execution thresholds for workflow types", DefaultValue: "", }, } var DurationKeys = map[DurationKey]DynamicDuration{ TestGetDurationPropertyKey: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "testGetDurationPropertyKey", - Description: "", - }, + + KeyName: "testGetDurationPropertyKey", + Description: "", DefaultValue: 0, }, TestGetDurationPropertyFilteredByDomainKey: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "testGetDurationPropertyFilteredByDomainKey", - Description: "", - }, + + KeyName: "testGetDurationPropertyFilteredByDomainKey", + Description: "", DefaultValue: 0, }, TestGetDurationPropertyFilteredByTaskListInfoKey: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "testGetDurationPropertyFilteredByTaskListInfoKey", - Description: "", - }, + + KeyName: "testGetDurationPropertyFilteredByTaskListInfoKey", + Description: "", DefaultValue: 0, }, FrontendShutdownDrainDuration: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: 
"frontend.shutdownDrainDuration", - Description: "FrontendShutdownDrainDuration is the duration of traffic drain during shutdown", - }, + + KeyName: "frontend.shutdownDrainDuration", + Description: "FrontendShutdownDrainDuration is the duration of traffic drain during shutdown", DefaultValue: 0, }, FrontendFailoverCoolDown: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "frontend.failoverCoolDown", - Description: "FrontendFailoverCoolDown is duration between two domain failvoers", - }, + + KeyName: "frontend.failoverCoolDown", + Description: "FrontendFailoverCoolDown is duration between two domain failvoers", DefaultValue: time.Minute, }, DomainFailoverRefreshInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "frontend.domainFailoverRefreshInterval", - Description: "DomainFailoverRefreshInterval is the domain failover refresh timer", - }, + + KeyName: "frontend.domainFailoverRefreshInterval", + Description: "DomainFailoverRefreshInterval is the domain failover refresh timer", DefaultValue: time.Second * 10, }, MatchingLongPollExpirationInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "matching.longPollExpirationInterval", - Description: "MatchingLongPollExpirationInterval is the long poll expiration interval in the matching service", - }, + + KeyName: "matching.longPollExpirationInterval", + Description: "MatchingLongPollExpirationInterval is the long poll expiration interval in the matching service", DefaultValue: time.Minute, }, MatchingUpdateAckInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "matching.updateAckInterval", - Description: "MatchingUpdateAckInterval is the interval for update ack", - }, + + KeyName: "matching.updateAckInterval", + Description: "MatchingUpdateAckInterval is the interval for update ack", DefaultValue: time.Minute, }, MatchingIdleTasklistCheckInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "matching.idleTasklistCheckInterval", - Description: 
"MatchingIdleTasklistCheckInterval is the IdleTasklistCheckInterval", - }, + + KeyName: "matching.idleTasklistCheckInterval", + Description: "MatchingIdleTasklistCheckInterval is the IdleTasklistCheckInterval", DefaultValue: time.Minute * 5, }, MaxTasklistIdleTime: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "matching.maxTasklistIdleTime", - Description: "MaxTasklistIdleTime is the max time tasklist being idle", - }, + + KeyName: "matching.maxTasklistIdleTime", + Description: "MaxTasklistIdleTime is the max time tasklist being idle", DefaultValue: time.Minute * 5, }, MatchingShutdownDrainDuration: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "matching.shutdownDrainDuration", - Description: "MatchingShutdownDrainDuration is the duration of traffic drain during shutdown", - }, + + KeyName: "matching.shutdownDrainDuration", + Description: "MatchingShutdownDrainDuration is the duration of traffic drain during shutdown", DefaultValue: 0, }, MatchingActivityTaskSyncMatchWaitTime: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "matching.activityTaskSyncMatchWaitTime", - Description: "MatchingActivityTaskSyncMatchWaitTime is the amount of time activity task will wait to be sync matched", - }, + + KeyName: "matching.activityTaskSyncMatchWaitTime", + Description: "MatchingActivityTaskSyncMatchWaitTime is the amount of time activity task will wait to be sync matched", DefaultValue: time.Millisecond * 100, }, HistoryLongPollExpirationInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.longPollExpirationInterval", - Description: "HistoryLongPollExpirationInterval is the long poll expiration interval in the history service", - }, - DefaultValue: time.Second * 20, + + KeyName: "history.longPollExpirationInterval", + Description: "HistoryLongPollExpirationInterval is the long poll expiration interval in the history service", + DefaultValue: time.Second * 20, // history client: client/history/client.go set the client timeout 
20s }, HistoryCacheTTL: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.cacheTTL", - Description: "HistoryCacheTTL is TTL of history cache", - }, + + KeyName: "history.cacheTTL", + Description: "HistoryCacheTTL is TTL of history cache", DefaultValue: time.Hour, }, HistoryShutdownDrainDuration: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.shutdownDrainDuration", - Description: "HistoryShutdownDrainDuration is the duration of traffic drain during shutdown", - }, + + KeyName: "history.shutdownDrainDuration", + Description: "HistoryShutdownDrainDuration is the duration of traffic drain during shutdown", DefaultValue: 0, }, EventsCacheTTL: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.eventsCacheTTL", - Description: "EventsCacheTTL is TTL of events cache", - }, + + KeyName: "history.eventsCacheTTL", + Description: "EventsCacheTTL is TTL of events cache", DefaultValue: time.Hour, }, AcquireShardInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.acquireShardInterval", - Description: "AcquireShardInterval is interval that timer used to acquire shard", - }, + + KeyName: "history.acquireShardInterval", + Description: "AcquireShardInterval is interval that timer used to acquire shard", DefaultValue: time.Minute, }, StandbyClusterDelay: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.standbyClusterDelay", - Description: "StandbyClusterDelay is the artificial delay added to standby cluster's view of active cluster's time", - }, + + KeyName: "history.standbyClusterDelay", + Description: "StandbyClusterDelay is the artificial delay added to standby cluster's view of active cluster's time", DefaultValue: time.Minute * 5, }, StandbyTaskMissingEventsResendDelay: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.standbyTaskMissingEventsResendDelay", - Description: "StandbyTaskMissingEventsResendDelay is the amount of time standby cluster's will wait (if events are 
missing)before calling remote for missing events", - }, + + KeyName: "history.standbyTaskMissingEventsResendDelay", + Description: "StandbyTaskMissingEventsResendDelay is the amount of time standby cluster's will wait (if events are missing)before calling remote for missing events", DefaultValue: time.Minute * 15, }, StandbyTaskMissingEventsDiscardDelay: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.standbyTaskMissingEventsDiscardDelay", - Description: "StandbyTaskMissingEventsDiscardDelay is the amount of time standby cluster's will wait (if events are missing)before discarding the task", - }, + + KeyName: "history.standbyTaskMissingEventsDiscardDelay", + Description: "StandbyTaskMissingEventsDiscardDelay is the amount of time standby cluster's will wait (if events are missing)before discarding the task", DefaultValue: time.Minute * 25, }, ActiveTaskRedispatchInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.activeTaskRedispatchInterval", - Description: "ActiveTaskRedispatchInterval is the active task redispatch interval", - }, + + KeyName: "history.activeTaskRedispatchInterval", + Description: "ActiveTaskRedispatchInterval is the active task redispatch interval", DefaultValue: time.Second * 5, }, StandbyTaskRedispatchInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.standbyTaskRedispatchInterval", - Description: "StandbyTaskRedispatchInterval is the standby task redispatch interval", - }, + + KeyName: "history.standbyTaskRedispatchInterval", + Description: "StandbyTaskRedispatchInterval is the standby task redispatch interval", DefaultValue: time.Second * 30, }, StandbyTaskReReplicationContextTimeout: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.standbyTaskReReplicationContextTimeout", - Description: "StandbyTaskReReplicationContextTimeout is the context timeout for standby task re-replication", - }, + + KeyName: "history.standbyTaskReReplicationContextTimeout", + 
Description: "StandbyTaskReReplicationContextTimeout is the context timeout for standby task re-replication", DefaultValue: time.Minute * 3, }, ResurrectionCheckMinDelay: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.resurrectionCheckMinDelay", - Description: "ResurrectionCheckMinDelay is the minimal timer processing delay before scanning history to see if there's a resurrected timer/activity", - }, + + KeyName: "history.resurrectionCheckMinDelay", + Description: "ResurrectionCheckMinDelay is the minimal timer processing delay before scanning history to see if there's a resurrected timer/activity", DefaultValue: time.Hour * 24, }, QueueProcessorSplitLookAheadDurationByDomainID: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.queueProcessorSplitLookAheadDurationByDomainID", - Description: "QueueProcessorSplitLookAheadDurationByDomainID is the look ahead duration when spliting a domain to a new processing queue", - }, + + KeyName: "history.queueProcessorSplitLookAheadDurationByDomainID", + Description: "QueueProcessorSplitLookAheadDurationByDomainID is the look ahead duration when spliting a domain to a new processing queue", DefaultValue: time.Minute * 20, }, QueueProcessorPollBackoffInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.queueProcessorPollBackoffInterval", - Description: "QueueProcessorPollBackoffInterval is the backoff duration when queue processor is throttled", - }, + + KeyName: "history.queueProcessorPollBackoffInterval", + Description: "QueueProcessorPollBackoffInterval is the backoff duration when queue processor is throttled", DefaultValue: time.Second * 5, }, TimerProcessorUpdateAckInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.timerProcessorUpdateAckInterval", - Description: "TimerProcessorUpdateAckInterval is update interval for timer processor", - }, + + KeyName: "history.timerProcessorUpdateAckInterval", + Description: "TimerProcessorUpdateAckInterval is 
update interval for timer processor", DefaultValue: time.Second * 30, }, TimerProcessorCompleteTimerInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.timerProcessorCompleteTimerInterval", - Description: "TimerProcessorCompleteTimerInterval is complete timer interval for timer processor", - }, + + KeyName: "history.timerProcessorCompleteTimerInterval", + Description: "TimerProcessorCompleteTimerInterval is complete timer interval for timer processor", DefaultValue: time.Minute, }, TimerProcessorFailoverMaxStartJitterInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.timerProcessorFailoverMaxStartJitterInterval", - Description: "TimerProcessorFailoverMaxStartJitterInterval is the max jitter interval for starting timer failover queue processing. The actual jitter interval used will be a random duration between 0 and the max interval so that timer failover queue across different shards won't start at the same time", - }, + + KeyName: "history.timerProcessorFailoverMaxStartJitterInterval", + Description: "TimerProcessorFailoverMaxStartJitterInterval is the max jitter interval for starting timer failover queue processing. 
The actual jitter interval used will be a random duration between 0 and the max interval so that timer failover queue across different shards won't start at the same time", DefaultValue: 0, }, TimerProcessorMaxPollInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.timerProcessorMaxPollInterval", - Description: "TimerProcessorMaxPollInterval is max poll interval for timer processor", - }, + + KeyName: "history.timerProcessorMaxPollInterval", + Description: "TimerProcessorMaxPollInterval is max poll interval for timer processor", DefaultValue: time.Minute * 5, }, TimerProcessorSplitQueueInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.timerProcessorSplitQueueInterval", - Description: "TimerProcessorSplitQueueInterval is the split processing queue interval for timer processor", - }, + + KeyName: "history.timerProcessorSplitQueueInterval", + Description: "TimerProcessorSplitQueueInterval is the split processing queue interval for timer processor", DefaultValue: time.Minute, }, TimerProcessorArchivalTimeLimit: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.timerProcessorArchivalTimeLimit", - Description: "TimerProcessorArchivalTimeLimit is the upper time limit for inline history archival", - }, + + KeyName: "history.timerProcessorArchivalTimeLimit", + Description: "TimerProcessorArchivalTimeLimit is the upper time limit for inline history archival", DefaultValue: time.Second * 2, }, TimerProcessorMaxTimeShift: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.timerProcessorMaxTimeShift", - Description: "TimerProcessorMaxTimeShift is the max shift timer processor can have", - }, + + KeyName: "history.timerProcessorMaxTimeShift", + Description: "TimerProcessorMaxTimeShift is the max shift timer processor can have", DefaultValue: time.Second, }, TransferProcessorFailoverMaxStartJitterInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: 
"history.transferProcessorFailoverMaxStartJitterInterval", - Description: "TransferProcessorFailoverMaxStartJitterInterval is the max jitter interval for starting transfer failover queue processing. The actual jitter interval used will be a random duration between 0 and the max interval so that timer failover queue across different shards won't start at the same time", - }, + + KeyName: "history.transferProcessorFailoverMaxStartJitterInterval", + Description: "TransferProcessorFailoverMaxStartJitterInterval is the max jitter interval for starting transfer failover queue processing. The actual jitter interval used will be a random duration between 0 and the max interval so that timer failover queue across different shards won't start at the same time", DefaultValue: 0, }, TransferProcessorMaxPollInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.transferProcessorMaxPollInterval", - Description: "TransferProcessorMaxPollInterval is max poll interval for transferQueueProcessor", - }, + + KeyName: "history.transferProcessorMaxPollInterval", + Description: "TransferProcessorMaxPollInterval is max poll interval for transferQueueProcessor", DefaultValue: time.Minute, }, TransferProcessorSplitQueueInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.transferProcessorSplitQueueInterval", - Description: "TransferProcessorSplitQueueInterval is the split processing queue interval for transferQueueProcessor", - }, + + KeyName: "history.transferProcessorSplitQueueInterval", + Description: "TransferProcessorSplitQueueInterval is the split processing queue interval for transferQueueProcessor", DefaultValue: time.Minute, }, TransferProcessorUpdateAckInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.transferProcessorUpdateAckInterval", - Description: "TransferProcessorUpdateAckInterval is update interval for transferQueueProcessor", - }, + + KeyName: "history.transferProcessorUpdateAckInterval", + Description: 
"TransferProcessorUpdateAckInterval is update interval for transferQueueProcessor", DefaultValue: time.Second * 30, }, TransferProcessorCompleteTransferInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.transferProcessorCompleteTransferInterval", - Description: "TransferProcessorCompleteTransferInterval is complete timer interval for transferQueueProcessor", - }, + + KeyName: "history.transferProcessorCompleteTransferInterval", + Description: "TransferProcessorCompleteTransferInterval is complete timer interval for transferQueueProcessor", DefaultValue: time.Minute, }, TransferProcessorValidationInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.transferProcessorValidationInterval", - Description: "TransferProcessorValidationInterval is interval for performing transfer queue validation", - }, + + KeyName: "history.transferProcessorValidationInterval", + Description: "TransferProcessorValidationInterval is interval for performing transfer queue validation", DefaultValue: time.Second * 30, }, TransferProcessorVisibilityArchivalTimeLimit: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.transferProcessorVisibilityArchivalTimeLimit", - Description: "TransferProcessorVisibilityArchivalTimeLimit is the upper time limit for archiving visibility records", - }, + + KeyName: "history.transferProcessorVisibilityArchivalTimeLimit", + Description: "TransferProcessorVisibilityArchivalTimeLimit is the upper time limit for archiving visibility records", DefaultValue: time.Millisecond * 400, }, CrossClusterSourceProcessorMaxPollInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterSourceProcessorMaxPollInterval", - Description: "CrossClusterSourceProcessorMaxPollInterval is max poll interval for crossClusterQueueProcessor", - }, + + KeyName: "history.crossClusterSourceProcessorMaxPollInterval", + Description: "CrossClusterSourceProcessorMaxPollInterval is max poll interval for 
crossClusterQueueProcessor", DefaultValue: time.Minute, }, CrossClusterSourceProcessorUpdateAckInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterSourceProcessorUpdateAckInterval", - Description: "CrossClusterSourceProcessorUpdateAckInterval is update interval for crossClusterQueueProcessor", - }, + + KeyName: "history.crossClusterSourceProcessorUpdateAckInterval", + Description: "CrossClusterSourceProcessorUpdateAckInterval is update interval for crossClusterQueueProcessor", DefaultValue: time.Second * 30, }, CrossClusterTargetProcessorTaskWaitInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterTargetProcessorTaskWaitInterval", - Description: "CrossClusterTargetProcessorTaskWaitInterval is the duration for waiting a cross-cluster task response before responding to source", - }, + + KeyName: "history.crossClusterTargetProcessorTaskWaitInterval", + Description: "CrossClusterTargetProcessorTaskWaitInterval is the duration for waiting a cross-cluster task response before responding to source", DefaultValue: time.Second * 3, }, CrossClusterTargetProcessorServiceBusyBackoffInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterTargetProcessorServiceBusyBackoffInterval", - Description: "CrossClusterTargetProcessorServiceBusyBackoffInterval is the backoff duration for cross cluster task processor when getting a service busy error when calling source cluster", - }, + + KeyName: "history.crossClusterTargetProcessorServiceBusyBackoffInterval", + Description: "CrossClusterTargetProcessorServiceBusyBackoffInterval is the backoff duration for cross cluster task processor when getting a service busy error when calling source cluster", DefaultValue: time.Second * 5, }, CrossClusterFetcherAggregationInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterFetcherAggregationInterval", - Description: "CrossClusterFetcherAggregationInterval determines 
how frequently the fetch requests are sent", - }, + + KeyName: "history.crossClusterFetcherAggregationInterval", + Description: "CrossClusterFetcherAggregationInterval determines how frequently the fetch requests are sent", DefaultValue: time.Second * 2, }, CrossClusterFetcherServiceBusyBackoffInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterFetcherServiceBusyBackoffInterval", - Description: "CrossClusterFetcherServiceBusyBackoffInterval is the backoff duration for cross cluster task fetcher when getting", - }, + + KeyName: "history.crossClusterFetcherServiceBusyBackoffInterval", + Description: "CrossClusterFetcherServiceBusyBackoffInterval is the backoff duration for cross cluster task fetcher when getting", DefaultValue: time.Second * 5, }, CrossClusterFetcherErrorBackoffInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.crossClusterFetcherErrorBackoffInterval", - Description: "", - }, + + KeyName: "history.crossClusterFetcherErrorBackoffInterval", + Description: "", DefaultValue: time.Second, }, ReplicatorUpperLatency: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.replicatorUpperLatency", - Description: "ReplicatorUpperLatency indicates the max allowed replication latency between clusters", - }, + + KeyName: "history.replicatorUpperLatency", + Description: "ReplicatorUpperLatency indicates the max allowed replication latency between clusters", DefaultValue: time.Second * 40, }, ShardUpdateMinInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.shardUpdateMinInterval", - Description: "ShardUpdateMinInterval is the minimal time interval which the shard info can be updated", - }, + + KeyName: "history.shardUpdateMinInterval", + Description: "ShardUpdateMinInterval is the minimal time interval which the shard info can be updated", DefaultValue: time.Minute * 5, }, ShardSyncMinInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: 
"history.shardSyncMinInterval", - Description: "ShardSyncMinInterval is the minimal time interval which the shard info should be sync to remote", - }, + + KeyName: "history.shardSyncMinInterval", + Description: "ShardSyncMinInterval is the minimal time interval which the shard info should be sync to remote", DefaultValue: time.Minute * 5, }, StickyTTL: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.stickyTTL", - Description: "StickyTTL is to expire a sticky tasklist if no update more than this duration", - }, + + KeyName: "history.stickyTTL", + Description: "StickyTTL is to expire a sticky tasklist if no update more than this duration", DefaultValue: time.Hour * 24 * 365, }, DecisionHeartbeatTimeout: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.decisionHeartbeatTimeout", - Description: "DecisionHeartbeatTimeout is for decision heartbeat", - }, - DefaultValue: time.Minute * 30, + + KeyName: "history.decisionHeartbeatTimeout", + Description: "DecisionHeartbeatTimeout is for decision heartbeat", + DefaultValue: time.Minute * 30, // about 30m }, NormalDecisionScheduleToStartTimeout: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.normalDecisionScheduleToStartTimeout", - Description: "NormalDecisionScheduleToStartTimeout is scheduleToStart timeout duration for normal (non-sticky) decision task", - }, + + KeyName: "history.normalDecisionScheduleToStartTimeout", + Description: "NormalDecisionScheduleToStartTimeout is scheduleToStart timeout duration for normal (non-sticky) decision task", DefaultValue: time.Minute * 5, }, NotifyFailoverMarkerInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.NotifyFailoverMarkerInterval", - Description: "NotifyFailoverMarkerInterval is determines the frequency to notify failover marker", - }, + + KeyName: "history.NotifyFailoverMarkerInterval", + Description: "NotifyFailoverMarkerInterval is determines the frequency to notify failover marker", DefaultValue: 
time.Second * 5, }, ActivityMaxScheduleToStartTimeoutForRetry: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.activityMaxScheduleToStartTimeoutForRetry", - Description: "ActivityMaxScheduleToStartTimeoutForRetry is maximum value allowed when overwritting the schedule to start timeout for activities with retry policy", - }, + + KeyName: "history.activityMaxScheduleToStartTimeoutForRetry", + Description: "ActivityMaxScheduleToStartTimeoutForRetry is maximum value allowed when overwritting the schedule to start timeout for activities with retry policy", DefaultValue: time.Minute * 30, }, ReplicationTaskFetcherAggregationInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.ReplicationTaskFetcherAggregationInterval", - Description: "ReplicationTaskFetcherAggregationInterval determines how frequently the fetch requests are sent", - }, + + KeyName: "history.ReplicationTaskFetcherAggregationInterval", + Description: "ReplicationTaskFetcherAggregationInterval determines how frequently the fetch requests are sent", DefaultValue: time.Second * 2, }, ReplicationTaskFetcherErrorRetryWait: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.ReplicationTaskFetcherErrorRetryWait", - Description: "ReplicationTaskFetcherErrorRetryWait is the wait time when fetcher encounters error", - }, + + KeyName: "history.ReplicationTaskFetcherErrorRetryWait", + Description: "ReplicationTaskFetcherErrorRetryWait is the wait time when fetcher encounters error", DefaultValue: time.Second, }, ReplicationTaskFetcherServiceBusyWait: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.ReplicationTaskFetcherServiceBusyWait", - Description: "ReplicationTaskFetcherServiceBusyWait is the wait time when fetcher encounters service busy error", - }, + + KeyName: "history.ReplicationTaskFetcherServiceBusyWait", + Description: "ReplicationTaskFetcherServiceBusyWait is the wait time when fetcher encounters service busy error", DefaultValue: 
time.Minute, }, ReplicationTaskProcessorErrorRetryWait: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.ReplicationTaskProcessorErrorRetryWait", - Description: "ReplicationTaskProcessorErrorRetryWait is the initial retry wait when we see errors in applying replication tasks", - }, + + KeyName: "history.ReplicationTaskProcessorErrorRetryWait", + Description: "ReplicationTaskProcessorErrorRetryWait is the initial retry wait when we see errors in applying replication tasks", DefaultValue: time.Millisecond * 50, }, ReplicationTaskProcessorErrorSecondRetryWait: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.ReplicationTaskProcessorErrorSecondRetryWait", - Description: "ReplicationTaskProcessorErrorSecondRetryWait is the initial retry wait for the second phase retry", - }, + + KeyName: "history.ReplicationTaskProcessorErrorSecondRetryWait", + Description: "ReplicationTaskProcessorErrorSecondRetryWait is the initial retry wait for the second phase retry", DefaultValue: time.Second * 5, }, ReplicationTaskProcessorErrorSecondRetryMaxWait: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.ReplicationTaskProcessorErrorSecondRetryMaxWait", - Description: "ReplicationTaskProcessorErrorSecondRetryMaxWait is the max wait time for the second phase retry", - }, + + KeyName: "history.ReplicationTaskProcessorErrorSecondRetryMaxWait", + Description: "ReplicationTaskProcessorErrorSecondRetryMaxWait is the max wait time for the second phase retry", DefaultValue: time.Second * 30, }, ReplicationTaskProcessorErrorSecondRetryExpiration: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.ReplicationTaskProcessorErrorSecondRetryExpiration", - Description: "ReplicationTaskProcessorErrorSecondRetryExpiration is the expiration duration for the second phase retry", - }, + + KeyName: "history.ReplicationTaskProcessorErrorSecondRetryExpiration", + Description: "ReplicationTaskProcessorErrorSecondRetryExpiration is the expiration 
duration for the second phase retry", DefaultValue: time.Minute * 5, }, ReplicationTaskProcessorNoTaskInitialWait: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.ReplicationTaskProcessorNoTaskInitialWait", - Description: "ReplicationTaskProcessorNoTaskInitialWait is the wait time when not ask is returned", - }, + + KeyName: "history.ReplicationTaskProcessorNoTaskInitialWait", + Description: "ReplicationTaskProcessorNoTaskInitialWait is the wait time when not ask is returned", DefaultValue: time.Second * 2, }, ReplicationTaskProcessorCleanupInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.ReplicationTaskProcessorCleanupInterval", - Description: "ReplicationTaskProcessorCleanupInterval determines how frequently the cleanup replication queue", - }, + + KeyName: "history.ReplicationTaskProcessorCleanupInterval", + Description: "ReplicationTaskProcessorCleanupInterval determines how frequently the cleanup replication queue", DefaultValue: time.Minute, }, ReplicationTaskProcessorStartWait: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "history.ReplicationTaskProcessorStartWait", - Description: "ReplicationTaskProcessorStartWait is the wait time before each task processing batch", - }, + + KeyName: "history.ReplicationTaskProcessorStartWait", + Description: "ReplicationTaskProcessorStartWait is the wait time before each task processing batch", DefaultValue: time.Second * 5, }, WorkerESProcessorFlushInterval: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "worker.ESProcessorFlushInterval", - Description: "WorkerESProcessorFlushInterval is flush interval for esProcessor", - }, + + KeyName: "worker.ESProcessorFlushInterval", + Description: "WorkerESProcessorFlushInterval is flush interval for esProcessor", DefaultValue: time.Second, }, WorkerTimeLimitPerArchivalIteration: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "worker.TimeLimitPerArchivalIteration", - Description: 
"WorkerTimeLimitPerArchivalIteration is controls the time limit of each iteration of archival workflow", - }, + + KeyName: "worker.TimeLimitPerArchivalIteration", + Description: "WorkerTimeLimitPerArchivalIteration is controls the time limit of each iteration of archival workflow", DefaultValue: time.Hour * 24 * 15, }, WorkerReplicationTaskMaxRetryDuration: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "worker.replicationTaskMaxRetryDuration", - Description: "WorkerReplicationTaskMaxRetryDuration is the max retry duration for any task", - }, + + KeyName: "worker.replicationTaskMaxRetryDuration", + Description: "WorkerReplicationTaskMaxRetryDuration is the max retry duration for any task", DefaultValue: time.Minute * 10, }, ESAnalyzerTimeWindow: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "worker.ESAnalyzerTimeWindow", - Description: "ESAnalyzerTimeWindow defines the time window ElasticSearch Analyzer will consider while taking workflow averages", - }, + + KeyName: "worker.ESAnalyzerTimeWindow", + Description: "ESAnalyzerTimeWindow defines the time window ElasticSearch Analyzer will consider while taking workflow averages", DefaultValue: time.Hour * 24 * 30, }, ESAnalyzerBufferWaitTime: DynamicDuration{ - DynamicBase: DynamicBase{ - KeyName: "worker.ESAnalyzerBufferWaitTime", - Description: "ESAnalyzerBufferWaitTime controls min time required to consider a worklow stuck", - }, + + KeyName: "worker.ESAnalyzerBufferWaitTime", + Description: "ESAnalyzerBufferWaitTime controls min time required to consider a worklow stuck", DefaultValue: time.Minute * 30, }, } var MapKeys = map[MapKey]DynamicMap{ TestGetMapPropertyKey: DynamicMap{ - DynamicBase: DynamicBase{ - KeyName: "testGetMapPropertyKey", - Description: "", - }, + + KeyName: "testGetMapPropertyKey", + Description: "", DefaultValue: nil, }, RequiredDomainDataKeys: DynamicMap{ - DynamicBase: DynamicBase{ - KeyName: "system.requiredDomainDataKeys", - Description: "RequiredDomainDataKeys is the 
key for the list of data keys required in domain registration", - }, + + KeyName: "system.requiredDomainDataKeys", + Description: "RequiredDomainDataKeys is the key for the list of data keys required in domain registration", DefaultValue: nil, }, ValidSearchAttributes: DynamicMap{ - DynamicBase: DynamicBase{ - KeyName: "frontend.validSearchAttributes", - Description: "ValidSearchAttributes is legal indexed keys that can be used in list APIs. When overriding, ensure to include the existing default attributes of the current release", - }, + + KeyName: "frontend.validSearchAttributes", + Description: "ValidSearchAttributes is legal indexed keys that can be used in list APIs. When overriding, ensure to include the existing default attributes of the current release", DefaultValue: definition.GetDefaultIndexedKeys(), }, TaskSchedulerRoundRobinWeights: DynamicMap{ - DynamicBase: DynamicBase{ - KeyName: "history.taskSchedulerRoundRobinWeight", - Description: "TaskSchedulerRoundRobinWeights is the priority weight for weighted round robin task scheduler", - }, + + KeyName: "history.taskSchedulerRoundRobinWeight", + Description: "TaskSchedulerRoundRobinWeights is the priority weight for weighted round robin task scheduler", DefaultValue: common.ConvertIntMapToDynamicConfigMapProperty(map[int]int{ common.GetTaskPriority(common.HighPriorityClass, common.DefaultPrioritySubclass): 500, common.GetTaskPriority(common.DefaultPriorityClass, common.DefaultPrioritySubclass): 20, @@ -4886,17 +4565,15 @@ var MapKeys = map[MapKey]DynamicMap{ }), }, QueueProcessorPendingTaskSplitThreshold: DynamicMap{ - DynamicBase: DynamicBase{ - KeyName: "history.queueProcessorPendingTaskSplitThreshold", - Description: "QueueProcessorPendingTaskSplitThreshold is the threshold for the number of pending tasks per domain", - }, + + KeyName: "history.queueProcessorPendingTaskSplitThreshold", + Description: "QueueProcessorPendingTaskSplitThreshold is the threshold for the number of pending tasks per domain", 
DefaultValue: common.ConvertIntMapToDynamicConfigMapProperty(map[int]int{0: 1000, 1: 10000}), }, QueueProcessorStuckTaskSplitThreshold: DynamicMap{ - DynamicBase: DynamicBase{ - KeyName: "history.queueProcessorStuckTaskSplitThreshold", - Description: "QueueProcessorStuckTaskSplitThreshold is the threshold for the number of attempts of a task", - }, + + KeyName: "history.queueProcessorStuckTaskSplitThreshold", + Description: "QueueProcessorStuckTaskSplitThreshold is the threshold for the number of attempts of a task", DefaultValue: common.ConvertIntMapToDynamicConfigMapProperty(map[int]int{0: 100, 1: 10000}), }, } @@ -4904,23 +4581,37 @@ var MapKeys = map[MapKey]DynamicMap{ var _keyNames map[string]Key func init() { + panicIfKeyInvalid := func(name string, key Key) { + if name == "" { + panic(fmt.Sprintf("empty keyName: %T, %v", key, key)) + } + if _, ok := _keyNames[name]; ok { + panic(fmt.Sprintf("duplicate keyName: %v", name)) + } + } _keyNames = make(map[string]Key) for k, v := range IntKeys { + panicIfKeyInvalid(v.KeyName, k) _keyNames[v.KeyName] = k } for k, v := range BoolKeys { + panicIfKeyInvalid(v.KeyName, k) _keyNames[v.KeyName] = k } for k, v := range FloatKeys { + panicIfKeyInvalid(v.KeyName, k) _keyNames[v.KeyName] = k } for k, v := range StringKeys { + panicIfKeyInvalid(v.KeyName, k) _keyNames[v.KeyName] = k } for k, v := range DurationKeys { + panicIfKeyInvalid(v.KeyName, k) _keyNames[v.KeyName] = k } for k, v := range MapKeys { + panicIfKeyInvalid(v.KeyName, k) _keyNames[v.KeyName] = k } } diff --git a/common/dynamicconfig/file_based_client.go b/common/dynamicconfig/file_based_client.go index a4b63f72935..80cbe7d2d8d 100644 --- a/common/dynamicconfig/file_based_client.go +++ b/common/dynamicconfig/file_based_client.go @@ -95,15 +95,16 @@ func NewFileBasedClient(config *FileBasedClientConfig, logger log.Logger, doneCh return client, nil } -func (fc *fileBasedClient) GetValue(name Key, defaultValue interface{}) (interface{}, error) { - return 
fc.getValueWithFilters(name, nil, defaultValue) +func (fc *fileBasedClient) GetValue(name Key) (interface{}, error) { + return fc.getValueWithFilters(name, nil, name.DefaultValue()) } -func (fc *fileBasedClient) GetValueWithFilters(name Key, filters map[Filter]interface{}, defaultValue interface{}) (interface{}, error) { - return fc.getValueWithFilters(name, filters, defaultValue) +func (fc *fileBasedClient) GetValueWithFilters(name Key, filters map[Filter]interface{}) (interface{}, error) { + return fc.getValueWithFilters(name, filters, name.DefaultValue()) } -func (fc *fileBasedClient) GetIntValue(name IntKey, filters map[Filter]interface{}, defaultValue int) (int, error) { +func (fc *fileBasedClient) GetIntValue(name IntKey, filters map[Filter]interface{}) (int, error) { + defaultValue := name.DefaultInt() val, err := fc.getValueWithFilters(name, filters, defaultValue) if err != nil { return defaultValue, err @@ -112,10 +113,11 @@ func (fc *fileBasedClient) GetIntValue(name IntKey, filters map[Filter]interface if intVal, ok := val.(int); ok { return intVal, nil } - return defaultValue, errors.New("value type is not int") + return defaultValue, fmt.Errorf("value type is not int but is: %T", val) } -func (fc *fileBasedClient) GetFloatValue(name FloatKey, filters map[Filter]interface{}, defaultValue float64) (float64, error) { +func (fc *fileBasedClient) GetFloatValue(name FloatKey, filters map[Filter]interface{}) (float64, error) { + defaultValue := name.DefaultFloat() val, err := fc.getValueWithFilters(name, filters, defaultValue) if err != nil { return defaultValue, err @@ -126,10 +128,11 @@ func (fc *fileBasedClient) GetFloatValue(name FloatKey, filters map[Filter]inter } else if intVal, ok := val.(int); ok { return float64(intVal), nil } - return defaultValue, errors.New("value type is not float64") + return defaultValue, fmt.Errorf("value type is not float64 but is: %T", val) } -func (fc *fileBasedClient) GetBoolValue(name BoolKey, filters 
map[Filter]interface{}, defaultValue bool) (bool, error) { +func (fc *fileBasedClient) GetBoolValue(name BoolKey, filters map[Filter]interface{}) (bool, error) { + defaultValue := name.DefaultBool() val, err := fc.getValueWithFilters(name, filters, defaultValue) if err != nil { return defaultValue, err @@ -138,10 +141,11 @@ func (fc *fileBasedClient) GetBoolValue(name BoolKey, filters map[Filter]interfa if boolVal, ok := val.(bool); ok { return boolVal, nil } - return defaultValue, errors.New("value type is not bool") + return defaultValue, fmt.Errorf("value type is not bool but is: %T", val) } -func (fc *fileBasedClient) GetStringValue(name StringKey, filters map[Filter]interface{}, defaultValue string) (string, error) { +func (fc *fileBasedClient) GetStringValue(name StringKey, filters map[Filter]interface{}) (string, error) { + defaultValue := name.DefaultString() val, err := fc.getValueWithFilters(name, filters, defaultValue) if err != nil { return defaultValue, err @@ -150,12 +154,11 @@ func (fc *fileBasedClient) GetStringValue(name StringKey, filters map[Filter]int if stringVal, ok := val.(string); ok { return stringVal, nil } - return defaultValue, errors.New("value type is not string") + return defaultValue, fmt.Errorf("value type is not string but is: %T", val) } -func (fc *fileBasedClient) GetMapValue( - name MapKey, filters map[Filter]interface{}, defaultValue map[string]interface{}, -) (map[string]interface{}, error) { +func (fc *fileBasedClient) GetMapValue(name MapKey, filters map[Filter]interface{}) (map[string]interface{}, error) { + defaultValue := name.DefaultMap() val, err := fc.getValueWithFilters(name, filters, defaultValue) if err != nil { return defaultValue, err @@ -163,12 +166,11 @@ func (fc *fileBasedClient) GetMapValue( if mapVal, ok := val.(map[string]interface{}); ok { return mapVal, nil } - return defaultValue, errors.New("value type is not map") + return defaultValue, fmt.Errorf("value type is not map but is: %T", val) } -func (fc 
*fileBasedClient) GetDurationValue( - name DurationKey, filters map[Filter]interface{}, defaultValue time.Duration, -) (time.Duration, error) { +func (fc *fileBasedClient) GetDurationValue(name DurationKey, filters map[Filter]interface{}) (time.Duration, error) { + defaultValue := name.DefaultDuration() val, err := fc.getValueWithFilters(name, filters, defaultValue) if err != nil { return defaultValue, err @@ -176,7 +178,7 @@ func (fc *fileBasedClient) GetDurationValue( durationString, ok := val.(string) if !ok { - return defaultValue, errors.New("value type is not string") + return defaultValue, fmt.Errorf("value type is not string but is: %T", val) } durationVal, err := time.ParseDuration(durationString) @@ -187,6 +189,9 @@ func (fc *fileBasedClient) GetDurationValue( } func (fc *fileBasedClient) UpdateValue(name Key, value interface{}) error { + if err := ValidateKeyValuePair(name, value); err != nil { + return err + } keyName := name.String() currentValues := make(map[string][]*constrainedValue) diff --git a/common/dynamicconfig/file_based_client_test.go b/common/dynamicconfig/file_based_client_test.go index 0de83924e0b..abb20a485fd 100644 --- a/common/dynamicconfig/file_based_client_test.go +++ b/common/dynamicconfig/file_based_client_test.go @@ -62,30 +62,29 @@ func (s *fileBasedClientSuite) SetupTest() { } func (s *fileBasedClientSuite) TestGetValue() { - v, err := s.client.GetValue(TestGetBoolPropertyKey, true) + v, err := s.client.GetValue(TestGetBoolPropertyKey) s.NoError(err) s.Equal(false, v) } func (s *fileBasedClientSuite) TestGetValue_NonExistKey() { - defaultValue := true - v, err := s.client.GetValue(LastBoolKey, defaultValue) + v, err := s.client.GetValue(EnableVisibilitySampling) s.Error(err) - s.Equal(defaultValue, v) + s.Equal(EnableVisibilitySampling.DefaultBool(), v) } func (s *fileBasedClientSuite) TestGetValueWithFilters() { filters := map[Filter]interface{}{ DomainName: "global-samples-domain", } - v, err := 
s.client.GetValueWithFilters(TestGetBoolPropertyKey, filters, false) + v, err := s.client.GetValueWithFilters(TestGetBoolPropertyKey, filters) s.NoError(err) s.Equal(true, v) filters = map[Filter]interface{}{ DomainName: "non-exist-domain", } - v, err = s.client.GetValueWithFilters(TestGetBoolPropertyKey, filters, true) + v, err = s.client.GetValueWithFilters(TestGetBoolPropertyKey, filters) s.NoError(err) s.Equal(false, v) @@ -93,7 +92,7 @@ func (s *fileBasedClientSuite) TestGetValueWithFilters() { DomainName: "samples-domain", TaskListName: "non-exist-tasklist", } - v, err = s.client.GetValueWithFilters(TestGetBoolPropertyKey, filters, false) + v, err = s.client.GetValueWithFilters(TestGetBoolPropertyKey, filters) s.NoError(err) s.Equal(true, v) } @@ -103,13 +102,13 @@ func (s *fileBasedClientSuite) TestGetValueWithFilters_UnknownFilter() { DomainName: "global-samples-domain1", UnknownFilter: "unknown-filter1", } - v, err := s.client.GetValueWithFilters(TestGetBoolPropertyKey, filters, false) + v, err := s.client.GetValueWithFilters(TestGetBoolPropertyKey, filters) s.NoError(err) s.Equal(false, v) } func (s *fileBasedClientSuite) TestGetIntValue() { - v, err := s.client.GetIntValue(TestGetIntPropertyKey, nil, 1) + v, err := s.client.GetIntValue(TestGetIntPropertyKey, nil) s.NoError(err) s.Equal(1000, v) } @@ -118,23 +117,22 @@ func (s *fileBasedClientSuite) TestGetIntValue_FilterNotMatch() { filters := map[Filter]interface{}{ DomainName: "samples-domain", } - v, err := s.client.GetIntValue(TestGetIntPropertyKey, filters, 500) + v, err := s.client.GetIntValue(TestGetIntPropertyKey, filters) s.NoError(err) s.Equal(1000, v) } func (s *fileBasedClientSuite) TestGetIntValue_WrongType() { - defaultValue := 2000 filters := map[Filter]interface{}{ DomainName: "global-samples-domain", } - v, err := s.client.GetIntValue(TestGetIntPropertyKey, filters, defaultValue) + v, err := s.client.GetIntValue(TestGetIntPropertyKey, filters) s.Error(err) - s.Equal(defaultValue, v) + 
s.Equal(TestGetIntPropertyKey.DefaultInt(), v) } func (s *fileBasedClientSuite) TestGetFloatValue() { - v, err := s.client.GetFloatValue(TestGetFloat64PropertyKey, nil, 1) + v, err := s.client.GetFloatValue(TestGetFloat64PropertyKey, nil) s.NoError(err) s.Equal(12.0, v) } @@ -143,14 +141,13 @@ func (s *fileBasedClientSuite) TestGetFloatValue_WrongType() { filters := map[Filter]interface{}{ DomainName: "samples-domain", } - defaultValue := 1.0 - v, err := s.client.GetFloatValue(TestGetFloat64PropertyKey, filters, defaultValue) + v, err := s.client.GetFloatValue(TestGetFloat64PropertyKey, filters) s.Error(err) - s.Equal(defaultValue, v) + s.Equal(TestGetFloat64PropertyKey.DefaultFloat(), v) } func (s *fileBasedClientSuite) TestGetBoolValue() { - v, err := s.client.GetBoolValue(TestGetBoolPropertyKey, nil, true) + v, err := s.client.GetBoolValue(TestGetBoolPropertyKey, nil) s.NoError(err) s.Equal(false, v) } @@ -159,14 +156,13 @@ func (s *fileBasedClientSuite) TestGetStringValue() { filters := map[Filter]interface{}{ TaskListName: "random tasklist", } - v, err := s.client.GetStringValue(TestGetStringPropertyKey, filters, "defaultString") + v, err := s.client.GetStringValue(TestGetStringPropertyKey, filters) s.NoError(err) s.Equal("constrained-string", v) } func (s *fileBasedClientSuite) TestGetMapValue() { - var defaultVal map[string]interface{} - v, err := s.client.GetMapValue(TestGetMapPropertyKey, nil, defaultVal) + v, err := s.client.GetMapValue(TestGetMapPropertyKey, nil) s.NoError(err) expectedVal := map[string]interface{}{ "key1": "1", @@ -183,17 +179,16 @@ func (s *fileBasedClientSuite) TestGetMapValue() { } func (s *fileBasedClientSuite) TestGetMapValue_WrongType() { - var defaultVal map[string]interface{} filters := map[Filter]interface{}{ TaskListName: "random tasklist", } - v, err := s.client.GetMapValue(TestGetMapPropertyKey, filters, defaultVal) + v, err := s.client.GetMapValue(TestGetMapPropertyKey, filters) s.Error(err) - s.Equal(defaultVal, v) + 
s.Equal(TestGetMapPropertyKey.DefaultMap(), v) } func (s *fileBasedClientSuite) TestGetDurationValue() { - v, err := s.client.GetDurationValue(TestGetDurationPropertyKey, nil, time.Second) + v, err := s.client.GetDurationValue(TestGetDurationPropertyKey, nil) s.NoError(err) s.Equal(time.Minute, v) } @@ -202,9 +197,9 @@ func (s *fileBasedClientSuite) TestGetDurationValue_NotStringRepresentation() { filters := map[Filter]interface{}{ DomainName: "samples-domain", } - v, err := s.client.GetDurationValue(TestGetDurationPropertyKey, filters, time.Second) + v, err := s.client.GetDurationValue(TestGetDurationPropertyKey, filters) s.Error(err) - s.Equal(time.Second, v) + s.Equal(TestGetDurationPropertyKey.DefaultDuration(), v) } func (s *fileBasedClientSuite) TestGetDurationValue_ParseFailed() { @@ -212,9 +207,9 @@ func (s *fileBasedClientSuite) TestGetDurationValue_ParseFailed() { DomainName: "samples-domain", TaskListName: "longIdleTimeTasklist", } - v, err := s.client.GetDurationValue(TestGetDurationPropertyKey, filters, time.Second) + v, err := s.client.GetDurationValue(TestGetDurationPropertyKey, filters) s.Error(err) - s.Equal(time.Second, v) + s.Equal(TestGetDurationPropertyKey.DefaultDuration(), v) } func (s *fileBasedClientSuite) TestValidateConfig_ConfigNotExist() { @@ -319,7 +314,7 @@ func (s *fileBasedClientSuite) TestUpdateConfig() { key := ValidSearchAttributes // pre-check existing config - current, err := client.GetMapValue(key, nil, nil) + current, err := client.GetMapValue(key, nil) s.NoError(err) currentDomainVal, ok := current["DomainID"] s.True(ok) @@ -336,7 +331,7 @@ func (s *fileBasedClientSuite) TestUpdateConfig() { s.NoError(err) // verify update result - current, err = client.GetMapValue(key, nil, nil) + current, err = client.GetMapValue(key, nil) s.NoError(err) currentDomainVal, ok = current["DomainID"] s.True(ok) diff --git a/common/dynamicconfig/inMemoryClient.go b/common/dynamicconfig/inMemoryClient.go index 8b22b9f0ca1..7bb27e548d7 100644 --- 
a/common/dynamicconfig/inMemoryClient.go +++ b/common/dynamicconfig/inMemoryClient.go @@ -48,98 +48,95 @@ func (mc *inMemoryClient) SetValue(key Key, value interface{}) { mc.globalValues[key] = value } -func (mc *inMemoryClient) GetValue(key Key, defaultValue interface{}) (interface{}, error) { +func (mc *inMemoryClient) GetValue(key Key) (interface{}, error) { mc.RLock() defer mc.RUnlock() if val, ok := mc.globalValues[key]; ok { return val, nil } - return defaultValue, NotFoundError + return key.DefaultValue(), NotFoundError } -func (mc *inMemoryClient) GetValueWithFilters( - name Key, filters map[Filter]interface{}, defaultValue interface{}, -) (interface{}, error) { +func (mc *inMemoryClient) GetValueWithFilters(name Key, filters map[Filter]interface{}) (interface{}, error) { mc.RLock() defer mc.RUnlock() - return mc.GetValue(name, defaultValue) + return mc.GetValue(name) } -func (mc *inMemoryClient) GetIntValue(name IntKey, filters map[Filter]interface{}, defaultValue int) (int, error) { +func (mc *inMemoryClient) GetIntValue(name IntKey, filters map[Filter]interface{}) (int, error) { mc.RLock() defer mc.RUnlock() if val, ok := mc.globalValues[name]; ok { return val.(int), nil } - return defaultValue, NotFoundError + return name.DefaultInt(), NotFoundError } -func (mc *inMemoryClient) GetFloatValue(name FloatKey, filters map[Filter]interface{}, defaultValue float64) (float64, error) { +func (mc *inMemoryClient) GetFloatValue(name FloatKey, filters map[Filter]interface{}) (float64, error) { mc.RLock() defer mc.RUnlock() if val, ok := mc.globalValues[name]; ok { return val.(float64), nil } - return defaultValue, NotFoundError + return name.DefaultFloat(), NotFoundError } -func (mc *inMemoryClient) GetBoolValue(name BoolKey, filters map[Filter]interface{}, defaultValue bool) (bool, error) { +func (mc *inMemoryClient) GetBoolValue(name BoolKey, filters map[Filter]interface{}) (bool, error) { mc.RLock() defer mc.RUnlock() if val, ok := mc.globalValues[name]; ok { 
return val.(bool), nil } - return defaultValue, NotFoundError + return name.DefaultBool(), NotFoundError } -func (mc *inMemoryClient) GetStringValue(name StringKey, filters map[Filter]interface{}, defaultValue string) (string, error) { +func (mc *inMemoryClient) GetStringValue(name StringKey, filters map[Filter]interface{}) (string, error) { mc.RLock() defer mc.RUnlock() if val, ok := mc.globalValues[name]; ok { return val.(string), nil } - return defaultValue, NotFoundError + return name.DefaultString(), NotFoundError } -func (mc *inMemoryClient) GetMapValue( - name MapKey, filters map[Filter]interface{}, defaultValue map[string]interface{}, -) (map[string]interface{}, error) { +func (mc *inMemoryClient) GetMapValue(name MapKey, filters map[Filter]interface{}) (map[string]interface{}, error) { mc.RLock() defer mc.RUnlock() if val, ok := mc.globalValues[name]; ok { return val.(map[string]interface{}), nil } - return defaultValue, NotFoundError + return name.DefaultMap(), NotFoundError } -func (mc *inMemoryClient) GetDurationValue( - name DurationKey, filters map[Filter]interface{}, defaultValue time.Duration, -) (time.Duration, error) { +func (mc *inMemoryClient) GetDurationValue(name DurationKey, filters map[Filter]interface{}) (time.Duration, error) { mc.RLock() defer mc.RUnlock() if val, ok := mc.globalValues[name]; ok { return val.(time.Duration), nil } - return defaultValue, NotFoundError + return name.DefaultDuration(), NotFoundError } func (mc *inMemoryClient) UpdateValue(key Key, value interface{}) error { + if err := ValidateKeyValuePair(key, value); err != nil { + return err + } mc.SetValue(key, value) return nil } func (mc *inMemoryClient) RestoreValue(name Key, filters map[Filter]interface{}) error { - return errors.New("not supported for file based client") + return errors.New("not supported for in-memory client") } func (mc *inMemoryClient) ListValue(name Key) ([]*types.DynamicConfigEntry, error) { - return nil, errors.New("not supported for file 
based client") + return nil, errors.New("not supported for in-memory client") } diff --git a/common/dynamicconfig/nopClient.go b/common/dynamicconfig/nopClient.go index d484d9c8701..79f7be9bf62 100644 --- a/common/dynamicconfig/nopClient.go +++ b/common/dynamicconfig/nopClient.go @@ -31,57 +31,51 @@ import ( // nopClient is a dummy implements of dynamicconfig Client interface, all operations will always return default values. type nopClient struct{} -func (mc *nopClient) GetValue(name Key, defaultValue interface{}) (interface{}, error) { +func (mc *nopClient) GetValue(name Key) (interface{}, error) { return nil, NotFoundError } -func (mc *nopClient) GetValueWithFilters( - name Key, filters map[Filter]interface{}, defaultValue interface{}, -) (interface{}, error) { +func (mc *nopClient) GetValueWithFilters(name Key, filters map[Filter]interface{}) (interface{}, error) { return nil, NotFoundError } -func (mc *nopClient) GetIntValue(name IntKey, filters map[Filter]interface{}, defaultValue int) (int, error) { - return defaultValue, NotFoundError +func (mc *nopClient) GetIntValue(name IntKey, filters map[Filter]interface{}) (int, error) { + return name.DefaultInt(), NotFoundError } -func (mc *nopClient) GetFloatValue(name FloatKey, filters map[Filter]interface{}, defaultValue float64) (float64, error) { - return defaultValue, NotFoundError +func (mc *nopClient) GetFloatValue(name FloatKey, filters map[Filter]interface{}) (float64, error) { + return name.DefaultFloat(), NotFoundError } -func (mc *nopClient) GetBoolValue(name BoolKey, filters map[Filter]interface{}, defaultValue bool) (bool, error) { +func (mc *nopClient) GetBoolValue(name BoolKey, filters map[Filter]interface{}) (bool, error) { if filters[DomainName] == "TestRawHistoryDomain" { return true, NotFoundError } - return defaultValue, NotFoundError + return name.DefaultBool(), NotFoundError } -func (mc *nopClient) GetStringValue(name StringKey, filters map[Filter]interface{}, defaultValue string) (string, 
error) { - return defaultValue, NotFoundError +func (mc *nopClient) GetStringValue(name StringKey, filters map[Filter]interface{}) (string, error) { + return name.DefaultString(), NotFoundError } -func (mc *nopClient) GetMapValue( - name MapKey, filters map[Filter]interface{}, defaultValue map[string]interface{}, -) (map[string]interface{}, error) { - return defaultValue, NotFoundError +func (mc *nopClient) GetMapValue(name MapKey, filters map[Filter]interface{}) (map[string]interface{}, error) { + return name.DefaultMap(), NotFoundError } -func (mc *nopClient) GetDurationValue( - name DurationKey, filters map[Filter]interface{}, defaultValue time.Duration, -) (time.Duration, error) { - return defaultValue, NotFoundError +func (mc *nopClient) GetDurationValue(name DurationKey, filters map[Filter]interface{}) (time.Duration, error) { + return name.DefaultDuration(), NotFoundError } func (mc *nopClient) UpdateValue(name Key, value interface{}) error { - return errors.New("unable to update key") + return errors.New("not supported for nop client") } func (mc *nopClient) RestoreValue(name Key, filters map[Filter]interface{}) error { - return errors.New("not supported for file based client") + return errors.New("not supported for nop client") } func (mc *nopClient) ListValue(name Key) ([]*types.DynamicConfigEntry, error) { - return nil, errors.New("not supported for file based client") + return nil, errors.New("not supported for nop client") } // NewNopClient creates a nop client diff --git a/common/log/loggerimpl/logger_test.go b/common/log/loggerimpl/logger_test.go index d61862e3052..e5cb366b89d 100644 --- a/common/log/loggerimpl/logger_test.go +++ b/common/log/loggerimpl/logger_test.go @@ -86,7 +86,7 @@ func TestThrottleLogger(t *testing.T) { dc := dynamicconfig.NewNopClient() cln := dynamicconfig.NewCollection(dc, NewNopLogger()) - logger := NewThrottledLogger(NewLogger(zapLogger), cln.GetIntProperty(dynamicconfig.FrontendUserRPS, 1)) + logger := 
NewThrottledLogger(NewLogger(zapLogger), cln.GetIntProperty(dynamicconfig.FrontendUserRPS)) preCaller := caller(1) logger.WithTags(tag.Error(fmt.Errorf("test error"))).WithTags(tag.ComponentShard).Info("test info", tag.WorkflowActionWorkflowStarted) diff --git a/common/persistence/serializer_test.go b/common/persistence/serializer_test.go index 7449849b66d..b2867d8f82a 100644 --- a/common/persistence/serializer_test.go +++ b/common/persistence/serializer_test.go @@ -172,7 +172,7 @@ func (s *cadenceSerializerSuite) TestSerializer() { SchemaVersion: 1, Entries: []*types.DynamicConfigEntry{ { - Name: dynamicconfig.Keys[dynamicconfig.TestGetBoolPropertyKey], + Name: dynamicconfig.TestGetBoolPropertyKey.String(), Values: []*types.DynamicConfigValue{ { Value: &types.DataBlob{ diff --git a/common/task/parallelTaskProcessor_test.go b/common/task/parallelTaskProcessor_test.go index f861d8564b1..e311c8157e3 100644 --- a/common/task/parallelTaskProcessor_test.go +++ b/common/task/parallelTaskProcessor_test.go @@ -210,8 +210,10 @@ func (s *parallelTaskProcessorSuite) TestMonitor() { s.processor.shutdownWG.Add(1) // for monitor dcClient := dynamicconfig.NewInMemoryClient() + err := dcClient.UpdateValue(dynamicconfig.TaskSchedulerWorkerCount, workerCount) + s.NoError(err) dcCollection := dynamicconfig.NewCollection(dcClient, s.processor.logger) - s.processor.options.WorkerCount = dcCollection.GetIntProperty(dynamicconfig.TaskSchedulerWorkerCount, workerCount) + s.processor.options.WorkerCount = dcCollection.GetIntProperty(dynamicconfig.TaskSchedulerWorkerCount) testMonitorTickerDuration := 100 * time.Millisecond go s.processor.workerMonitor(testMonitorTickerDuration) @@ -227,7 +229,8 @@ func (s *parallelTaskProcessorSuite) TestMonitor() { s.processor.shutdownWG.Add(workerCount + 1) newWorkerCount := 3 - dcClient.UpdateValue(dynamicconfig.TaskSchedulerWorkerCount, newWorkerCount) + err = dcClient.UpdateValue(dynamicconfig.TaskSchedulerWorkerCount, newWorkerCount) + s.NoError(err) 
time.Sleep(2 * testMonitorTickerDuration) for i := 0; i != newWorkerCount+1; i++ { diff --git a/host/dynamicconfig.go b/host/dynamicconfig.go index f511e3f6d7f..5ca2d1e1e02 100644 --- a/host/dynamicconfig.go +++ b/host/dynamicconfig.go @@ -56,29 +56,27 @@ type dynamicClient struct { client dynamicconfig.Client } -func (d *dynamicClient) GetValue(name dynamicconfig.Key, defaultValue interface{}) (interface{}, error) { +func (d *dynamicClient) GetValue(name dynamicconfig.Key) (interface{}, error) { d.RLock() if val, ok := d.overrides[name]; ok { d.RUnlock() return val, nil } d.RUnlock() - return d.client.GetValue(name, defaultValue) + return d.client.GetValue(name) } -func (d *dynamicClient) GetValueWithFilters( - name dynamicconfig.Key, filters map[dynamicconfig.Filter]interface{}, defaultValue interface{}, -) (interface{}, error) { +func (d *dynamicClient) GetValueWithFilters(name dynamicconfig.Key, filters map[dynamicconfig.Filter]interface{}) (interface{}, error) { d.RLock() if val, ok := d.overrides[name]; ok { d.RUnlock() return val, nil } d.RUnlock() - return d.client.GetValueWithFilters(name, filters, defaultValue) + return d.client.GetValueWithFilters(name, filters) } -func (d *dynamicClient) GetIntValue(name dynamicconfig.IntKey, filters map[dynamicconfig.Filter]interface{}, defaultValue int) (int, error) { +func (d *dynamicClient) GetIntValue(name dynamicconfig.IntKey, filters map[dynamicconfig.Filter]interface{}) (int, error) { d.RLock() if val, ok := d.overrides[name]; ok { if intVal, ok := val.(int); ok { @@ -87,10 +85,10 @@ func (d *dynamicClient) GetIntValue(name dynamicconfig.IntKey, filters map[dynam } } d.RUnlock() - return d.client.GetIntValue(name, filters, defaultValue) + return d.client.GetIntValue(name, filters) } -func (d *dynamicClient) GetFloatValue(name dynamicconfig.FloatKey, filters map[dynamicconfig.Filter]interface{}, defaultValue float64) (float64, error) { +func (d *dynamicClient) GetFloatValue(name dynamicconfig.FloatKey, filters 
map[dynamicconfig.Filter]interface{}) (float64, error) { d.RLock() if val, ok := d.overrides[name]; ok { if floatVal, ok := val.(float64); ok { @@ -99,10 +97,10 @@ func (d *dynamicClient) GetFloatValue(name dynamicconfig.FloatKey, filters map[d } } d.RUnlock() - return d.client.GetFloatValue(name, filters, defaultValue) + return d.client.GetFloatValue(name, filters) } -func (d *dynamicClient) GetBoolValue(name dynamicconfig.BoolKey, filters map[dynamicconfig.Filter]interface{}, defaultValue bool) (bool, error) { +func (d *dynamicClient) GetBoolValue(name dynamicconfig.BoolKey, filters map[dynamicconfig.Filter]interface{}) (bool, error) { d.RLock() if val, ok := d.overrides[name]; ok { if boolVal, ok := val.(bool); ok { @@ -111,10 +109,10 @@ func (d *dynamicClient) GetBoolValue(name dynamicconfig.BoolKey, filters map[dyn } } d.RUnlock() - return d.client.GetBoolValue(name, filters, defaultValue) + return d.client.GetBoolValue(name, filters) } -func (d *dynamicClient) GetStringValue(name dynamicconfig.StringKey, filters map[dynamicconfig.Filter]interface{}, defaultValue string) (string, error) { +func (d *dynamicClient) GetStringValue(name dynamicconfig.StringKey, filters map[dynamicconfig.Filter]interface{}) (string, error) { d.RLock() if val, ok := d.overrides[name]; ok { if stringVal, ok := val.(string); ok { @@ -123,12 +121,10 @@ func (d *dynamicClient) GetStringValue(name dynamicconfig.StringKey, filters map } } d.RUnlock() - return d.client.GetStringValue(name, filters, defaultValue) + return d.client.GetStringValue(name, filters) } -func (d *dynamicClient) GetMapValue( - name dynamicconfig.MapKey, filters map[dynamicconfig.Filter]interface{}, defaultValue map[string]interface{}, -) (map[string]interface{}, error) { +func (d *dynamicClient) GetMapValue(name dynamicconfig.MapKey, filters map[dynamicconfig.Filter]interface{}) (map[string]interface{}, error) { d.RLock() if val, ok := d.overrides[name]; ok { if mapVal, ok := val.(map[string]interface{}); ok { @@ 
-137,12 +133,10 @@ func (d *dynamicClient) GetMapValue( } } d.RUnlock() - return d.client.GetMapValue(name, filters, defaultValue) + return d.client.GetMapValue(name, filters) } -func (d *dynamicClient) GetDurationValue( - name dynamicconfig.DurationKey, filters map[dynamicconfig.Filter]interface{}, defaultValue time.Duration, -) (time.Duration, error) { +func (d *dynamicClient) GetDurationValue(name dynamicconfig.DurationKey, filters map[dynamicconfig.Filter]interface{}) (time.Duration, error) { d.RLock() if val, ok := d.overrides[name]; ok { if durationVal, ok := val.(time.Duration); ok { @@ -151,7 +145,7 @@ func (d *dynamicClient) GetDurationValue( } } d.RUnlock() - return d.client.GetDurationValue(name, filters, defaultValue) + return d.client.GetDurationValue(name, filters) } func (d *dynamicClient) UpdateValue(name dynamicconfig.Key, value interface{}) error { diff --git a/service/frontend/adminHandler.go b/service/frontend/adminHandler.go index 3487c388eb6..10bc1ddc5c9 100644 --- a/service/frontend/adminHandler.go +++ b/service/frontend/adminHandler.go @@ -221,8 +221,7 @@ func (adh *adminHandlerImpl) AddSearchAttribute( } searchAttr := request.GetSearchAttribute() - currentValidAttr, err := adh.params.DynamicConfig.GetMapValue( - dc.ValidSearchAttributes, nil, definition.GetDefaultIndexedKeys()) + currentValidAttr, err := adh.params.DynamicConfig.GetMapValue(dc.ValidSearchAttributes, nil) if err != nil { return adh.error(&types.InternalServiceError{Message: fmt.Sprintf("Failed to get dynamic config, err: %v", err)}, scope) } @@ -1606,7 +1605,7 @@ func (adh *adminHandlerImpl) GetDynamicConfig(ctx context.Context, request *type var value interface{} if request.Filters == nil { - value, err = adh.params.DynamicConfig.GetValue(keyVal, nil) + value, err = adh.params.DynamicConfig.GetValue(keyVal) if err != nil { return nil, adh.error(err, scope) } @@ -1615,7 +1614,7 @@ func (adh *adminHandlerImpl) GetDynamicConfig(ctx context.Context, request *type if err != nil 
{ return nil, adh.error(err, scope) } - value, err = adh.params.DynamicConfig.GetValueWithFilters(keyVal, convFilters, nil) + value, err = adh.params.DynamicConfig.GetValueWithFilters(keyVal, convFilters) if err != nil { return nil, adh.error(err, scope) } diff --git a/service/frontend/adminHandler_test.go b/service/frontend/adminHandler_test.go index e62d57f5ebf..021cd7ce9fe 100644 --- a/service/frontend/adminHandler_test.go +++ b/service/frontend/adminHandler_test.go @@ -38,7 +38,6 @@ import ( "github.com/uber/cadence/common" "github.com/uber/cadence/common/cache" "github.com/uber/cadence/common/config" - "github.com/uber/cadence/common/definition" "github.com/uber/cadence/common/dynamicconfig" esmock "github.com/uber/cadence/common/elasticsearch/mocks" "github.com/uber/cadence/common/membership" @@ -563,7 +562,7 @@ func (s *adminHandlerSuite) Test_AddSearchAttribute_Validate() { mockValidAttr := map[string]interface{}{ "testkey": types.IndexedValueTypeKeyword, } - dynamicConfig.EXPECT().GetMapValue(dynamicconfig.ValidSearchAttributes, nil, definition.GetDefaultIndexedKeys()). + dynamicConfig.EXPECT().GetMapValue(dynamicconfig.ValidSearchAttributes, nil). 
Return(mockValidAttr, nil).AnyTimes() testCases2 := []test{ @@ -688,19 +687,19 @@ func (s *adminHandlerSuite) Test_ConfigStore_InvalidKey() { handler := s.handler _, err := handler.GetDynamicConfig(ctx, &types.GetDynamicConfigRequest{ - ConfigName: dynamicconfig.UnknownKey.String(), + ConfigName: "invalid key", Filters: nil, }) s.Error(err) err = handler.UpdateDynamicConfig(ctx, &types.UpdateDynamicConfigRequest{ - ConfigName: dynamicconfig.UnknownKey.String(), + ConfigName: "invalid key", ConfigValues: nil, }) s.Error(err) err = handler.RestoreDynamicConfig(ctx, &types.RestoreDynamicConfigRequest{ - ConfigName: dynamicconfig.UnknownKey.String(), + ConfigName: "invalid key", Filters: nil, }) s.Error(err) @@ -713,7 +712,7 @@ func (s *adminHandlerSuite) Test_GetDynamicConfig_NoFilter() { handler.params.DynamicConfig = dynamicConfig dynamicConfig.EXPECT(). - GetValue(dynamicconfig.TestGetBoolPropertyKey, nil). + GetValue(dynamicconfig.TestGetBoolPropertyKey). Return(true, nil).AnyTimes() resp, err := handler.GetDynamicConfig(ctx, &types.GetDynamicConfigRequest{ @@ -736,7 +735,7 @@ func (s *adminHandlerSuite) Test_GetDynamicConfig_FilterMatch() { dynamicConfig.EXPECT(). GetValueWithFilters(dynamicconfig.TestGetBoolPropertyKey, map[dynamicconfig.Filter]interface{}{ dynamicconfig.DomainName: "samples_domain", - }, nil). + }). 
Return(true, nil).AnyTimes() encDomainName, err := json.Marshal("samples_domain") diff --git a/service/frontend/workflowHandler_test.go b/service/frontend/workflowHandler_test.go index 3244ec10880..b3c7c717aa9 100644 --- a/service/frontend/workflowHandler_test.go +++ b/service/frontend/workflowHandler_test.go @@ -485,7 +485,8 @@ func (s *workflowHandlerSuite) TestStartWorkflowExecution_Failed_InvalidTaskStar func (s *workflowHandlerSuite) TestRegisterDomain_Failure_MissingDomainDataKey() { dynamicClient := dc.NewInMemoryClient() - dynamicClient.UpdateValue(dc.RequiredDomainDataKeys, map[string]interface{}{"Tier": true}) + err := dynamicClient.UpdateValue(dc.RequiredDomainDataKeys, map[string]interface{}{"Tier": true}) + s.NoError(err) cfg := s.newConfig(dynamicClient) wh := s.getWorkflowHandler(cfg) @@ -495,7 +496,7 @@ func (s *workflowHandlerSuite) TestRegisterDomain_Failure_MissingDomainDataKey() types.ArchivalStatusEnabled.Ptr(), testVisibilityArchivalURI, ) - err := wh.RegisterDomain(context.Background(), req) + err = wh.RegisterDomain(context.Background(), req) s.Error(err) s.Contains(err.Error(), "domain data error, missing required key") } @@ -898,7 +899,8 @@ func (s *workflowHandlerSuite) TestUpdateDomain_Success_FailOver() { func (s *workflowHandlerSuite) TestUpdateDomain_Failure_FailoverLockdown() { dynamicClient := dc.NewInMemoryClient() - dynamicClient.UpdateValue(dc.Lockdown, map[string]interface{}{"Lockdown": true}) + err := dynamicClient.UpdateValue(dc.Lockdown, true) + s.NoError(err) wh := s.getWorkflowHandler(s.newConfig(dynamicClient)) updateReq := updateFailoverRequest( diff --git a/service/history/config/config.go b/service/history/config/config.go index 2268cf166a4..4b6fefd71a9 100644 --- a/service/history/config/config.go +++ b/service/history/config/config.go @@ -315,30 +315,30 @@ type Config struct { // New returns new service config with default values func New(dc *dynamicconfig.Collection, numberOfShards int, storeType string, 
isAdvancedVisConfigExist bool) *Config { cfg := &Config{ - NumberOfShards: numberOfShards, - IsAdvancedVisConfigExist: isAdvancedVisConfigExist, - RPS: dc.GetIntProperty(dynamicconfig.HistoryRPS), - MaxIDLengthWarnLimit: dc.GetIntProperty(dynamicconfig.MaxIDLengthWarnLimit), - DomainNameMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.DomainNameMaxLength), - IdentityMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.IdentityMaxLength), - WorkflowIDMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.WorkflowIDMaxLength), - SignalNameMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.SignalNameMaxLength), - WorkflowTypeMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.WorkflowTypeMaxLength), - RequestIDMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.RequestIDMaxLength), - TaskListNameMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.TaskListNameMaxLength), - ActivityIDMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.ActivityIDMaxLength), - ActivityTypeMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.ActivityTypeMaxLength), - MarkerNameMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.MarkerNameMaxLength), - TimerIDMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.TimerIDMaxLength), - PersistenceMaxQPS: dc.GetIntProperty(dynamicconfig.HistoryPersistenceMaxQPS), - PersistenceGlobalMaxQPS: dc.GetIntProperty(dynamicconfig.HistoryPersistenceGlobalMaxQPS), - ShutdownDrainDuration: dc.GetDurationProperty(dynamicconfig.HistoryShutdownDrainDuration), - EnableVisibilitySampling: dc.GetBoolProperty(dynamicconfig.EnableVisibilitySampling), - EnableReadFromClosedExecutionV2: dc.GetBoolProperty(dynamicconfig.EnableReadFromClosedExecutionV2), - VisibilityOpenMaxQPS: dc.GetIntPropertyFilteredByDomain(dynamicconfig.HistoryVisibilityOpenMaxQPS), - VisibilityClosedMaxQPS: dc.GetIntPropertyFilteredByDomain(dynamicconfig.HistoryVisibilityClosedMaxQPS), - MaxAutoResetPoints: 
dc.GetIntPropertyFilteredByDomain(dynamicconfig.HistoryMaxAutoResetPoints), - MaxDecisionStartToCloseSeconds: dc.GetIntPropertyFilteredByDomain(dynamicconfig.MaxDecisionStartToCloseSeconds), + NumberOfShards: numberOfShards, + IsAdvancedVisConfigExist: isAdvancedVisConfigExist, + RPS: dc.GetIntProperty(dynamicconfig.HistoryRPS), + MaxIDLengthWarnLimit: dc.GetIntProperty(dynamicconfig.MaxIDLengthWarnLimit), + DomainNameMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.DomainNameMaxLength), + IdentityMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.IdentityMaxLength), + WorkflowIDMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.WorkflowIDMaxLength), + SignalNameMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.SignalNameMaxLength), + WorkflowTypeMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.WorkflowTypeMaxLength), + RequestIDMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.RequestIDMaxLength), + TaskListNameMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.TaskListNameMaxLength), + ActivityIDMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.ActivityIDMaxLength), + ActivityTypeMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.ActivityTypeMaxLength), + MarkerNameMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.MarkerNameMaxLength), + TimerIDMaxLength: dc.GetIntPropertyFilteredByDomain(dynamicconfig.TimerIDMaxLength), + PersistenceMaxQPS: dc.GetIntProperty(dynamicconfig.HistoryPersistenceMaxQPS), + PersistenceGlobalMaxQPS: dc.GetIntProperty(dynamicconfig.HistoryPersistenceGlobalMaxQPS), + ShutdownDrainDuration: dc.GetDurationProperty(dynamicconfig.HistoryShutdownDrainDuration), + EnableVisibilitySampling: dc.GetBoolProperty(dynamicconfig.EnableVisibilitySampling), + EnableReadFromClosedExecutionV2: dc.GetBoolProperty(dynamicconfig.EnableReadFromClosedExecutionV2), + VisibilityOpenMaxQPS: dc.GetIntPropertyFilteredByDomain(dynamicconfig.HistoryVisibilityOpenMaxQPS), + 
VisibilityClosedMaxQPS: dc.GetIntPropertyFilteredByDomain(dynamicconfig.HistoryVisibilityClosedMaxQPS), + MaxAutoResetPoints: dc.GetIntPropertyFilteredByDomain(dynamicconfig.HistoryMaxAutoResetPoints), + MaxDecisionStartToCloseSeconds: dc.GetIntPropertyFilteredByDomain(dynamicconfig.MaxDecisionStartToCloseSeconds), AdvancedVisibilityWritingMode: dc.GetStringProperty(dynamicconfig.AdvancedVisibilityWritingMode), EmitShardDiffLog: dc.GetBoolProperty(dynamicconfig.EmitShardDiffLog), HistoryCacheInitialSize: dc.GetIntProperty(dynamicconfig.HistoryCacheInitialSize), @@ -376,7 +376,7 @@ func New(dc *dynamicconfig.Collection, numberOfShards int, storeType string, isA ResurrectionCheckMinDelay: dc.GetDurationPropertyFilteredByDomain(dynamicconfig.ResurrectionCheckMinDelay), QueueProcessorEnableSplit: dc.GetBoolProperty(dynamicconfig.QueueProcessorEnableSplit), - QueueProcessorSplitMaxLevel: dc.GetIntProperty(dynamicconfig.QueueProcessorSplitMaxLevel), // 3 levels, start from 0 + QueueProcessorSplitMaxLevel: dc.GetIntProperty(dynamicconfig.QueueProcessorSplitMaxLevel), QueueProcessorEnableRandomSplitByDomainID: dc.GetBoolPropertyFilteredByDomainID(dynamicconfig.QueueProcessorEnableRandomSplitByDomainID), QueueProcessorRandomSplitProbability: dc.GetFloat64Property(dynamicconfig.QueueProcessorRandomSplitProbability), QueueProcessorEnablePendingTaskSplitByDomainID: dc.GetBoolPropertyFilteredByDomainID(dynamicconfig.QueueProcessorEnablePendingTaskSplitByDomainID), @@ -454,10 +454,9 @@ func New(dc *dynamicconfig.Collection, numberOfShards int, storeType string, isA ReplicatorProcessorFetchTasksBatchSize: dc.GetIntPropertyFilteredByShardID(dynamicconfig.ReplicatorTaskBatchSize), ReplicatorUpperLatency: dc.GetDurationProperty(dynamicconfig.ReplicatorUpperLatency), - ExecutionMgrNumConns: dc.GetIntProperty(dynamicconfig.ExecutionMgrNumConns), - HistoryMgrNumConns: dc.GetIntProperty(dynamicconfig.HistoryMgrNumConns), - MaximumBufferedEventsBatch: 
dc.GetIntProperty(dynamicconfig.MaximumBufferedEventsBatch), - // 10K signals should big enough given workflow execution has 200K history lengh limit. It needs to be non-zero to protect continueAsNew from infinit loop + ExecutionMgrNumConns: dc.GetIntProperty(dynamicconfig.ExecutionMgrNumConns), + HistoryMgrNumConns: dc.GetIntProperty(dynamicconfig.HistoryMgrNumConns), + MaximumBufferedEventsBatch: dc.GetIntProperty(dynamicconfig.MaximumBufferedEventsBatch), MaximumSignalsPerExecution: dc.GetIntPropertyFilteredByDomain(dynamicconfig.MaximumSignalsPerExecution), ShardUpdateMinInterval: dc.GetDurationProperty(dynamicconfig.ShardUpdateMinInterval), ShardSyncMinInterval: dc.GetDurationProperty(dynamicconfig.ShardSyncMinInterval), @@ -472,7 +471,7 @@ func New(dc *dynamicconfig.Collection, numberOfShards int, storeType string, isA ParentClosePolicyThreshold: dc.GetIntPropertyFilteredByDomain(dynamicconfig.ParentClosePolicyThreshold), NumArchiveSystemWorkflows: dc.GetIntProperty(dynamicconfig.NumArchiveSystemWorkflows), - ArchiveRequestRPS: dc.GetIntProperty(dynamicconfig.ArchiveRequestRPS), // should be much smaller than frontend RPS + ArchiveRequestRPS: dc.GetIntProperty(dynamicconfig.ArchiveRequestRPS), ArchiveInlineHistoryRPS: dc.GetIntProperty(dynamicconfig.ArchiveInlineHistoryRPS), ArchiveInlineHistoryGlobalRPS: dc.GetIntProperty(dynamicconfig.ArchiveInlineHistoryGlobalRPS), ArchiveInlineVisibilityRPS: dc.GetIntProperty(dynamicconfig.ArchiveInlineVisibilityRPS), @@ -495,7 +494,7 @@ func New(dc *dynamicconfig.Collection, numberOfShards int, storeType string, isA SearchAttributesTotalSizeLimit: dc.GetIntPropertyFilteredByDomain(dynamicconfig.SearchAttributesTotalSizeLimit), StickyTTL: dc.GetDurationPropertyFilteredByDomain(dynamicconfig.StickyTTL), DecisionHeartbeatTimeout: dc.GetDurationPropertyFilteredByDomain(dynamicconfig.DecisionHeartbeatTimeout), - DecisionRetryCriticalAttempts: dc.GetIntProperty(dynamicconfig.DecisionRetryCriticalAttempts), // about 30m + 
DecisionRetryCriticalAttempts: dc.GetIntProperty(dynamicconfig.DecisionRetryCriticalAttempts), DecisionRetryMaxAttempts: dc.GetIntPropertyFilteredByDomain(dynamicconfig.DecisionRetryMaxAttempts), NormalDecisionScheduleToStartMaxAttempts: dc.GetIntPropertyFilteredByDomain(dynamicconfig.NormalDecisionScheduleToStartMaxAttempts), NormalDecisionScheduleToStartTimeout: dc.GetDurationPropertyFilteredByDomain(dynamicconfig.NormalDecisionScheduleToStartTimeout), @@ -554,16 +553,21 @@ func NewForTest() *Config { // NewForTestByShardNumber create new history service config for test func NewForTestByShardNumber(shardNumber int) *Config { + panicIfErr := func(err error) { + if err != nil { + panic(err) + } + } inMem := dynamicconfig.NewInMemoryClient() - inMem.UpdateValue(dynamicconfig.HistoryLongPollExpirationInterval, 10*time.Second) - inMem.UpdateValue(dynamicconfig.EnableConsistentQueryByDomain, true) - inMem.UpdateValue(dynamicconfig.ReplicationTaskProcessorHostQPS, 10000) - inMem.UpdateValue(dynamicconfig.ReplicationTaskProcessorShardQPS, 10000) - inMem.UpdateValue(dynamicconfig.ReplicationTaskProcessorStartWait, time.Nanosecond) - inMem.UpdateValue(dynamicconfig.EnableActivityLocalDispatchByDomain, true) - inMem.UpdateValue(dynamicconfig.MaxActivityCountDispatchByDomain, 0) - inMem.UpdateValue(dynamicconfig.EnableCrossClusterOperations, true) - inMem.UpdateValue(dynamicconfig.NormalDecisionScheduleToStartMaxAttempts, 3) + panicIfErr(inMem.UpdateValue(dynamicconfig.HistoryLongPollExpirationInterval, 10*time.Second)) + panicIfErr(inMem.UpdateValue(dynamicconfig.EnableConsistentQueryByDomain, true)) + panicIfErr(inMem.UpdateValue(dynamicconfig.ReplicationTaskProcessorHostQPS, float64(10000))) + panicIfErr(inMem.UpdateValue(dynamicconfig.ReplicationTaskProcessorShardQPS, float64(10000))) + panicIfErr(inMem.UpdateValue(dynamicconfig.ReplicationTaskProcessorStartWait, time.Nanosecond)) + panicIfErr(inMem.UpdateValue(dynamicconfig.EnableActivityLocalDispatchByDomain, true)) + 
panicIfErr(inMem.UpdateValue(dynamicconfig.MaxActivityCountDispatchByDomain, 0)) + panicIfErr(inMem.UpdateValue(dynamicconfig.EnableCrossClusterOperations, true)) + panicIfErr(inMem.UpdateValue(dynamicconfig.NormalDecisionScheduleToStartMaxAttempts, 3)) dc := dynamicconfig.NewCollection(inMem, log.NewNoop()) config := New(dc, shardNumber, config.StoreTypeCassandra, false) // reduce the duration of long poll to increase test speed diff --git a/service/history/execution/history_builder_test.go b/service/history/execution/history_builder_test.go index 90ebb8136e6..5fbe392ad7d 100644 --- a/service/history/execution/history_builder_test.go +++ b/service/history/execution/history_builder_test.go @@ -1007,7 +1007,7 @@ func (s *historyBuilderSuite) addDecisionTaskCompletedEvent( event, err := s.msBuilder.AddDecisionTaskCompletedEvent(scheduleID, startedID, &types.RespondDecisionTaskCompletedRequest{ ExecutionContext: context, Identity: identity, - }, config.DefaultHistoryMaxAutoResetPoints) + }, common.DefaultHistoryMaxAutoResetPoints) s.Nil(err) return event diff --git a/service/history/task/priority_assigner_test.go b/service/history/task/priority_assigner_test.go index 5abec312386..25a6379d2b6 100644 --- a/service/history/task/priority_assigner_test.go +++ b/service/history/task/priority_assigner_test.go @@ -29,12 +29,12 @@ import ( "github.com/stretchr/testify/suite" "github.com/uber-go/tally" + "github.com/uber/cadence/common" "github.com/uber/cadence/common/cache" "github.com/uber/cadence/common/cluster" "github.com/uber/cadence/common/dynamicconfig" "github.com/uber/cadence/common/log" "github.com/uber/cadence/common/metrics" - "github.com/uber/cadence/common/task" "github.com/uber/cadence/common/types" "github.com/uber/cadence/service/history/config" "github.com/uber/cadence/service/history/constants" @@ -66,9 +66,12 @@ func (s *taskPriorityAssignerSuite) SetupTest() { s.mockDomainCache = cache.NewMockDomainCache(s.controller) s.testTaskProcessRPS = 10 - dc := 
dynamicconfig.NewNopCollection() + client := dynamicconfig.NewInMemoryClient() + err := client.UpdateValue(dynamicconfig.TaskProcessRPS, s.testTaskProcessRPS) + s.NoError(err) + dc := dynamicconfig.NewCollection(client, log.NewNoop()) s.config = config.NewForTest() - s.config.TaskProcessRPS = dc.GetIntPropertyFilteredByDomain(dynamicconfig.TaskProcessRPS, s.testTaskProcessRPS) + s.config.TaskProcessRPS = dc.GetIntPropertyFilteredByDomain(dynamicconfig.TaskProcessRPS) s.priorityAssigner = NewPriorityAssigner( cluster.TestCurrentClusterName, @@ -141,8 +144,8 @@ func (s *taskPriorityAssignerSuite) TestGetDomainInfo_Fail_UnknownError() { func (s *taskPriorityAssignerSuite) TestAssign_ReplicationTask() { mockTask := NewMockTask(s.controller) mockTask.EXPECT().GetQueueType().Return(QueueTypeReplication).Times(1) - mockTask.EXPECT().Priority().Return(task.NoPriority).Times(1) - mockTask.EXPECT().SetPriority(task.GetTaskPriority(task.LowPriorityClass, task.DefaultPrioritySubclass)).Times(1) + mockTask.EXPECT().Priority().Return(common.NoPriority).Times(1) + mockTask.EXPECT().SetPriority(common.GetTaskPriority(common.LowPriorityClass, common.DefaultPrioritySubclass)).Times(1) err := s.priorityAssigner.Assign(mockTask) s.NoError(err) @@ -158,8 +161,8 @@ func (s *taskPriorityAssignerSuite) TestAssign_StandbyTask_StandbyDomain() { mockTask := NewMockTask(s.controller) mockTask.EXPECT().GetQueueType().Return(QueueTypeStandbyTransfer).AnyTimes() mockTask.EXPECT().GetDomainID().Return(constants.TestDomainID).Times(1) - mockTask.EXPECT().Priority().Return(task.NoPriority).Times(1) - mockTask.EXPECT().SetPriority(task.GetTaskPriority(task.LowPriorityClass, task.DefaultPrioritySubclass)).Times(1) + mockTask.EXPECT().Priority().Return(common.NoPriority).Times(1) + mockTask.EXPECT().SetPriority(common.GetTaskPriority(common.LowPriorityClass, common.DefaultPrioritySubclass)).Times(1) err := s.priorityAssigner.Assign(mockTask) s.NoError(err) @@ -171,8 +174,8 @@ func (s 
*taskPriorityAssignerSuite) TestAssign_StandbyTask_ActiveDomain() { mockTask := NewMockTask(s.controller) mockTask.EXPECT().GetQueueType().Return(QueueTypeStandbyTransfer).AnyTimes() mockTask.EXPECT().GetDomainID().Return(constants.TestDomainID).Times(1) - mockTask.EXPECT().Priority().Return(task.NoPriority).Times(1) - mockTask.EXPECT().SetPriority(task.GetTaskPriority(task.HighPriorityClass, task.DefaultPrioritySubclass)).Times(1) + mockTask.EXPECT().Priority().Return(common.NoPriority).Times(1) + mockTask.EXPECT().SetPriority(common.GetTaskPriority(common.HighPriorityClass, common.DefaultPrioritySubclass)).Times(1) err := s.priorityAssigner.Assign(mockTask) s.NoError(err) @@ -188,8 +191,8 @@ func (s *taskPriorityAssignerSuite) TestAssign_ActiveTask_StandbyDomain() { mockTask := NewMockTask(s.controller) mockTask.EXPECT().GetQueueType().Return(QueueTypeActiveTimer).AnyTimes() mockTask.EXPECT().GetDomainID().Return(constants.TestDomainID).Times(1) - mockTask.EXPECT().Priority().Return(task.NoPriority).Times(1) - mockTask.EXPECT().SetPriority(task.GetTaskPriority(task.HighPriorityClass, task.DefaultPrioritySubclass)).Times(1) + mockTask.EXPECT().Priority().Return(common.NoPriority).Times(1) + mockTask.EXPECT().SetPriority(common.GetTaskPriority(common.HighPriorityClass, common.DefaultPrioritySubclass)).Times(1) err := s.priorityAssigner.Assign(mockTask) s.NoError(err) @@ -201,8 +204,8 @@ func (s *taskPriorityAssignerSuite) TestAssign_ActiveTransferTask_ActiveDomain() mockTask := NewMockTask(s.controller) mockTask.EXPECT().GetQueueType().Return(QueueTypeActiveTransfer).AnyTimes() mockTask.EXPECT().GetDomainID().Return(constants.TestDomainID).Times(1) - mockTask.EXPECT().Priority().Return(task.NoPriority).Times(1) - mockTask.EXPECT().SetPriority(task.GetTaskPriority(task.HighPriorityClass, task.DefaultPrioritySubclass)).Times(1) + mockTask.EXPECT().Priority().Return(common.NoPriority).Times(1) + 
mockTask.EXPECT().SetPriority(common.GetTaskPriority(common.HighPriorityClass, common.DefaultPrioritySubclass)).Times(1) err := s.priorityAssigner.Assign(mockTask) s.NoError(err) @@ -214,8 +217,8 @@ func (s *taskPriorityAssignerSuite) TestAssign_ActiveTimerTask_ActiveDomain() { mockTask := NewMockTask(s.controller) mockTask.EXPECT().GetQueueType().Return(QueueTypeActiveTimer).AnyTimes() mockTask.EXPECT().GetDomainID().Return(constants.TestDomainID).Times(1) - mockTask.EXPECT().Priority().Return(task.NoPriority).Times(1) - mockTask.EXPECT().SetPriority(task.GetTaskPriority(task.HighPriorityClass, task.DefaultPrioritySubclass)).Times(1) + mockTask.EXPECT().Priority().Return(common.NoPriority).Times(1) + mockTask.EXPECT().SetPriority(common.GetTaskPriority(common.HighPriorityClass, common.DefaultPrioritySubclass)).Times(1) err := s.priorityAssigner.Assign(mockTask) s.NoError(err) @@ -228,11 +231,11 @@ func (s *taskPriorityAssignerSuite) TestAssign_ThrottledTask() { mockTask := NewMockTask(s.controller) mockTask.EXPECT().GetQueueType().Return(QueueTypeActiveTimer).AnyTimes() mockTask.EXPECT().GetDomainID().Return(constants.TestDomainID).Times(1) - mockTask.EXPECT().Priority().Return(task.NoPriority).Times(1) + mockTask.EXPECT().Priority().Return(common.NoPriority).Times(1) if i < s.testTaskProcessRPS { - mockTask.EXPECT().SetPriority(task.GetTaskPriority(task.HighPriorityClass, task.DefaultPrioritySubclass)).Times(1) + mockTask.EXPECT().SetPriority(common.GetTaskPriority(common.HighPriorityClass, common.DefaultPrioritySubclass)).Times(1) } else { - mockTask.EXPECT().SetPriority(task.GetTaskPriority(task.DefaultPriorityClass, task.DefaultPrioritySubclass)).Times(1) + mockTask.EXPECT().SetPriority(common.GetTaskPriority(common.DefaultPriorityClass, common.DefaultPrioritySubclass)).Times(1) } err := s.priorityAssigner.Assign(mockTask) @@ -265,23 +268,23 @@ func (s *taskPriorityAssignerSuite) TestGetTaskPriority() { expectedPriority int }{ { - class: 
task.HighPriorityClass, - subClass: task.DefaultPrioritySubclass, + class: common.HighPriorityClass, + subClass: common.DefaultPrioritySubclass, expectedPriority: 1, }, { - class: task.DefaultPriorityClass, - subClass: task.LowPrioritySubclass, + class: common.DefaultPriorityClass, + subClass: common.LowPrioritySubclass, expectedPriority: 10, }, { - class: task.LowPriorityClass, - subClass: task.HighPrioritySubclass, + class: common.LowPriorityClass, + subClass: common.HighPrioritySubclass, expectedPriority: 16, }, } for _, tc := range testCases { - s.Equal(tc.expectedPriority, task.GetTaskPriority(tc.class, tc.subClass)) + s.Equal(tc.expectedPriority, common.GetTaskPriority(tc.class, tc.subClass)) } } diff --git a/service/history/testing/events_util.go b/service/history/testing/events_util.go index a10055a1cb4..498bd72c442 100644 --- a/service/history/testing/events_util.go +++ b/service/history/testing/events_util.go @@ -24,7 +24,6 @@ import ( "github.com/uber/cadence/common" "github.com/uber/cadence/common/persistence" "github.com/uber/cadence/common/types" - "github.com/uber/cadence/service/history/config" "github.com/uber/cadence/service/history/constants" "github.com/uber/cadence/service/history/execution" ) @@ -124,7 +123,7 @@ func AddDecisionTaskCompletedEvent( event, _ := builder.AddDecisionTaskCompletedEvent(scheduleID, startedID, &types.RespondDecisionTaskCompletedRequest{ ExecutionContext: context, Identity: identity, - }, config.DefaultHistoryMaxAutoResetPoints) + }, common.DefaultHistoryMaxAutoResetPoints) builder.FlushBufferedEvents() //nolint:errcheck diff --git a/service/worker/service.go b/service/worker/service.go index 332b6ddb976..9b486b6afc4 100644 --- a/service/worker/service.go +++ b/service/worker/service.go @@ -196,7 +196,7 @@ func NewConfig(params *resource.Params) *Config { IndexerConcurrency: dc.GetIntProperty(dynamicconfig.WorkerIndexerConcurrency), ESProcessorNumOfWorkers: 
dc.GetIntProperty(dynamicconfig.WorkerESProcessorNumOfWorkers), ESProcessorBulkActions: dc.GetIntProperty(dynamicconfig.WorkerESProcessorBulkActions), - ESProcessorBulkSize: dc.GetIntProperty(dynamicconfig.WorkerESProcessorBulkSize), // 16MB + ESProcessorBulkSize: dc.GetIntProperty(dynamicconfig.WorkerESProcessorBulkSize), ESProcessorFlushInterval: dc.GetDurationProperty(dynamicconfig.WorkerESProcessorFlushInterval), ValidSearchAttributes: dc.GetMapProperty(dynamicconfig.ValidSearchAttributes), } diff --git a/tools/cli/adminConfigStoreCommands.go b/tools/cli/adminConfigStoreCommands.go index d4e4eb33314..e59dd1fb375 100644 --- a/tools/cli/adminConfigStoreCommands.go +++ b/tools/cli/adminConfigStoreCommands.go @@ -185,7 +185,9 @@ func AdminListDynamicConfig(c *cli.Context) { ctx, cancel := newContext(c) defer cancel() - req := &types.ListDynamicConfigRequest{} + req := &types.ListDynamicConfigRequest{ + ConfigName: "", // empty string means all config values + } val, err := adminClient.ListDynamicConfig(ctx, req) if err != nil { diff --git a/tools/cli/domainUtils.go b/tools/cli/domainUtils.go index c9029bf5c50..1cd1ebd28c3 100644 --- a/tools/cli/domainUtils.go +++ b/tools/cli/domainUtils.go @@ -277,7 +277,7 @@ func initializeDomainHandler( domainConfig := domain.Config{ MinRetentionDays: dynamicconfig.GetIntPropertyFn(dynamicconfig.MinRetentionDays.DefaultInt()), - MaxBadBinaryCount: dynamicconfig.GetIntPropertyFilteredByDomain(dynamicconfig.MaxRetentionDays.DefaultInt()), + MaxBadBinaryCount: dynamicconfig.GetIntPropertyFilteredByDomain(dynamicconfig.FrontendMaxBadBinaries.DefaultInt()), FailoverCoolDown: dynamicconfig.GetDurationPropertyFnFilteredByDomain(dynamicconfig.FrontendFailoverCoolDown.DefaultDuration()), } return domain.NewHandler(