Skip to content

Commit

Permalink
fixing config name
Browse files · Browse the repository at this point in the history
Signed-off-by: Chengxuan Xing <chengxuan.xing@kaleido.io>
  • Loading branch information
Chengxuan committed Aug 13, 2024
1 parent e01f542 commit 8d5a8ed
Show file tree
Hide file tree
Showing 3 changed files with 11 additions and 11 deletions.
10 changes: 5 additions & 5 deletions internal/signermsgs/en_config_descriptions.go
Original file line number Diff line number Diff line change
Expand Up @@ -54,9 +54,9 @@ var (
ConfigBackendURL = ffc("config.backend.url", "URL for the backend JSON/RPC server / blockchain node", "url")
ConfigBackendProxyURL = ffc("config.backend.proxy.url", "Optional HTTP proxy URL", "url")

ConfigRPCBatchMaxConcurrentRequest = ffc("config.maxConcurrentRequest", "The maximum number of concurrent JSON-RPC requests get processed at a time", i18n.IntType)
ConfigRPCBatchEnabled = ffc("config.batch.enabled", "Whether to enable batching JSON-RPC requests", i18n.BooleanType)
ConfigRPCBatchSize = ffc("config.batch.size", "When the amount of queued requests reaches this number, they will be batched and dispatched", i18n.IntType)
ConfigRPCBatchTimeout = ffc("config.batch.timeout", "When the time since the first request was queued reaches this timeout, all requests in the queue will be batched and dispatched", i18n.TimeDurationType)
ConfigRPCBatchDispatchConcurrency = ffc("config.batch.dispatchConcurrency", "The maximum number of concurrent batch dispatching process", i18n.IntType)
ConfigRPCBatchMaxConcurrentRequests = ffc("config.maxConcurrentRequests", "The maximum number of concurrent JSON-RPC requests get processed at a time", i18n.IntType)
ConfigRPCBatchEnabled = ffc("config.batch.enabled", "Whether to enable batching JSON-RPC requests", i18n.BooleanType)
ConfigRPCBatchSize = ffc("config.batch.size", "When the amount of queued requests reaches this number, they will be batched and dispatched", i18n.IntType)
ConfigRPCBatchTimeout = ffc("config.batch.timeout", "When the time since the first request was queued reaches this timeout, all requests in the queue will be batched and dispatched", i18n.TimeDurationType)
ConfigRPCBatchDispatchConcurrency = ffc("config.batch.dispatchConcurrency", "The maximum number of concurrent batch dispatching process", i18n.IntType)
)
6 changes: 3 additions & 3 deletions pkg/rpcbackend/backend_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -680,7 +680,7 @@ func TestBatchRequestsOKWithBatchSize(t *testing.T) {
rpcConfig := config.RootSection("unittest")
InitConfig(rpcConfig)
rpcConfig.Set(ConfigBatchEnabled, true)
rpcConfig.Set(ConfigMaxConcurrentRequest, 10)
rpcConfig.Set(ConfigMaxConcurrentRequests, 10)
rpcConfig.Set(ConfigBatchTimeout, "2h") // very long delay, so need to rely on batch size to be hit for sending a batch
rpcConfig.Set(ConfigBatchSize, 2)

Expand Down Expand Up @@ -764,7 +764,7 @@ func TestBatchRequestsTestWorkerCounts(t *testing.T) {
rpcConfig := config.RootSection("unittest")
InitConfig(rpcConfig)
rpcConfig.Set(ConfigBatchEnabled, true)
rpcConfig.Set(ConfigMaxConcurrentRequest, 10)
rpcConfig.Set(ConfigMaxConcurrentRequests, 10)
rpcConfig.Set(ConfigBatchTimeout, "2h") // very long delay, so need to rely on batch size to be hit for sending a batch
rpcConfig.Set(ConfigBatchSize, 2)
rpcConfig.Set(ConfigBatchMaxDispatchConcurrency, 1)
Expand Down Expand Up @@ -879,7 +879,7 @@ func TestBatchRequestsOKWithBatchDelay(t *testing.T) {
rpcConfig := config.RootSection("ut_fs_config")
InitConfig(rpcConfig)
rpcConfig.Set(ConfigBatchEnabled, true)
rpcConfig.Set(ConfigMaxConcurrentRequest, 10)
rpcConfig.Set(ConfigMaxConcurrentRequests, 10)
rpcConfig.Set(ConfigBatchTimeout, "100ms") // very long delay, so need to rely on batch size to be hit for sending a batch
rpcConfig.Set(ConfigBatchSize, 2000)
rpcConfig.Set(ConfigBatchMaxDispatchConcurrency, 1)
Expand Down
6 changes: 3 additions & 3 deletions pkg/rpcbackend/config.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ import (

const (
// ConfigMaxRequestConcurrency the maximum number of concurrent JSON-RPC requests get processed at a time
ConfigMaxConcurrentRequest = "maxConcurrentRequest"
ConfigMaxConcurrentRequests = "maxConcurrentRequests"
// ConfigBatchEnabled whether to enable batching JSON-RPC requests: https://www.jsonrpc.org/specification#batch
ConfigBatchEnabled = "batch.enabled"
// ConfigBatchSize when the amount of queued requests reaches this number, they will be batched and dispatched
Expand Down Expand Up @@ -57,15 +57,15 @@ type RPCClientOptions struct {

func InitConfig(section config.Section) {
section.AddKnownKey(ConfigBatchEnabled, false)
section.AddKnownKey(ConfigMaxConcurrentRequest, 0)
section.AddKnownKey(ConfigMaxConcurrentRequests, 0)
section.AddKnownKey(ConfigBatchSize, DefaultConfigBatchSize)
section.AddKnownKey(ConfigBatchTimeout, DefaultConfigTimeout)
section.AddKnownKey(ConfigBatchMaxDispatchConcurrency, DefaultConfigDispatchConcurrency)
}

func ReadConfig(batchDispatcherContext context.Context, section config.Section) RPCClientOptions {
return RPCClientOptions{
MaxConcurrentRequest: section.GetInt64(ConfigMaxConcurrentRequest),
MaxConcurrentRequest: section.GetInt64(ConfigMaxConcurrentRequests),
BatchOptions: &RPCClientBatchOptions{
Enabled: section.GetBool(ConfigBatchEnabled),
BatchTimeout: section.GetDuration(ConfigBatchTimeout),
Expand Down

0 comments on commit 8d5a8ed

Please sign in to comment.