mirror of https://github.com/tikv/client-go.git
support grpc's shard buffer pool (#1132)
Signed-off-by: Weizhen Wang <wangweizhen@pingcap.com>
This commit is contained in:
parent
99ea0d4d9b
commit
f90605363e
|
|
@ -59,6 +59,8 @@ type TiKVClient struct {
|
|||
GrpcKeepAliveTimeout uint `toml:"grpc-keepalive-timeout" json:"grpc-keepalive-timeout"`
|
||||
// GrpcCompressionType is the compression type for gRPC channel: none or gzip.
|
||||
GrpcCompressionType string `toml:"grpc-compression-type" json:"grpc-compression-type"`
|
||||
// GrpcSharedBufferPool is the flag to control whether to share the buffer pool in the TiKV gRPC clients.
|
||||
GrpcSharedBufferPool bool `toml:"grpc-shared-buffer-pool" json:"grpc-shared-buffer-pool"`
|
||||
// CommitTimeout is the max time which command 'commit' will wait.
|
||||
CommitTimeout string `toml:"commit-timeout" json:"commit-timeout"`
|
||||
AsyncCommit AsyncCommit `toml:"async-commit" json:"async-commit"`
|
||||
|
|
@ -125,6 +127,7 @@ func DefaultTiKVClient() TiKVClient {
|
|||
GrpcKeepAliveTime: 10,
|
||||
GrpcKeepAliveTimeout: 3,
|
||||
GrpcCompressionType: "none",
|
||||
GrpcSharedBufferPool: false,
|
||||
CommitTimeout: "41s",
|
||||
AsyncCommit: AsyncCommit{
|
||||
// FIXME: Find an appropriate default limit.
|
||||
|
|
|
|||
|
|
@ -69,6 +69,7 @@ import (
|
|||
"google.golang.org/grpc/credentials"
|
||||
"google.golang.org/grpc/credentials/insecure"
|
||||
"google.golang.org/grpc/encoding/gzip"
|
||||
"google.golang.org/grpc/experimental"
|
||||
"google.golang.org/grpc/keepalive"
|
||||
"google.golang.org/grpc/metadata"
|
||||
)
|
||||
|
|
@ -286,7 +287,9 @@ func (a *connArray) Init(addr string, security config.Security, idleNotify *uint
|
|||
Timeout: time.Duration(keepAliveTimeout) * time.Second,
|
||||
}),
|
||||
}, opts...)
|
||||
|
||||
if cfg.TiKVClient.GrpcSharedBufferPool {
|
||||
opts = append(opts, experimental.WithRecvBufferPool(grpc.NewSharedBufferPool()))
|
||||
}
|
||||
conn, err := a.monitoredDial(
|
||||
ctx,
|
||||
fmt.Sprintf("%s-%d", a.target, i),
|
||||
|
|
|
|||
Loading…
Reference in New Issue