mirror of https://github.com/tikv/client-java.git
Merge branch 'master' into discard-row-checksum
Commit eecfdcf994
@@ -44,23 +44,20 @@ jobs:
       - name: Start TiUP Playground
         run: |
           # Start TiKV in APIV1TTL
-          /home/runner/.tiup/bin/tiup playground ${{ matrix.tikv_version }} --mode tikv-slim --kv 1 --without-monitor --kv.config /home/runner/work/client-java/client-java/.github/config/tikv_rawkv.toml --pd.config /home/runner/work/client-java/client-java/.github/config/pd.toml &> raw.out 2>&1 &
+          /home/runner/.tiup/bin/tiup playground ${{ matrix.tikv_version }} --mode tikv-slim --kv 1 --without-monitor --kv.config /home/runner/work/client-java/client-java/.github/config/tikv_rawkv.toml --pd.config /home/runner/work/client-java/client-java/.github/config/pd.toml --pd.port 2379 2>&1 &

           # The first run of `tiup` has to download all components so it'll take longer.
           sleep 1m 30s

           # Start TiKV in APIV1
-          /home/runner/.tiup/bin/tiup playground ${{ matrix.tikv_version }} --mode tikv-slim --kv 1 --without-monitor --kv.config /home/runner/work/client-java/client-java/.github/config/tikv_txnkv.toml --pd.config /home/runner/work/client-java/client-java/.github/config/pd.toml &> txn.out 2>&1 &
+          /home/runner/.tiup/bin/tiup playground ${{ matrix.tikv_version }} --mode tikv-slim --kv 1 --without-monitor --kv.config /home/runner/work/client-java/client-java/.github/config/tikv_txnkv.toml --pd.config /home/runner/work/client-java/client-java/.github/config/pd.toml --pd.port 2381 2>&1 &

           sleep 30s

-          # Parse PD address from `tiup` output
-          echo "RAWKV_PD_ADDRESSES=$(cat raw.out | grep -oP '(?<=PD client endpoints: \[)[0-9\.:]+(?=\])')" >> $GITHUB_ENV
-          echo "TXNKV_PD_ADDRESSES=$(cat txn.out | grep -oP '(?<=PD client endpoints: \[)[0-9\.:]+(?=\])')" >> $GITHUB_ENV
+          # Get PD address
+          echo "RAWKV_PD_ADDRESSES=127.0.0.1:2379" >> $GITHUB_ENV
+          echo "TXNKV_PD_ADDRESSES=127.0.0.1:2381" >> $GITHUB_ENV

-          # Log the output
-          echo "$(cat raw.out)" >&2
-          echo "$(cat txn.out)" >&2
       - name: Run Integration Test
         run: mvn clean test
       - name: Upload coverage
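The workflow now exports fixed PD endpoints instead of parsing them out of `tiup` logs. For context, here is a minimal sketch (not part of the commit) of how a test could pick up those variables with the client's documented RawKV API; the class name and fallback address are illustrative, and the shaded ByteString import assumes a 3.x client.

import org.tikv.common.TiConfiguration;
import org.tikv.common.TiSession;
import org.tikv.raw.RawKVClient;
import org.tikv.shade.com.google.protobuf.ByteString;

// Illustrative smoke test: read the PD endpoint exported by the workflow,
// falling back to the pinned RawKV port when the variable is unset.
public class RawKvSmokeSketch {
  public static void main(String[] args) throws Exception {
    String pdAddr = System.getenv("RAWKV_PD_ADDRESSES");
    if (pdAddr == null) {
      pdAddr = "127.0.0.1:2379"; // matches --pd.port 2379 above
    }
    TiConfiguration conf = TiConfiguration.createRawDefault(pdAddr);
    TiSession session = TiSession.create(conf);
    RawKVClient client = session.createRawClient();
    client.put(ByteString.copyFromUtf8("smoke-key"), ByteString.copyFromUtf8("smoke-value"));
    System.out.println("read back: " + client.get(ByteString.copyFromUtf8("smoke-key")));
    client.close();
    session.close();
  }
}

Pinning --pd.port makes the two playground clusters' addresses deterministic, which is why the env vars can now be hard-coded rather than scraped from output files.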
pom.xml (4 changed lines)
@@ -3,7 +3,7 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.tikv</groupId>
   <artifactId>tikv-client-java</artifactId>
-  <version>3.3.0-SNAPSHOT</version>
+  <version>3.3.4-SNAPSHOT</version>
   <packaging>jar</packaging>
   <name>TiKV Java Client</name>
   <description>A Java Client for TiKV</description>
@@ -62,7 +62,7 @@
     <gson.version>2.8.9</gson.version>
     <powermock.version>1.6.6</powermock.version>
     <jackson-annotations.version>2.13.2</jackson-annotations.version>
-    <jackson.version>2.13.2.2</jackson.version>
+    <jackson.version>2.13.4.2</jackson.version>
     <trove4j.version>3.0.1</trove4j.version>
     <jetcd.version>0.4.1</jetcd.version>
     <joda-time.version>2.9.9</joda-time.version>
@@ -177,8 +177,13 @@ public class RegionManager {
         Pair<Metapb.Region, Metapb.Peer> regionAndLeader = pdClient.getRegionByKey(backOffer, key);
         region =
             cache.putRegion(createRegion(regionAndLeader.first, regionAndLeader.second, backOffer));
+        logger.debug(
+            String.format(
+                "get region id: %d with leader: %d",
+                region.getId(), region.getLeader().getStoreId()));
       }
     } catch (Exception e) {
+      logger.warn("Get region failed: ", e);
       return null;
     } finally {
       requestTimer.observeDuration();
@@ -228,17 +233,31 @@ public class RegionManager {

     TiStore store = null;
     if (storeType == TiStoreType.TiKV) {
-      Peer peer = region.getCurrentReplica();
-      store = getStoreById(peer.getStoreId(), backOffer);
+      // check from the first replica in case it recovers
+      List<Peer> replicaList = region.getReplicaList();
+      for (int i = 0; i < replicaList.size(); i++) {
+        Peer peer = replicaList.get(i);
+        store = getStoreById(peer.getStoreId(), backOffer);
+        if (store.isReachable()) {
+          // update replica's index
+          region.setReplicaIdx(i);
+          break;
+        }
+        logger.info("Store {} is unreachable, try to get the next replica", peer.getStoreId());
+      }
+      // Does not set unreachable store to null in case it is incompatible with GrpcForward
+      if (store == null || !store.isReachable()) {
+        logger.warn("No TiKV store available for region: " + region);
+      }
     } else {
       List<TiStore> tiflashStores = new ArrayList<>();
       for (Peer peer : region.getLearnerList()) {
         TiStore s = getStoreById(peer.getStoreId(), backOffer);
-        for (Metapb.StoreLabel label : s.getStore().getLabelsList()) {
-          if (label.getKey().equals(storeType.getLabelKey())
-              && label.getValue().equals(storeType.getLabelValue())) {
-            tiflashStores.add(s);
-          }
+        if (!s.isReachable()) {
+          continue;
+        }
+        if (s.isTiFlash()) {
+          tiflashStores.add(s);
         }
       }
       // select a tiflash with Round-Robin strategy
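The heart of this hunk is the selection loop: walk the replica list from the front, keep the first reachable store, and remember its index so later requests start from it. A self-contained sketch of the same first-reachable fallback follows; Replica and its fields are hypothetical stand-ins, not the client's real types.

import java.util.Arrays;
import java.util.List;

public class ReplicaFallbackSketch {
  // Hypothetical stand-in for a region replica; not the client's real type.
  static final class Replica {
    final long storeId;
    final boolean reachable;

    Replica(long storeId, boolean reachable) {
      this.storeId = storeId;
      this.reachable = reachable;
    }
  }

  // Returns the index of the first reachable replica, or -1 if none responds.
  // The caller would persist the index, as region.setReplicaIdx(i) does above.
  static int firstReachable(List<Replica> replicas) {
    for (int i = 0; i < replicas.size(); i++) {
      if (replicas.get(i).reachable) {
        return i;
      }
      System.out.printf(
          "Store %d is unreachable, try to get the next replica%n", replicas.get(i).storeId);
    }
    return -1; // no reachable replica; the hunk above logs a warning but keeps the last store
  }

  public static void main(String[] args) {
    List<Replica> replicas =
        Arrays.asList(new Replica(1, false), new Replica(2, true), new Replica(3, true));
    System.out.println("selected replica index: " + firstReachable(replicas)); // prints 1
  }
}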
@@ -20,17 +20,22 @@ import io.grpc.ManagedChannel;
 import io.grpc.health.v1.HealthCheckRequest;
 import io.grpc.health.v1.HealthCheckResponse;
 import io.grpc.health.v1.HealthGrpc;
+import io.grpc.stub.ClientCalls;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeUnit;
+import java.util.function.Supplier;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.tikv.common.ReadOnlyPDClient;
 import org.tikv.common.util.ChannelFactory;
 import org.tikv.common.util.ConcreteBackOffer;
 import org.tikv.kvproto.Metapb;
+import org.tikv.kvproto.Mpp;
+import org.tikv.kvproto.Mpp.IsAliveRequest;
+import org.tikv.kvproto.TikvGrpc;

 public class StoreHealthyChecker implements Runnable {
   private static final Logger logger = LoggerFactory.getLogger(StoreHealthyChecker.class);
@@ -75,6 +80,30 @@ public class StoreHealthyChecker implements Runnable {

   private boolean checkStoreHealth(TiStore store) {
     String addressStr = store.getStore().getAddress();
+    if (store.isTiFlash()) {
+      return checkTiFlashHealth(addressStr);
+    }
+    return checkTiKVHealth(addressStr);
+  }
+
+  private boolean checkTiFlashHealth(String addressStr) {
+    try {
+      ManagedChannel channel = channelFactory.getChannel(addressStr, pdClient.getHostMapping());
+      TikvGrpc.TikvBlockingStub stub =
+          TikvGrpc.newBlockingStub(channel).withDeadlineAfter(timeout, TimeUnit.MILLISECONDS);
+      Supplier<IsAliveRequest> factory = () -> Mpp.IsAliveRequest.newBuilder().build();
+      Mpp.IsAliveResponse resp =
+          ClientCalls.blockingUnaryCall(
+              stub.getChannel(), TikvGrpc.getIsAliveMethod(), stub.getCallOptions(), factory.get());
+      return resp != null && resp.getAvailable();
+    } catch (Exception e) {
+      logger.info(
+          "fail to check TiFlash health, regard as unhealthy. TiFlash address: " + addressStr, e);
+      return false;
+    }
+  }
+
+  private boolean checkTiKVHealth(String addressStr) {
     try {
       ManagedChannel channel = channelFactory.getChannel(addressStr, pdClient.getHostMapping());
       HealthGrpc.HealthBlockingStub stub =
@@ -83,6 +112,7 @@ public class StoreHealthyChecker implements Runnable {
       HealthCheckResponse resp = stub.check(req);
       return resp.getStatus() == HealthCheckResponse.ServingStatus.SERVING;
     } catch (Exception e) {
+      logger.info("fail to check TiKV health, regard as unhealthy. TiKV address: " + addressStr, e);
       return false;
     }
   }
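Both health probes reduce to one blocking RPC with a deadline: an Mpp.IsAlive call for TiFlash and the standard gRPC health check for TiKV. For reference, a standalone version of the TiKV-side check built only on io.grpc and the grpc-services health classes; the target address and timeout are illustrative values, not taken from the client's config.

import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import io.grpc.health.v1.HealthCheckRequest;
import io.grpc.health.v1.HealthCheckResponse;
import io.grpc.health.v1.HealthGrpc;
import java.util.concurrent.TimeUnit;

public class GrpcHealthCheckSketch {
  public static void main(String[] args) {
    // Illustrative TiKV address; the checker gets it from store metadata.
    ManagedChannel channel =
        ManagedChannelBuilder.forTarget("127.0.0.1:20160").usePlaintext().build();
    try {
      HealthGrpc.HealthBlockingStub stub =
          HealthGrpc.newBlockingStub(channel).withDeadlineAfter(2000, TimeUnit.MILLISECONDS);
      // An empty service name asks about the server's overall health.
      HealthCheckRequest req = HealthCheckRequest.newBuilder().setService("").build();
      HealthCheckResponse resp = stub.check(req);
      System.out.println(
          "serving: " + (resp.getStatus() == HealthCheckResponse.ServingStatus.SERVING));
    } catch (Exception e) {
      // Any RPC failure is treated the same way the checker treats it: unhealthy.
      System.out.println("unreachable, regarded as unhealthy: " + e.getMessage());
    } finally {
      channel.shutdownNow();
    }
  }
}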
@@ -126,6 +126,14 @@ public class TiRegion implements Serializable {
     return getCurrentReplica();
   }

+  public void setReplicaIdx(int idx) {
+    replicaIdx = idx;
+  }
+
+  public List<Peer> getReplicaList() {
+    return replicaList;
+  }
+
   private boolean isLeader(Peer peer) {
     return getLeader().equals(peer);
   }
@@ -105,4 +105,14 @@ public class TiStore implements Serializable {
   public long getId() {
     return this.store.getId();
   }
+
+  public boolean isTiFlash() {
+    for (Metapb.StoreLabel label : store.getLabelsList()) {
+      if (label.getKey().equals(TiStoreType.TiFlash.getLabelKey())
+          && label.getValue().equals(TiStoreType.TiFlash.getLabelValue())) {
+        return true;
+      }
+    }
+    return false;
+  }
 }
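isTiFlash is a plain label scan: a store counts as TiFlash when one of its labels matches TiStoreType.TiFlash's key/value pair. A minimal stand-alone equivalent follows, assuming the conventional engine=tiflash label; the real client reads both strings from TiStoreType, so the literals here are an assumption.

import java.util.HashMap;
import java.util.Map;

public class TiFlashLabelSketch {
  // Assumed label convention: TiFlash stores carry the label engine=tiflash.
  static boolean isTiFlash(Map<String, String> storeLabels) {
    return "tiflash".equals(storeLabels.get("engine"));
  }

  public static void main(String[] args) {
    Map<String, String> tiflash = new HashMap<>();
    tiflash.put("engine", "tiflash");
    Map<String, String> tikv = new HashMap<>();
    tikv.put("zone", "z1");
    System.out.println(isTiFlash(tiflash)); // true
    System.out.println(isTiFlash(tikv)); // false
  }
}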