mirror of https://github.com/tikv/client-java.git
Initial java client for TiKV
This commit is contained in:
commit
47af16be9e
|
|
@ -0,0 +1,59 @@
|
|||
#ignore idea configuration
|
||||
.idea
|
||||
*.iml
|
||||
pub.sh
|
||||
|
||||
# ignore compiled classes
|
||||
target
|
||||
|
||||
# ignore version info
|
||||
src/main/java/com/pingcap/tikv/TiVersion.java
|
||||
|
||||
# ignore eclipse configuration
|
||||
.project
|
||||
|
||||
# ignore maven related files
|
||||
dependency-reduced-pom.xml
|
||||
# ignore emacs configuration
|
||||
.meghanada
|
||||
# Compiled class file
|
||||
*.class
|
||||
|
||||
# Log file
|
||||
*.log
|
||||
|
||||
# BlueJ files
|
||||
*.ctxt
|
||||
|
||||
# Mobile Tools for Java (J2ME)
|
||||
.mtj.tmp/
|
||||
|
||||
# Package Files #
|
||||
*.jar
|
||||
*.war
|
||||
*.ear
|
||||
*.zip
|
||||
*.tar.gz
|
||||
*.rar
|
||||
|
||||
# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
|
||||
hs_err_pid*
|
||||
|
||||
# ignore Mac files
|
||||
.DS_Store
|
||||
|
||||
# ignore gradle
|
||||
.gradle
|
||||
|
||||
metastore_db/
|
||||
.java-version
|
||||
|
||||
# ignore tikv-client proto files
|
||||
kvproto
|
||||
tipb
|
||||
proto
|
||||
raft-rs
|
||||
|
||||
# ignore tikv-client bazel build
|
||||
bazel-*
|
||||
|
||||
|
|
@ -0,0 +1,49 @@
|
|||
load(
|
||||
"@com_github_zhexuany_bazel_shade//:java_shade.bzl",
|
||||
"java_shade"
|
||||
)
|
||||
package(default_visibility = ["//visibility:public"])
|
||||
|
||||
java_binary(
|
||||
name = "tikv-java-client",
|
||||
main_class = "com.pingcap.tikv.Main",
|
||||
runtime_deps = [
|
||||
"//src/main/java/com/pingcap/tikv:tikv-java-client-lib",
|
||||
":shaded_scalding",
|
||||
],
|
||||
)
|
||||
java_shade(
|
||||
name = "shaded_args",
|
||||
input_jar = "@io_netty_netty_codec_socks//jar",
|
||||
rules = "shading_rule"
|
||||
)
|
||||
|
||||
java_import(
|
||||
name = "shaded_scalding",
|
||||
jars = ["shaded_args.jar"]
|
||||
)
|
||||
|
||||
filegroup(
|
||||
name = "protos",
|
||||
srcs = glob([
|
||||
"kvproto/proto/*.proto",
|
||||
"kvproto/_vendor/src/github.com/gogo/protobuf/gogoproto/*.proto",
|
||||
"tipb/proto/*.proto",
|
||||
]),
|
||||
)
|
||||
|
||||
load("@org_pubref_rules_protobuf//java:rules.bzl", "java_proto_library")
|
||||
|
||||
java_proto_library(
|
||||
name = "java",
|
||||
imports = [
|
||||
"external/com_google_protobuf/src/",
|
||||
"kvproto/proto",
|
||||
"kvproto/_vendor/src/github.com/gogo/protobuf",
|
||||
"tipb/proto",
|
||||
],
|
||||
inputs = ["@com_google_protobuf//:well_known_protos"],
|
||||
protos = [":protos"],
|
||||
verbose = 0, # 0=no output, 1=show protoc command, 2+ more...
|
||||
with_grpc = True,
|
||||
)
|
||||
4
LICENSE
4
LICENSE
|
|
@ -178,7 +178,7 @@
|
|||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
|
|
@ -186,7 +186,7 @@
|
|||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
Copyright {2017} {PingCap.Inc}
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
|
|
|
|||
|
|
@ -0,0 +1,6 @@
|
|||
run:
|
||||
bazel run :tikv-java-client
|
||||
uber_jar:
|
||||
bazel build :tikv-java-client_deploy.jar
|
||||
test:
|
||||
bazel test //src/test/java/com/pingcap/tikv:tikv-client-java-test --test_output=errors --test_timeout=3600
|
||||
|
|
@ -0,0 +1,189 @@
|
|||
## Ti-Client in Java [Under Construction]
|
||||
|
||||
A Java client for [TiDB](https://github.com/pingcap/tidb)/[TiKV](https://github.com/pingcap/tikv).
|
||||
It is supposed to:
|
||||
+ Communicate via [gRPC](http://www.grpc.io/)
|
||||
+ Talk to Placement Driver searching for a region
|
||||
+ Talk to TiKV for reading/writing data and the resulting data is encoded/decoded just like what we do in TiDB.
|
||||
+ Talk to Coprocessor for calculation pushdown
|
||||
|
||||
## How to build
|
||||
|
||||
The alternative way to build a usable jar for testing will be
|
||||
```
|
||||
mvn clean install -Dmaven.test.skip=true
|
||||
```
|
||||
|
||||
The following command can install dependencies for you.
|
||||
```
|
||||
mvn package
|
||||
```
|
||||
|
||||
Alternatively, you can use `bazel` for much faster build. When you try this approach, you should run `git submodule update --init --recursive` before you build project.
|
||||
|
||||
Making a uber jar:
|
||||
```
|
||||
make uber_jar
|
||||
```
|
||||
run Main class:
|
||||
```
|
||||
make run
|
||||
```
|
||||
|
||||
run test cases:
|
||||
```
|
||||
make test
|
||||
```
|
||||
|
||||
this project is designed to hook with `pd` and `tikv` which you can find in `PingCap` github page.
|
||||
|
||||
When you work with this project, you have to communicate with `pd` and `tikv`. There is a script taking care of this. By executing the following commands, `pd` and `tikv` can be executed on background.
|
||||
```
|
||||
cd scripts
|
||||
make pd
|
||||
make tikv
|
||||
```
|
||||
|
||||
## How to use for now
|
||||
Since it's not quite complete, a usage example for now is:
|
||||
```java
|
||||
// Init tidb cluster configuration
|
||||
TiConfiguration conf = TiConfiguration.createDefault("127.0.0.1:2379");
|
||||
TiSession session = TiSession.create(conf);
|
||||
Catalog cat = session.getCatalog();
|
||||
TiDBInfo db = cat.getDatabase("tpch_test");
|
||||
TiTableInfo table = cat.getTable(db, "customer");
|
||||
Snapshot snapshot = session.createSnapshot();
|
||||
|
||||
// Generate select ranges
|
||||
ByteString startKey = TableCodec.encodeRowKeyWithHandle(table.getId(), Long.MIN_VALUE);
|
||||
ByteString endKey = TableCodec.encodeRowKeyWithHandle(table.getId(), Long.MAX_VALUE);
|
||||
Coprocessor.KeyRange keyRange = Coprocessor.KeyRange.newBuilder().setStart(startKey).setEnd(endKey).build();
|
||||
List<Coprocessor.KeyRange> ranges = new ArrayList<>();
|
||||
ranges.add(keyRange);
|
||||
|
||||
|
||||
// Create select request
|
||||
TiSelectRequest selectRequest = new TiSelectRequest();
|
||||
selectRequest.addRanges(ranges);
|
||||
selectRequest.addField(TiColumnRef.create("c_mktsegment", table));
|
||||
selectRequest.setTableInfo(table);
|
||||
selectRequest.setStartTs(session.getTimestamp().getVersion());
|
||||
selectRequest.addWhere(new GreaterEqual(TiConstant.create(5), TiConstant.create(5)));
|
||||
selectRequest.addGroupByItem(TiByItem.create(TiColumnRef.create("c_mktsegment"), false));
|
||||
selectRequest.setLimit(10);
|
||||
selectRequest.bind();
|
||||
|
||||
// Fetch data
|
||||
Iterator<Row> iterator = snapshot.select(selectRequest);
|
||||
System.out.println("Show result:");
|
||||
while (iterator.hasNext()) {
|
||||
Row rowData = iterator.next();
|
||||
for (int i = 0; i < rowData.fieldCount(); i++) {
|
||||
System.out.print(rowData.get(i, null) + "\t");
|
||||
}
|
||||
System.out.println();
|
||||
}
|
||||
|
||||
```
|
||||
Result:
|
||||
```java
|
||||
Show result:
|
||||
BUILDING
|
||||
AUTOMOBILE
|
||||
MACHINERY
|
||||
HOUSEHOLD
|
||||
FURNITURE
|
||||
```
|
||||
|
||||
## Raw TiKV-Client in Java
|
||||
Java Implementation of Raw TiKV-Client
|
||||
|
||||
A demo is available in [KVRawClientTest](https://github.com/birdstorm/KVRawClientTest/)
|
||||
|
||||
### Build
|
||||
```
|
||||
mvn clean install -Dmaven.test.skip=true
|
||||
```
|
||||
|
||||
### Use as maven dependency
|
||||
After building, add following lines into your `pom.xml`
|
||||
```xml
|
||||
<dependency>
|
||||
<groupId>com.pingcap.tikv</groupId>
|
||||
<artifactId>tikv-client-java</artifactId>
|
||||
<version>2.0-SNAPSHOT</version>
|
||||
</dependency>
|
||||
```
|
||||
|
||||
### Entrance
|
||||
`com.pingcap.tikv.RawKVClient`
|
||||
|
||||
### API
|
||||
|
||||
```java
|
||||
/**
|
||||
* create a RawKVClient using specific pd addresses
|
||||
*
|
||||
* @param address pd addresses (comma separated)
|
||||
*/
|
||||
static RawKVClient create(String address)
|
||||
```
|
||||
|
||||
```java
|
||||
/**
|
||||
* Put a raw key-value pair to TiKV
|
||||
*
|
||||
* @param key raw key
|
||||
* @param value raw value
|
||||
*/
|
||||
void put(ByteString key, ByteString value)
|
||||
```
|
||||
|
||||
```java
|
||||
/**
|
||||
* Get a raw key-value pair from TiKV if key exists
|
||||
*
|
||||
* @param key raw key
|
||||
* @return a ByteString value if key exists, ByteString.EMPTY if key does not exist
|
||||
*/
|
||||
ByteString get(ByteString key)
|
||||
```
|
||||
|
||||
```java
|
||||
/**
|
||||
* Scan raw key-value pairs from TiKV in range [startKey, endKey)
|
||||
*
|
||||
* @param startKey raw start key, inclusive
|
||||
* @param endKey raw end key, exclusive
|
||||
* @return list of key-value pairs in range
|
||||
*/
|
||||
List<Kvrpcpb.KvPair> scan(ByteString startKey, ByteString endKey)
|
||||
```
|
||||
|
||||
```java
|
||||
/**
|
||||
* Scan raw key-value pairs from TiKV in range [startKey, endKey)
|
||||
*
|
||||
* @param startKey raw start key, inclusive
|
||||
* @param limit limit of key-value pairs
|
||||
* @return list of key-value pairs in range
|
||||
*/
|
||||
List<Kvrpcpb.KvPair> scan(ByteString startKey, int limit)
|
||||
```
|
||||
|
||||
```java
|
||||
/**
|
||||
* Delete a raw key-value pair from TiKV if key exists
|
||||
*
|
||||
* @param key raw key to be deleted
|
||||
*/
|
||||
void delete(ByteString key)
|
||||
```
|
||||
|
||||
|
||||
## TODO
|
||||
Contributions are welcomed. Here is a [TODO](https://github.com/pingcap/tikv-client-java/wiki/TODO-Lists) and you might contact maxiaoyu@pingcap.com if needed.
|
||||
|
||||
## License
|
||||
Apache 2.0 license. See the [LICENSE](./LICENSE) file for details.
|
||||
|
|
@ -0,0 +1,112 @@
|
|||
maven_jar(
|
||||
name = "com_fasterxml_jackson_core_jackson_annotations",
|
||||
artifact = "com.fasterxml.jackson.core:jackson-annotations:2.6.6",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "com_fasterxml_jackson_core_jackson_databind",
|
||||
artifact = "com.fasterxml.jackson.core:jackson-databind:2.6.6",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "com_fasterxml_jackson_core_jackson_core",
|
||||
artifact = "com.fasterxml.jackson.core:jackson-core:2.6.6",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "org_slf4j_slf4j_api",
|
||||
artifact = "org.slf4j:slf4j-api:1.7.16",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "org_slf4j_jcl_over_slf4j",
|
||||
artifact = "org.slf4j:jcl-over-slf4j:1.7.16",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "org_slf4j_jul_to_slf4j",
|
||||
artifact = "org.slf4j:jul-to-slf4j:1.7.16",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "log4j_log4j",
|
||||
artifact = "log4j:log4j:1.2.17",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "joda_time",
|
||||
artifact = "joda-time:joda-time:2.9.9",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "junit_junit",
|
||||
artifact = "junit:junit:4.12",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "org_hamcrest_hamcrest_core",
|
||||
artifact = "org.hamcrest:hamcrest-core:1.3",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "org_javassist_javassist",
|
||||
artifact = "org.javassist:javassist:3.21.0-GA",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "org_powermock_powermock_reflect",
|
||||
artifact = "org.powermock:powermock-reflect:1.6.6",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "org_powermock_powermock_api_mockito",
|
||||
artifact = "org.powermock:powermock-api-mockito:1.6.6",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "org_mockito_mockito_core",
|
||||
artifact = "org.mockito:mockito-core:1.10.19",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "org_objenesis_objenesis",
|
||||
artifact = "org.objenesis:objenesis:2.1",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "org_powermock_powermock_api_mockito_common",
|
||||
artifact = "org.powermock:powermock-api-mockito-common:1.6.6",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "org_powermock_powermock_api_support",
|
||||
artifact = "org.powermock:powermock-api-support:1.6.6",
|
||||
)
|
||||
|
||||
maven_jar(
|
||||
name = "net_sf_trove4j_trove4j",
|
||||
artifact = "net.sf.trove4j:trove4j:3.0.1",
|
||||
)
|
||||
|
||||
git_repository(
|
||||
name = "org_pubref_rules_protobuf",
|
||||
remote = "https://github.com/pubref/rules_protobuf",
|
||||
tag = "v0.8.1",
|
||||
)
|
||||
|
||||
load("@org_pubref_rules_protobuf//java:rules.bzl", "java_proto_repositories")
|
||||
java_proto_repositories()
|
||||
|
||||
bazel_shade_version = "master"
|
||||
http_archive(
|
||||
name = "com_github_zhexuany_bazel_shade",
|
||||
url = "https://github.com/zhexuany/bazel_shade_plugin/archive/%s.zip"%bazel_shade_version,
|
||||
type = "zip",
|
||||
strip_prefix= "bazel_shade_plugin-%s"%bazel_shade_version
|
||||
)
|
||||
load(
|
||||
"@com_github_zhexuany_bazel_shade//:java_shade.bzl",
|
||||
"java_shade_repositories",
|
||||
"java_shade"
|
||||
)
|
||||
java_shade_repositories()
|
||||
|
|
@ -0,0 +1,458 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<groupId>com.pingcap.tikv</groupId>
|
||||
<artifactId>tikv-client-java</artifactId>
|
||||
<version>2.0-SNAPSHOT</version>
|
||||
<packaging>jar</packaging>
|
||||
<name>TiSpark Project TiKV Java Client</name>
|
||||
|
||||
<properties>
|
||||
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
|
||||
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
|
||||
<protobuf.version>3.1.0</protobuf.version>
|
||||
<junit.version>4.12</junit.version>
|
||||
<log4j.version>1.2.17</log4j.version>
|
||||
<slf4j.version>1.7.16</slf4j.version>
|
||||
<grpc.version>1.7.0</grpc.version>
|
||||
<powermock.version>1.6.6</powermock.version>
|
||||
<jackson.version>2.8.11</jackson.version>
|
||||
<jackson.databind.version>2.8.11.1</jackson.databind.version>
|
||||
<trove4j.version>3.0.1</trove4j.version>
|
||||
<joda-time.version>2.9.9</joda-time.version>
|
||||
<joda-convert.version>1.9.2</joda-convert.version>
|
||||
<spark.version>2.3.2</spark.version>
|
||||
<proto.folder>${basedir}/proto</proto.folder>
|
||||
<scala.binary.version>2.11</scala.binary.version>
|
||||
<scala.version>2.11</scala.version>
|
||||
<scalatest.version>3.0.4</scalatest.version>
|
||||
<gpg.skip>true</gpg.skip>
|
||||
<javadoc.skip>true</javadoc.skip>
|
||||
</properties>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.apache.logging.log4j</groupId>
|
||||
<artifactId>log4j-api</artifactId>
|
||||
<version>2.8.1</version>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.logging.log4j</groupId>
|
||||
<artifactId>log4j-core</artifactId>
|
||||
<version>2.8.1</version>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.spark</groupId>
|
||||
<artifactId>spark-core_${scala.binary.version}</artifactId>
|
||||
<version>${spark.version}</version>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.spark</groupId>
|
||||
<artifactId>spark-catalyst_${scala.binary.version}</artifactId>
|
||||
<version>${spark.version}</version>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.spark</groupId>
|
||||
<artifactId>spark-sql_${scala.binary.version}</artifactId>
|
||||
<version>${spark.version}</version>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.spark</groupId>
|
||||
<artifactId>spark-hive_${scala.binary.version}</artifactId>
|
||||
<version>${spark.version}</version>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.spark</groupId>
|
||||
<artifactId>spark-hive-thriftserver_${scala.binary.version}</artifactId>
|
||||
<version>${spark.version}</version>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.spark</groupId>
|
||||
<artifactId>spark-unsafe_${scala.binary.version}</artifactId>
|
||||
<version>${spark.version}</version>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<version>${slf4j.version}</version>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-log4j12</artifactId>
|
||||
<version>${slf4j.version}</version>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>jul-to-slf4j</artifactId>
|
||||
<version>${slf4j.version}</version>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>jcl-over-slf4j</artifactId>
|
||||
<version>${slf4j.version}</version>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>log4j</groupId>
|
||||
<artifactId>log4j</artifactId>
|
||||
<version>${log4j.version}</version>
|
||||
</dependency>
|
||||
<!-- https://mvnrepository.com/artifact/net.sf.trove4j/trove4j -->
|
||||
<dependency>
|
||||
<groupId>net.sf.trove4j</groupId>
|
||||
<artifactId>trove4j</artifactId>
|
||||
<version>${trove4j.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sangupta</groupId>
|
||||
<artifactId>murmur</artifactId>
|
||||
<version>1.0.0</version>
|
||||
</dependency>
|
||||
<!-- grpc dependencies -->
|
||||
<dependency>
|
||||
<groupId>io.grpc</groupId>
|
||||
<artifactId>grpc-netty</artifactId>
|
||||
<version>${grpc.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.grpc</groupId>
|
||||
<artifactId>grpc-protobuf</artifactId>
|
||||
<version>${grpc.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.grpc</groupId>
|
||||
<artifactId>grpc-stub</artifactId>
|
||||
<version>${grpc.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-annotations</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
<version>${jackson.databind.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.module</groupId>
|
||||
<artifactId>jackson-module-scala_${scala.binary.version}</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.google.guava</groupId>
|
||||
<artifactId>guava</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
<version>${joda-time.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.joda</groupId>
|
||||
<artifactId>joda-convert</artifactId>
|
||||
<version>${joda-convert.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.grpc</groupId>
|
||||
<artifactId>grpc-testing</artifactId>
|
||||
<version>${grpc.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
<build>
|
||||
<resources>
|
||||
<resource>
|
||||
<directory>src/main/resources</directory>
|
||||
</resource>
|
||||
</resources>
|
||||
<extensions>
|
||||
<extension>
|
||||
<groupId>kr.motd.maven</groupId>
|
||||
<artifactId>os-maven-plugin</artifactId>
|
||||
<version>1.4.1.Final</version>
|
||||
</extension>
|
||||
</extensions>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>exec-maven-plugin</artifactId>
|
||||
<version>1.6.0</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>clone proto files</id>
|
||||
<configuration>
|
||||
<executable>${basedir}/scripts/proto.sh</executable>
|
||||
</configuration>
|
||||
<phase>validate</phase>
|
||||
<goals>
|
||||
<goal>exec</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<artifactId>maven-resources-plugin</artifactId>
|
||||
<version>2.5</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>copy-resources</id>
|
||||
<phase>validate</phase>
|
||||
<goals>
|
||||
<goal>copy-resources</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<outputDirectory>${proto.folder}</outputDirectory>
|
||||
<resources>
|
||||
<resource>
|
||||
<directory>${basedir}/kvproto/_vendor/src/github.com/gogo/protobuf</directory>
|
||||
<includes>
|
||||
<include>**/gogoproto/**</include>
|
||||
</includes>
|
||||
</resource>
|
||||
<resource>
|
||||
<directory>${basedir}/kvproto/proto</directory>
|
||||
<filtering>true</filtering>
|
||||
</resource>
|
||||
<resource>
|
||||
<directory>${basedir}/raft-rs/proto</directory>
|
||||
<filtering>true</filtering>
|
||||
</resource>
|
||||
<resource>
|
||||
<directory>${basedir}/tipb/proto</directory>
|
||||
<filtering>true</filtering>
|
||||
</resource>
|
||||
</resources>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<plugin>
|
||||
<groupId>org.xolstice.maven.plugins</groupId>
|
||||
<artifactId>protobuf-maven-plugin</artifactId>
|
||||
<version>0.5.0</version>
|
||||
<configuration>
|
||||
<protocArtifact>com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier}</protocArtifact>
|
||||
<protoSourceRoot>${proto.folder}</protoSourceRoot>
|
||||
<includes>
|
||||
<param>**/*.proto</param>
|
||||
</includes>
|
||||
<pluginId>grpc-java</pluginId>
|
||||
<pluginArtifact>io.grpc:protoc-gen-grpc-java:1.4.0:exe:${os.detected.classifier}</pluginArtifact>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<goals>
|
||||
<goal>compile</goal>
|
||||
<goal>compile-custom</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<!-- Compiler Plug-in -->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
<version>3.7.0</version>
|
||||
<configuration>
|
||||
<source>1.8</source>
|
||||
<target>1.8</target>
|
||||
<encoding>UTF-8</encoding>
|
||||
<showWarnings>true</showWarnings>
|
||||
<showDeprecation>true</showDeprecation>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<!-- Jar Plug-in -->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-jar-plugin</artifactId>
|
||||
<version>3.0.2</version>
|
||||
<configuration>
|
||||
<archive>
|
||||
<addMavenDescriptor>false</addMavenDescriptor>
|
||||
</archive>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<!-- Source Plug-in -->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-source-plugin</artifactId>
|
||||
<version>3.0.1</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>attach-sources</id>
|
||||
<phase>deploy</phase>
|
||||
<goals>
|
||||
<goal>jar-no-fork</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<!-- Clean Plug-in -->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-clean-plugin</artifactId>
|
||||
<version>3.0.0</version>
|
||||
<configuration>
|
||||
<filesets>
|
||||
<fileset>
|
||||
<directory>${proto.folder}</directory>
|
||||
</fileset>
|
||||
</filesets>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<!-- Javadoc Plug-in -->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-javadoc-plugin</artifactId>
|
||||
<version>2.9.1</version>
|
||||
<configuration>
|
||||
<skip>${javadoc.skip}</skip>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>attach-javadocs</id>
|
||||
<goals>
|
||||
<goal>jar</goal>
|
||||
</goals>
|
||||
<configuration> <!-- add this to disable checking -->
|
||||
<additionalparam>-Xdoclint:none</additionalparam>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<!--GPG Signed Components-->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-gpg-plugin</artifactId>
|
||||
<version>1.5</version>
|
||||
<configuration>
|
||||
<keyname>Yifei Wu</keyname>
|
||||
<skip>${gpg.skip}</skip>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>sign-artifacts</id>
|
||||
<phase>verify</phase>
|
||||
<goals>
|
||||
<goal>sign</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<!-- Assembly Plug-in -->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-assembly-plugin</artifactId>
|
||||
<version>3.1.0</version>
|
||||
<configuration>
|
||||
<descriptorRefs>
|
||||
<descriptorRef>jar-with-dependencies</descriptorRef>
|
||||
</descriptorRefs>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>assemble-all</id>
|
||||
<phase>package</phase>
|
||||
<goals>
|
||||
<goal>single</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<!--- Needs to shade Protobuf 3 since other projects might use other version -->
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-shade-plugin</artifactId>
|
||||
<version>3.1.0</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<phase>package</phase>
|
||||
<goals>
|
||||
<goal>shade</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<transformers>
|
||||
<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
|
||||
</transformers>
|
||||
<relocations>
|
||||
<relocation>
|
||||
<pattern>com.fasterxml</pattern>
|
||||
<shadedPattern>shade.com.fasterxml.jackson</shadedPattern>
|
||||
</relocation>
|
||||
<relocation>
|
||||
<pattern>io.grpc</pattern>
|
||||
<shadedPattern>shade.io.grpc</shadedPattern>
|
||||
</relocation>
|
||||
<relocation>
|
||||
<pattern>com.google.protobuf</pattern>
|
||||
<shadedPattern>shade.com.google.protobuf</shadedPattern>
|
||||
</relocation>
|
||||
<relocation>
|
||||
<pattern>io.netty</pattern>
|
||||
<shadedPattern>shade.io.netty</shadedPattern>
|
||||
</relocation>
|
||||
<relocation>
|
||||
<pattern>com.google.common</pattern>
|
||||
<shadedPattern>shade.com.google.common</shadedPattern>
|
||||
</relocation>
|
||||
</relocations>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<!-- Code coverage test -->
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>cobertura-maven-plugin</artifactId>
|
||||
<version>2.7</version>
|
||||
<configuration>
|
||||
<formats>
|
||||
<format>html</format>
|
||||
<format>xml</format>
|
||||
</formats>
|
||||
<check />
|
||||
</configuration>
|
||||
</plugin>
|
||||
<!-- Google-java-format -->
|
||||
<plugin>
|
||||
<groupId>com.coveo</groupId>
|
||||
<artifactId>fmt-maven-plugin</artifactId>
|
||||
<version>2.6.0</version>
|
||||
<configuration>
|
||||
<sourceDirectory>src/main/java</sourceDirectory>
|
||||
<testSourceDirectory>src/test/java</testSourceDirectory>
|
||||
<verbose>true</verbose>
|
||||
<filesNamePattern>.*\.java</filesNamePattern>
|
||||
<skip>false</skip>
|
||||
<skipSortingImports>false</skipSortingImports>
|
||||
<style>google</style>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<goals>
|
||||
<goal>format</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</project>
|
||||
|
|
@ -0,0 +1,34 @@
|
|||
#!/bin/sh
|
||||
#
|
||||
# Copyright 2017 PingCAP, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
if [ -d "kvproto" ]; then
|
||||
# cd kvproto; git pull origin master; cd ..
|
||||
cd kvproto; git checkout 497dd34f807bccdb5bd01009c62c5fd5ced404f3; cd ..
|
||||
else
|
||||
git clone https://github.com/pingcap/kvproto; cd kvproto; git checkout 497dd34f807bccdb5bd01009c62c5fd5ced404f3; cd ..
|
||||
fi
|
||||
|
||||
if [ -d "raft-rs" ]; then
|
||||
cd raft-rs; git pull origin master; cd ..
|
||||
else
|
||||
git clone https://github.com/pingcap/raft-rs
|
||||
fi
|
||||
|
||||
if [ -d "tipb" ]; then
|
||||
cd tipb; git pull origin master; cd ..
|
||||
else
|
||||
git clone https://github.com/pingcap/tipb
|
||||
fi
|
||||
|
|
@ -0,0 +1 @@
|
|||
rule io.netty.** io.netty.netty4pingcap.@1
|
||||
|
|
@ -0,0 +1,152 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv;
|
||||
|
||||
import static io.grpc.stub.ClientCalls.asyncBidiStreamingCall;
|
||||
import static io.grpc.stub.ClientCalls.blockingServerStreamingCall;
|
||||
|
||||
import com.pingcap.tikv.operation.ErrorHandler;
|
||||
import com.pingcap.tikv.policy.RetryMaxMs.Builder;
|
||||
import com.pingcap.tikv.policy.RetryPolicy;
|
||||
import com.pingcap.tikv.streaming.StreamingResponse;
|
||||
import com.pingcap.tikv.util.BackOffer;
|
||||
import io.grpc.MethodDescriptor;
|
||||
import io.grpc.stub.AbstractStub;
|
||||
import io.grpc.stub.ClientCalls;
|
||||
import io.grpc.stub.StreamObserver;
|
||||
import java.util.function.Supplier;
|
||||
import org.apache.log4j.Logger;
|
||||
|
||||
public abstract class AbstractGRPCClient<
|
||||
BlockingStubT extends AbstractStub<BlockingStubT>, StubT extends AbstractStub<StubT>>
|
||||
implements AutoCloseable {
|
||||
protected final Logger logger = Logger.getLogger(this.getClass());
|
||||
protected TiSession session;
|
||||
protected TiConfiguration conf;
|
||||
|
||||
/**
 * Creates a gRPC client bound to one {@link TiSession}.
 *
 * <p>The session's configuration is stored in {@code conf} at construction
 * time so subclasses can read it without going back through the session.
 *
 * @param session session providing connection state and configuration
 */
protected AbstractGRPCClient(TiSession session) {
  this.session = session;
  this.conf = session.getConf();
}
|
||||
|
||||
public TiSession getSession() {
|
||||
return session;
|
||||
}
|
||||
|
||||
public TiConfiguration getConf() {
|
||||
return conf;
|
||||
}
|
||||
|
||||
// TODO: Seems a little bit messy for lambda part
|
||||
public <ReqT, RespT> RespT callWithRetry(
|
||||
BackOffer backOffer,
|
||||
MethodDescriptor<ReqT, RespT> method,
|
||||
Supplier<ReqT> requestFactory,
|
||||
ErrorHandler<RespT> handler) {
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace(String.format("Calling %s...", method.getFullMethodName()));
|
||||
}
|
||||
RetryPolicy.Builder<RespT> builder = new Builder<>(backOffer);
|
||||
RespT resp =
|
||||
builder
|
||||
.create(handler)
|
||||
.callWithRetry(
|
||||
() -> {
|
||||
BlockingStubT stub = getBlockingStub();
|
||||
return ClientCalls.blockingUnaryCall(
|
||||
stub.getChannel(), method, stub.getCallOptions(), requestFactory.get());
|
||||
},
|
||||
method.getFullMethodName());
|
||||
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace(String.format("leaving %s...", method.getFullMethodName()));
|
||||
}
|
||||
return resp;
|
||||
}
|
||||
|
||||
protected <ReqT, RespT> void callAsyncWithRetry(
|
||||
BackOffer backOffer,
|
||||
MethodDescriptor<ReqT, RespT> method,
|
||||
Supplier<ReqT> requestFactory,
|
||||
StreamObserver<RespT> responseObserver,
|
||||
ErrorHandler<RespT> handler) {
|
||||
logger.debug(String.format("Calling %s...", method.getFullMethodName()));
|
||||
|
||||
RetryPolicy.Builder<RespT> builder = new Builder<>(backOffer);
|
||||
builder
|
||||
.create(handler)
|
||||
.callWithRetry(
|
||||
() -> {
|
||||
StubT stub = getAsyncStub();
|
||||
ClientCalls.asyncUnaryCall(
|
||||
stub.getChannel().newCall(method, stub.getCallOptions()),
|
||||
requestFactory.get(),
|
||||
responseObserver);
|
||||
return null;
|
||||
},
|
||||
method.getFullMethodName());
|
||||
logger.debug(String.format("leaving %s...", method.getFullMethodName()));
|
||||
}
|
||||
|
||||
<ReqT, RespT> StreamObserver<ReqT> callBidiStreamingWithRetry(
|
||||
BackOffer backOffer,
|
||||
MethodDescriptor<ReqT, RespT> method,
|
||||
StreamObserver<RespT> responseObserver,
|
||||
ErrorHandler<StreamObserver<ReqT>> handler) {
|
||||
logger.debug(String.format("Calling %s...", method.getFullMethodName()));
|
||||
|
||||
RetryPolicy.Builder<StreamObserver<ReqT>> builder = new Builder<>(backOffer);
|
||||
StreamObserver<ReqT> observer =
|
||||
builder
|
||||
.create(handler)
|
||||
.callWithRetry(
|
||||
() -> {
|
||||
StubT stub = getAsyncStub();
|
||||
return asyncBidiStreamingCall(
|
||||
stub.getChannel().newCall(method, stub.getCallOptions()), responseObserver);
|
||||
},
|
||||
method.getFullMethodName());
|
||||
logger.debug(String.format("leaving %s...", method.getFullMethodName()));
|
||||
return observer;
|
||||
}
|
||||
|
||||
public <ReqT, RespT> StreamingResponse callServerStreamingWithRetry(
|
||||
BackOffer backOffer,
|
||||
MethodDescriptor<ReqT, RespT> method,
|
||||
Supplier<ReqT> requestFactory,
|
||||
ErrorHandler<StreamingResponse> handler) {
|
||||
logger.debug(String.format("Calling %s...", method.getFullMethodName()));
|
||||
|
||||
RetryPolicy.Builder<StreamingResponse> builder = new Builder<>(backOffer);
|
||||
StreamingResponse response =
|
||||
builder
|
||||
.create(handler)
|
||||
.callWithRetry(
|
||||
() -> {
|
||||
BlockingStubT stub = getBlockingStub();
|
||||
return new StreamingResponse(
|
||||
blockingServerStreamingCall(
|
||||
stub.getChannel(), method, stub.getCallOptions(), requestFactory.get()));
|
||||
},
|
||||
method.getFullMethodName());
|
||||
logger.debug(String.format("leaving %s...", method.getFullMethodName()));
|
||||
return response;
|
||||
}
|
||||
|
||||
protected abstract BlockingStubT getBlockingStub();
|
||||
|
||||
protected abstract StubT getAsyncStub();
|
||||
}
|
||||
|
|
@ -0,0 +1,34 @@
|
|||
# Everything in this package is visible to the rest of the workspace.
package(default_visibility = ["//visibility:public"])

# Core client library: compiles every Java source under this directory.
java_library(
    name = "tikv-java-client-lib",
    srcs = glob(
        ["**/*.java"],
    ),
    deps = [
        "//:java",
        "@com_fasterxml_jackson_core_jackson_annotations//jar",
        "@com_fasterxml_jackson_core_jackson_core//jar",
        "@com_fasterxml_jackson_core_jackson_databind//jar",
        "@com_google_code_findbugs_jsr305//jar",
        "@com_google_code_gson_gson//jar",
        "@com_google_errorprone_error_prone_annotations//jar",
        "@com_google_guava_guava//jar",
        "@com_google_protobuf_protobuf_java//jar",
        "@joda_time//jar",
        # the following are defined in rules_protobuf
        "@org_pubref_rules_protobuf//java:grpc_compiletime_deps",
        "@org_pubref_rules_protobuf//java:netty_runtime_deps",

        # logging: slf4j facade plus bridges, log4j backend
        "@org_slf4j_slf4j_api//jar",
        "@org_slf4j_jcl_over_slf4j//jar",
        "@org_slf4j_jul_to_slf4j//jar",
        "@log4j_log4j//jar",
        "@net_sf_trove4j_trove4j//jar",
    ],
)

# Sources (BUILD file included) exported for consumption by other packages.
filegroup(
    name = "srcs",
    srcs = ["BUILD"] + glob(["**/*.java"]),
)
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
package com.pingcap.tikv;
|
||||
|
||||
/** Placeholder entry point for the client binary; intentionally a no-op. */
public class Main {
  public static void main(String[] args) throws Exception {}
}
|
||||
|
|
@ -0,0 +1,374 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv;
|
||||
|
||||
import static com.google.common.base.Preconditions.checkNotNull;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.net.HostAndPort;
|
||||
import com.google.common.util.concurrent.ThreadFactoryBuilder;
|
||||
import com.google.protobuf.ByteString;
|
||||
import com.pingcap.tikv.codec.Codec.BytesCodec;
|
||||
import com.pingcap.tikv.codec.CodecDataOutput;
|
||||
import com.pingcap.tikv.exception.GrpcException;
|
||||
import com.pingcap.tikv.exception.TiClientInternalException;
|
||||
import com.pingcap.tikv.kvproto.Kvrpcpb.IsolationLevel;
|
||||
import com.pingcap.tikv.kvproto.Metapb.Store;
|
||||
import com.pingcap.tikv.kvproto.PDGrpc;
|
||||
import com.pingcap.tikv.kvproto.PDGrpc.PDBlockingStub;
|
||||
import com.pingcap.tikv.kvproto.PDGrpc.PDStub;
|
||||
import com.pingcap.tikv.kvproto.Pdpb.GetMembersRequest;
|
||||
import com.pingcap.tikv.kvproto.Pdpb.GetMembersResponse;
|
||||
import com.pingcap.tikv.kvproto.Pdpb.GetRegionByIDRequest;
|
||||
import com.pingcap.tikv.kvproto.Pdpb.GetRegionRequest;
|
||||
import com.pingcap.tikv.kvproto.Pdpb.GetRegionResponse;
|
||||
import com.pingcap.tikv.kvproto.Pdpb.GetStoreRequest;
|
||||
import com.pingcap.tikv.kvproto.Pdpb.GetStoreResponse;
|
||||
import com.pingcap.tikv.kvproto.Pdpb.RequestHeader;
|
||||
import com.pingcap.tikv.kvproto.Pdpb.Timestamp;
|
||||
import com.pingcap.tikv.kvproto.Pdpb.TsoRequest;
|
||||
import com.pingcap.tikv.kvproto.Pdpb.TsoResponse;
|
||||
import com.pingcap.tikv.meta.TiTimestamp;
|
||||
import com.pingcap.tikv.operation.PDErrorHandler;
|
||||
import com.pingcap.tikv.region.TiRegion;
|
||||
import com.pingcap.tikv.util.BackOffer;
|
||||
import com.pingcap.tikv.util.FutureObserver;
|
||||
import io.grpc.ManagedChannel;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.ScheduledExecutorService;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
/**
 * Client for the Placement Driver (PD). Tracks the current PD leader in
 * {@link LeaderWrapper}, refreshes it on a background scheduler, and issues
 * region/store/timestamp RPCs through the retry helpers of {@link AbstractGRPCClient}.
 */
public class PDClient extends AbstractGRPCClient<PDBlockingStub, PDStub>
    implements ReadOnlyPDClient {
  // Request header carrying the cluster id; built once in initCluster().
  private RequestHeader header;
  // Pre-built TSO request (count = 1); reused for every getTimestamp call.
  private TsoRequest tsoReq;
  // Current leader stubs; volatile because the refresh task swaps it from another thread.
  private volatile LeaderWrapper leaderWrapper;
  // Scheduler running the periodic leader refresh; created in initCluster().
  private ScheduledExecutorService service;
  // NOTE(review): never read or written in this class — confirm whether it can be removed.
  private IsolationLevel isolationLevel;
  // All configured PD addresses, walked when the leader must be rediscovered.
  private List<HostAndPort> pdAddrs;

  /** Fetches a fresh timestamp (physical + logical) from the PD leader via TSO. */
  @Override
  public TiTimestamp getTimestamp(BackOffer backOffer) {
    Supplier<TsoRequest> request = () -> tsoReq;

    PDErrorHandler<TsoResponse> handler =
        new PDErrorHandler<>(r -> r.getHeader().hasError() ? r.getHeader().getError() : null, this);

    TsoResponse resp = callWithRetry(backOffer, PDGrpc.METHOD_TSO, request, handler);
    Timestamp timestamp = resp.getTimestamp();
    return new TiTimestamp(timestamp.getPhysical(), timestamp.getLogical());
  }

  /** Looks up the region covering {@code key}; the key is bytes-encoded before the RPC. */
  @Override
  public TiRegion getRegionByKey(BackOffer backOffer, ByteString key) {
    // PD stores region boundaries in encoded-bytes form, so encode the lookup key too.
    CodecDataOutput cdo = new CodecDataOutput();
    BytesCodec.writeBytes(cdo, key.toByteArray());
    ByteString encodedKey = cdo.toByteString();

    Supplier<GetRegionRequest> request =
        () -> GetRegionRequest.newBuilder().setHeader(header).setRegionKey(encodedKey).build();

    PDErrorHandler<GetRegionResponse> handler =
        new PDErrorHandler<>(r -> r.getHeader().hasError() ? r.getHeader().getError() : null, this);

    GetRegionResponse resp = callWithRetry(backOffer, PDGrpc.METHOD_GET_REGION, request, handler);
    return new TiRegion(
        resp.getRegion(), resp.getLeader(), conf.getIsolationLevel(), conf.getCommandPriority());
  }

  /**
   * Async variant of {@link #getRegionByKey}.
   *
   * <p>NOTE(review): unlike the sync version this sends {@code key} raw, without the
   * BytesCodec encoding — confirm whether that is intentional.
   */
  @Override
  public Future<TiRegion> getRegionByKeyAsync(BackOffer backOffer, ByteString key) {
    FutureObserver<TiRegion, GetRegionResponse> responseObserver =
        new FutureObserver<>(
            resp ->
                new TiRegion(
                    resp.getRegion(),
                    resp.getLeader(),
                    conf.getIsolationLevel(),
                    conf.getCommandPriority()));
    Supplier<GetRegionRequest> request =
        () -> GetRegionRequest.newBuilder().setHeader(header).setRegionKey(key).build();

    PDErrorHandler<GetRegionResponse> handler =
        new PDErrorHandler<>(r -> r.getHeader().hasError() ? r.getHeader().getError() : null, this);

    callAsyncWithRetry(backOffer, PDGrpc.METHOD_GET_REGION, request, responseObserver, handler);
    return responseObserver.getFuture();
  }

  /** Looks up a region by its numeric id. */
  @Override
  public TiRegion getRegionByID(BackOffer backOffer, long id) {
    Supplier<GetRegionByIDRequest> request =
        () -> GetRegionByIDRequest.newBuilder().setHeader(header).setRegionId(id).build();
    PDErrorHandler<GetRegionResponse> handler =
        new PDErrorHandler<>(r -> r.getHeader().hasError() ? r.getHeader().getError() : null, this);

    GetRegionResponse resp =
        callWithRetry(backOffer, PDGrpc.METHOD_GET_REGION_BY_ID, request, handler);
    // Instead of using default leader instance, explicitly set no leader to null
    return new TiRegion(
        resp.getRegion(), resp.getLeader(), conf.getIsolationLevel(), conf.getCommandPriority());
  }

  /** Async variant of {@link #getRegionByID}. */
  @Override
  public Future<TiRegion> getRegionByIDAsync(BackOffer backOffer, long id) {
    FutureObserver<TiRegion, GetRegionResponse> responseObserver =
        new FutureObserver<>(
            resp ->
                new TiRegion(
                    resp.getRegion(),
                    resp.getLeader(),
                    conf.getIsolationLevel(),
                    conf.getCommandPriority()));

    Supplier<GetRegionByIDRequest> request =
        () -> GetRegionByIDRequest.newBuilder().setHeader(header).setRegionId(id).build();
    PDErrorHandler<GetRegionResponse> handler =
        new PDErrorHandler<>(r -> r.getHeader().hasError() ? r.getHeader().getError() : null, this);

    callAsyncWithRetry(
        backOffer, PDGrpc.METHOD_GET_REGION_BY_ID, request, responseObserver, handler);
    return responseObserver.getFuture();
  }

  /** Fetches store metadata for {@code storeId}. */
  @Override
  public Store getStore(BackOffer backOffer, long storeId) {
    Supplier<GetStoreRequest> request =
        () -> GetStoreRequest.newBuilder().setHeader(header).setStoreId(storeId).build();
    PDErrorHandler<GetStoreResponse> handler =
        new PDErrorHandler<>(r -> r.getHeader().hasError() ? r.getHeader().getError() : null, this);

    GetStoreResponse resp = callWithRetry(backOffer, PDGrpc.METHOD_GET_STORE, request, handler);
    return resp.getStore();
  }

  /** Async variant of {@link #getStore}. */
  @Override
  public Future<Store> getStoreAsync(BackOffer backOffer, long storeId) {
    FutureObserver<Store, GetStoreResponse> responseObserver =
        new FutureObserver<>(GetStoreResponse::getStore);

    Supplier<GetStoreRequest> request =
        () -> GetStoreRequest.newBuilder().setHeader(header).setStoreId(storeId).build();
    PDErrorHandler<GetStoreResponse> handler =
        new PDErrorHandler<>(r -> r.getHeader().hasError() ? r.getHeader().getError() : null, this);

    callAsyncWithRetry(backOffer, PDGrpc.METHOD_GET_STORE, request, responseObserver, handler);
    return responseObserver.getFuture();
  }

  /** Stops the leader-refresh scheduler and closes the current leader wrapper. */
  @Override
  public void close() throws InterruptedException {
    if (service != null) {
      service.shutdownNow();
    }
    if (getLeaderWrapper() != null) {
      getLeaderWrapper().close();
    }
  }

  /** Factory exposing the read-only interface; delegates to {@link #createRaw}. */
  public static ReadOnlyPDClient create(TiSession session) {
    return createRaw(session);
  }

  @VisibleForTesting
  RequestHeader getHeader() {
    return header;
  }

  @VisibleForTesting
  LeaderWrapper getLeaderWrapper() {
    return leaderWrapper;
  }

  /**
   * Immutable snapshot of the current PD leader: its "host:port" string, the stubs bound
   * to its channel, and the creation timestamp (System.nanoTime).
   */
  class LeaderWrapper {
    private final String leaderInfo;
    private final PDBlockingStub blockingStub;
    private final PDStub asyncStub;
    private final long createTime;

    LeaderWrapper(
        String leaderInfo,
        PDGrpc.PDBlockingStub blockingStub,
        PDGrpc.PDStub asyncStub,
        long createTime) {
      this.leaderInfo = leaderInfo;
      this.blockingStub = blockingStub;
      this.asyncStub = asyncStub;
      this.createTime = createTime;
    }

    String getLeaderInfo() {
      return leaderInfo;
    }

    PDBlockingStub getBlockingStub() {
      return blockingStub;
    }

    PDStub getAsyncStub() {
      return asyncStub;
    }

    long getCreateTime() {
      return createTime;
    }

    // No channel teardown here: channels are owned and closed by the session.
    void close() {}
  }

  /**
   * Asks the PD server at {@code url} for cluster membership.
   *
   * @return the response, or {@code null} if the RPC failed (logged at warn level)
   */
  public GetMembersResponse getMembers(HostAndPort url) {
    try {
      ManagedChannel probChan = session.getChannel(url.getHostText() + ":" + url.getPort());
      PDGrpc.PDBlockingStub stub = PDGrpc.newBlockingStub(probChan);
      GetMembersRequest request =
          GetMembersRequest.newBuilder().setHeader(RequestHeader.getDefaultInstance()).build();
      return stub.getMembers(request);
    } catch (Exception e) {
      logger.warn("failed to get member from pd server.", e);
    }
    return null;
  }

  /**
   * Switches to the leader named by the first URL in {@code leaderURLs}.
   *
   * @return true if the wrapper already points at that leader or was switched successfully
   */
  private synchronized boolean switchLeader(List<String> leaderURLs) {
    if (leaderURLs.isEmpty()) return false;
    String leaderUrlStr = leaderURLs.get(0);
    // TODO: Why not strip protocol info on server side since grpc does not need it
    if (leaderWrapper != null && leaderUrlStr.equals(leaderWrapper.getLeaderInfo())) {
      return true;
    }
    // switch leader
    return createLeaderWrapper(leaderUrlStr);
  }

  /**
   * Builds a new {@link LeaderWrapper} for {@code leaderUrlStr} (a full URL whose
   * protocol is stripped here via java.net.URL parsing).
   *
   * @return false only when the URL cannot be parsed
   */
  private boolean createLeaderWrapper(String leaderUrlStr) {
    try {
      URL tURL = new URL(leaderUrlStr);
      HostAndPort newLeader = HostAndPort.fromParts(tURL.getHost(), tURL.getPort());
      leaderUrlStr = newLeader.toString();
      // Re-check against the normalized "host:port" form before rebuilding stubs.
      if (leaderWrapper != null && leaderUrlStr.equals(leaderWrapper.getLeaderInfo())) {
        return true;
      }

      // create new Leader
      ManagedChannel clientChannel = session.getChannel(leaderUrlStr);
      leaderWrapper =
          new LeaderWrapper(
              leaderUrlStr,
              PDGrpc.newBlockingStub(clientChannel),
              PDGrpc.newStub(clientChannel),
              System.nanoTime());
    } catch (MalformedURLException e) {
      logger.error("Error updating leader.", e);
      return false;
    }
    logger.info(String.format("Switched to new leader: %s", leaderWrapper));
    return true;
  }

  /**
   * Walks all configured PD addresses until one reports a leader we can switch to.
   *
   * @throws TiClientInternalException if no address yields a usable leader
   */
  public void updateLeader() {
    for (HostAndPort url : this.pdAddrs) {
      // since resp is null, we need update leader's address by walking through all pd server.
      GetMembersResponse resp = getMembers(url);
      if (resp == null) {
        continue;
      }
      // if leader is switched, just return.
      if (switchLeader(resp.getLeader().getClientUrlsList())) {
        return;
      }
    }
    throw new TiClientInternalException(
        "already tried all address on file, but not leader found yet.");
  }

  /** Leader's blocking stub with the configured per-call deadline applied. */
  @Override
  protected PDBlockingStub getBlockingStub() {
    if (leaderWrapper == null) {
      throw new GrpcException("PDClient may not be initialized");
    }
    return leaderWrapper
        .getBlockingStub()
        .withDeadlineAfter(getConf().getTimeout(), getConf().getTimeoutUnit());
  }

  /** Leader's async stub with the configured per-call deadline applied. */
  @Override
  protected PDStub getAsyncStub() {
    if (leaderWrapper == null) {
      throw new GrpcException("PDClient may not be initialized");
    }
    return leaderWrapper
        .getAsyncStub()
        .withDeadlineAfter(getConf().getTimeout(), getConf().getTimeoutUnit());
  }

  private PDClient(TiSession session) {
    super(session);
  }

  /**
   * Bootstraps the client: discovers the cluster id from any reachable PD, builds the
   * shared request header, creates the initial leader wrapper, and starts a daemon
   * scheduler that refreshes the leader every minute.
   */
  private void initCluster() {
    GetMembersResponse resp = null;
    List<HostAndPort> pdAddrs = getSession().getConf().getPdAddrs();
    for (HostAndPort u : pdAddrs) {
      resp = getMembers(u);
      if (resp != null) {
        break;
      }
    }
    checkNotNull(resp, "Failed to init client for PD cluster.");
    long clusterId = resp.getHeader().getClusterId();
    header = RequestHeader.newBuilder().setClusterId(clusterId).build();
    tsoReq = TsoRequest.newBuilder().setHeader(header).setCount(1).build();
    this.pdAddrs = pdAddrs;
    createLeaderWrapper(resp.getLeader().getClientUrls(0));
    service =
        Executors.newSingleThreadScheduledExecutor(
            new ThreadFactoryBuilder().setDaemon(true).build());
    service.scheduleAtFixedRate(
        () -> {
          // Wrap this with a try catch block in case schedule update fails
          try {
            updateLeader();
          } catch (Exception e) {
            logger.warn("Update leader failed", e);
          }
        },
        1,
        1,
        TimeUnit.MINUTES);
  }

  /**
   * Creates and initializes a PDClient; on any init failure the half-built client is
   * closed before the exception is rethrown.
   */
  static PDClient createRaw(TiSession session) {
    PDClient client = null;
    try {
      client = new PDClient(session);
      client.initCluster();
    } catch (Exception e) {
      if (client != null) {
        try {
          client.close();
        } catch (InterruptedException ignore) {
        }
      }
      throw e;
    }
    return client;
  }
}
|
||||
|
|
@ -0,0 +1,120 @@
|
|||
package com.pingcap.tikv;
|
||||
|
||||
import com.google.protobuf.ByteString;
|
||||
import com.pingcap.tikv.kvproto.Kvrpcpb;
|
||||
import com.pingcap.tikv.kvproto.Metapb;
|
||||
import com.pingcap.tikv.operation.iterator.RawScanIterator;
|
||||
import com.pingcap.tikv.region.RegionManager;
|
||||
import com.pingcap.tikv.region.RegionStoreClient;
|
||||
import com.pingcap.tikv.region.TiRegion;
|
||||
import com.pingcap.tikv.util.BackOffer;
|
||||
import com.pingcap.tikv.util.ConcreteBackOffer;
|
||||
import com.pingcap.tikv.util.Pair;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Client for TiKV's raw key-value API (no transactions, no MVCC timestamps).
 * Each operation resolves the owning region/store and talks to it directly
 * through a {@link RegionStoreClient}.
 */
public class RawKVClient {
  private static final String DEFAULT_PD_ADDRESS = "127.0.0.1:2379";
  private final TiSession session;
  private final RegionManager regionManager;

  private RawKVClient(String addresses) {
    // "addresses" is the PD address list understood by TiConfiguration.createDefault.
    session = TiSession.create(TiConfiguration.createDefault(addresses));
    regionManager = session.getRegionManager();
  }

  private RawKVClient() {
    this(DEFAULT_PD_ADDRESS);
  }

  /** Creates a client against the default local PD at 127.0.0.1:2379. */
  public static RawKVClient create() {
    return new RawKVClient();
  }

  /** Creates a client against the given PD address(es). */
  public static RawKVClient create(String address) {
    return new RawKVClient(address);
  }

  /**
   * Put a raw key-value pair to TiKV
   *
   * @param key raw key
   * @param value raw value
   */
  public void put(ByteString key, ByteString value) {
    Pair<TiRegion, Metapb.Store> pair = regionManager.getRegionStorePairByRawKey(key);
    RegionStoreClient client = RegionStoreClient.create(pair.first, pair.second, session);
    client.rawPut(defaultBackOff(), key, value);
  }

  /**
   * Get a raw key-value pair from TiKV if key exists
   *
   * @param key raw key
   * @return a ByteString value if key exists, ByteString.EMPTY if key does not exist
   */
  public ByteString get(ByteString key) {
    Pair<TiRegion, Metapb.Store> pair = regionManager.getRegionStorePairByRawKey(key);
    RegionStoreClient client = RegionStoreClient.create(pair.first, pair.second, session);
    return client.rawGet(defaultBackOff(), key);
  }

  /**
   * Scan raw key-value pairs from TiKV in range [startKey, endKey)
   *
   * @param startKey raw start key, inclusive
   * @param endKey raw end key, exclusive
   * @return list of key-value pairs in range
   */
  public List<Kvrpcpb.KvPair> scan(ByteString startKey, ByteString endKey) {
    Iterator<Kvrpcpb.KvPair> iterator = rawScanIterator(startKey, endKey);
    List<Kvrpcpb.KvPair> result = new ArrayList<>();
    iterator.forEachRemaining(result::add);
    return result;
  }

  /**
   * Scan raw key-value pairs from TiKV starting at startKey, returning at most limit pairs
   *
   * @param startKey raw start key, inclusive
   * @param limit limit of key-value pairs
   * @return list of at most {@code limit} key-value pairs starting at {@code startKey}
   */
  public List<Kvrpcpb.KvPair> scan(ByteString startKey, int limit) {
    Iterator<Kvrpcpb.KvPair> iterator = rawScanIterator(startKey, limit);
    List<Kvrpcpb.KvPair> result = new ArrayList<>();
    iterator.forEachRemaining(result::add);
    return result;
  }

  /**
   * Delete a raw key-value pair from TiKV if key exists
   *
   * @param key raw key to be deleted
   */
  public void delete(ByteString key) {
    TiRegion region = regionManager.getRegionByRawKey(key);
    // rawDelete needs an explicit request context (region id/epoch and leader peer).
    Kvrpcpb.Context context =
        Kvrpcpb.Context.newBuilder()
            .setRegionId(region.getId())
            .setRegionEpoch(region.getRegionEpoch())
            .setPeer(region.getLeader())
            .build();
    Pair<TiRegion, Metapb.Store> pair = regionManager.getRegionStorePairByRawKey(key);
    RegionStoreClient client = RegionStoreClient.create(pair.first, pair.second, session);
    client.rawDelete(defaultBackOff(), key, context);
  }

  // Unbounded scan: iterate until endKey using Integer.MAX_VALUE as the limit.
  private Iterator<Kvrpcpb.KvPair> rawScanIterator(ByteString startKey, ByteString endKey) {
    return new RawScanIterator(startKey, endKey, Integer.MAX_VALUE, session);
  }

  // Limited scan: ByteString.EMPTY means "no end key", stop after limit pairs.
  private Iterator<Kvrpcpb.KvPair> rawScanIterator(ByteString startKey, int limit) {
    return new RawScanIterator(startKey, ByteString.EMPTY, limit, session);
  }

  // Back-off budget (ms) shared by all raw operations.
  private BackOffer defaultBackOff() {
    return ConcreteBackOffer.newCustomBackOff(1000);
  }
}
|
||||
|
|
@ -0,0 +1,69 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv;
|
||||
|
||||
import com.google.protobuf.ByteString;
|
||||
import com.pingcap.tikv.kvproto.Metapb.Store;
|
||||
import com.pingcap.tikv.meta.TiTimestamp;
|
||||
import com.pingcap.tikv.region.TiRegion;
|
||||
import com.pingcap.tikv.util.BackOffer;
|
||||
import java.util.concurrent.Future;
|
||||
|
||||
/** Readonly PD client including only reading related interface Supposed for TiDB-like use cases */
|
||||
public interface ReadOnlyPDClient {
  /**
   * Get Timestamp from Placement Driver
   *
   * @param backOffer back-off state governing retries
   * @return a timestamp object
   */
  TiTimestamp getTimestamp(BackOffer backOffer);

  /**
   * Get Region from PD by key specified
   *
   * @param backOffer back-off state governing retries
   * @param key key in bytes for locating a region
   * @return the region whose startKey and endKey range covers the given key
   */
  TiRegion getRegionByKey(BackOffer backOffer, ByteString key);

  /**
   * Asynchronous variant of {@link #getRegionByKey}.
   *
   * @param backOffer back-off state governing retries
   * @param key key in bytes for locating a region
   * @return a future completing with the covering region
   */
  Future<TiRegion> getRegionByKeyAsync(BackOffer backOffer, ByteString key);

  /**
   * Get Region by Region Id
   *
   * @param backOffer back-off state governing retries
   * @param id Region Id
   * @return the region corresponding to the given Id
   */
  TiRegion getRegionByID(BackOffer backOffer, long id);

  /**
   * Asynchronous variant of {@link #getRegionByID}.
   *
   * @param backOffer back-off state governing retries
   * @param id Region Id
   * @return a future completing with the region
   */
  Future<TiRegion> getRegionByIDAsync(BackOffer backOffer, long id);

  /**
   * Get Store by StoreId
   *
   * @param backOffer back-off state governing retries
   * @param storeId StoreId
   * @return the Store corresponding to the given Id
   */
  Store getStore(BackOffer backOffer, long storeId);

  /**
   * Asynchronous variant of {@link #getStore}.
   *
   * @param backOffer back-off state governing retries
   * @param storeId StoreId
   * @return a future completing with the store
   */
  Future<Store> getStoreAsync(BackOffer backOffer, long storeId);

  /** Close underlining resources */
  void close() throws InterruptedException;

  /**
   * Get associated session
   *
   * @return the session associated to client
   */
  TiSession getSession();
}
|
||||
|
|
@ -0,0 +1,168 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv;
|
||||
|
||||
import static com.pingcap.tikv.operation.iterator.CoprocessIterator.getHandleIterator;
|
||||
import static com.pingcap.tikv.operation.iterator.CoprocessIterator.getRowIterator;
|
||||
import static com.pingcap.tikv.util.KeyRangeUtils.makeRange;
|
||||
|
||||
import com.google.common.collect.Range;
|
||||
import com.google.protobuf.ByteString;
|
||||
import com.pingcap.tikv.exception.TiClientInternalException;
|
||||
import com.pingcap.tikv.key.Key;
|
||||
import com.pingcap.tikv.kvproto.Kvrpcpb.KvPair;
|
||||
import com.pingcap.tikv.kvproto.Metapb.Store;
|
||||
import com.pingcap.tikv.meta.TiDAGRequest;
|
||||
import com.pingcap.tikv.meta.TiTimestamp;
|
||||
import com.pingcap.tikv.operation.iterator.ConcreteScanIterator;
|
||||
import com.pingcap.tikv.operation.iterator.IndexScanIterator;
|
||||
import com.pingcap.tikv.region.RegionStoreClient;
|
||||
import com.pingcap.tikv.region.TiRegion;
|
||||
import com.pingcap.tikv.row.Row;
|
||||
import com.pingcap.tikv.util.BackOffer;
|
||||
import com.pingcap.tikv.util.ConcreteBackOffer;
|
||||
import com.pingcap.tikv.util.Pair;
|
||||
import com.pingcap.tikv.util.RangeSplitter;
|
||||
import com.pingcap.tikv.util.RangeSplitter.RegionTask;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
public class Snapshot {
|
||||
private final TiTimestamp timestamp;
|
||||
private final TiSession session;
|
||||
private final TiConfiguration conf;
|
||||
|
||||
public Snapshot(TiTimestamp timestamp, TiSession session) {
|
||||
this.timestamp = timestamp;
|
||||
this.session = session;
|
||||
this.conf = session.getConf();
|
||||
}
|
||||
|
||||
public TiSession getSession() {
|
||||
return session;
|
||||
}
|
||||
|
||||
public long getVersion() {
|
||||
return timestamp.getVersion();
|
||||
}
|
||||
|
||||
public TiTimestamp getTimestamp() {
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
public byte[] get(byte[] key) {
|
||||
ByteString keyString = ByteString.copyFrom(key);
|
||||
ByteString value = get(keyString);
|
||||
return value.toByteArray();
|
||||
}
|
||||
|
||||
public ByteString get(ByteString key) {
|
||||
Pair<TiRegion, Store> pair = session.getRegionManager().getRegionStorePairByKey(key);
|
||||
RegionStoreClient client = RegionStoreClient.create(pair.first, pair.second, getSession());
|
||||
// TODO: Need to deal with lock error after grpc stable
|
||||
return client.get(ConcreteBackOffer.newGetBackOff(), key, timestamp.getVersion());
|
||||
}
|
||||
|
||||
/**
|
||||
* Issue a table read request
|
||||
*
|
||||
* @param dagRequest DAG request for coprocessor
|
||||
* @return a Iterator that contains all result from this select request.
|
||||
*/
|
||||
public Iterator<Row> tableRead(TiDAGRequest dagRequest) {
|
||||
if (dagRequest.isIndexScan()) {
|
||||
Iterator<Long> iter =
|
||||
getHandleIterator(
|
||||
dagRequest,
|
||||
RangeSplitter.newSplitter(session.getRegionManager())
|
||||
.splitRangeByRegion(dagRequest.getRanges()),
|
||||
session);
|
||||
return new IndexScanIterator(this, dagRequest, iter);
|
||||
} else {
|
||||
return getRowIterator(
|
||||
dagRequest,
|
||||
RangeSplitter.newSplitter(session.getRegionManager())
|
||||
.splitRangeByRegion(dagRequest.getRanges()),
|
||||
session);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Below is lower level API for env like Spark which already did key range split Perform table
|
||||
* scan
|
||||
*
|
||||
* @param dagRequest DAGRequest for coprocessor
|
||||
* @param task RegionTask of the coprocessor request to send
|
||||
* @return Row iterator to iterate over resulting rows
|
||||
*/
|
||||
public Iterator<Row> tableRead(TiDAGRequest dagRequest, List<RegionTask> task) {
|
||||
if (dagRequest.isDoubleRead()) {
|
||||
Iterator<Long> iter = getHandleIterator(dagRequest, task, session);
|
||||
return new IndexScanIterator(this, dagRequest, iter);
|
||||
} else {
|
||||
return getRowIterator(dagRequest, task, session);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Below is lower level API for env like Spark which already did key range split Perform handle
|
||||
* scan
|
||||
*
|
||||
* @param dagRequest DAGRequest for coprocessor
|
||||
* @param tasks RegionTask of the coprocessor request to send
|
||||
* @return Row iterator to iterate over resulting rows
|
||||
*/
|
||||
public Iterator<Long> indexHandleRead(TiDAGRequest dagRequest, List<RegionTask> tasks) {
|
||||
return getHandleIterator(dagRequest, tasks, session);
|
||||
}
|
||||
|
||||
public Iterator<KvPair> scan(ByteString startKey) {
|
||||
return new ConcreteScanIterator(startKey, session, timestamp.getVersion());
|
||||
}
|
||||
|
||||
// TODO: Need faster implementation, say concurrent version
|
||||
// Assume keys sorted
|
||||
public List<KvPair> batchGet(List<ByteString> keys) {
|
||||
TiRegion curRegion = null;
|
||||
Range<Key> curKeyRange = null;
|
||||
Pair<TiRegion, Store> lastPair;
|
||||
List<ByteString> keyBuffer = new ArrayList<>();
|
||||
List<KvPair> result = new ArrayList<>(keys.size());
|
||||
BackOffer backOffer = ConcreteBackOffer.newBatchGetMaxBackOff();
|
||||
for (ByteString key : keys) {
|
||||
if (curRegion == null || !curKeyRange.contains(Key.toRawKey(key))) {
|
||||
Pair<TiRegion, Store> pair = session.getRegionManager().getRegionStorePairByKey(key);
|
||||
lastPair = pair;
|
||||
curRegion = pair.first;
|
||||
curKeyRange = makeRange(curRegion.getStartKey(), curRegion.getEndKey());
|
||||
|
||||
try (RegionStoreClient client =
|
||||
RegionStoreClient.create(lastPair.first, lastPair.second, getSession())) {
|
||||
List<KvPair> partialResult =
|
||||
client.batchGet(backOffer, keyBuffer, timestamp.getVersion());
|
||||
// TODO: Add lock check
|
||||
result.addAll(partialResult);
|
||||
} catch (Exception e) {
|
||||
throw new TiClientInternalException("Error Closing Store client.", e);
|
||||
}
|
||||
keyBuffer = new ArrayList<>();
|
||||
keyBuffer.add(key);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,219 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.net.HostAndPort;
|
||||
import com.pingcap.tikv.kvproto.Kvrpcpb.CommandPri;
|
||||
import com.pingcap.tikv.kvproto.Kvrpcpb.IsolationLevel;
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
public class TiConfiguration implements Serializable {
|
||||
private static final int DEF_TIMEOUT = 10;
|
||||
private static final TimeUnit DEF_TIMEOUT_UNIT = TimeUnit.MINUTES;
|
||||
private static final int DEF_SCAN_BATCH_SIZE = 100;
|
||||
private static final boolean DEF_IGNORE_TRUNCATE = true;
|
||||
private static final boolean DEF_TRUNCATE_AS_WARNING = false;
|
||||
private static final int DEF_META_RELOAD_PERIOD = 10;
|
||||
private static final TimeUnit DEF_META_RELOAD_UNIT = TimeUnit.SECONDS;
|
||||
private static final int DEF_MAX_FRAME_SIZE = 268435456 * 2; // 256 * 2 MB
|
||||
private static final int DEF_INDEX_SCAN_BATCH_SIZE = 20000;
|
||||
// if keyRange size per request exceeds this limit, the request might be too large to be accepted
|
||||
// by TiKV(maximum request size accepted by TiKV is around 1MB)
|
||||
private static final int MAX_REQUEST_KEY_RANGE_SIZE = 20000;
|
||||
private static final int DEF_INDEX_SCAN_CONCURRENCY = 5;
|
||||
private static final int DEF_TABLE_SCAN_CONCURRENCY = 512;
|
||||
private static final CommandPri DEF_COMMAND_PRIORITY = CommandPri.Low;
|
||||
private static final IsolationLevel DEF_ISOLATION_LEVEL = IsolationLevel.RC;
|
||||
private static final boolean DEF_SHOW_ROWID = false;
|
||||
private static final String DEF_DB_PREFIX = "";
|
||||
|
||||
private int timeout = DEF_TIMEOUT;
|
||||
private TimeUnit timeoutUnit = DEF_TIMEOUT_UNIT;
|
||||
private boolean ignoreTruncate = DEF_IGNORE_TRUNCATE;
|
||||
private boolean truncateAsWarning = DEF_TRUNCATE_AS_WARNING;
|
||||
private TimeUnit metaReloadUnit = DEF_META_RELOAD_UNIT;
|
||||
private int metaReloadPeriod = DEF_META_RELOAD_PERIOD;
|
||||
private int maxFrameSize = DEF_MAX_FRAME_SIZE;
|
||||
private List<HostAndPort> pdAddrs = new ArrayList<>();
|
||||
private int indexScanBatchSize = DEF_INDEX_SCAN_BATCH_SIZE;
|
||||
private int indexScanConcurrency = DEF_INDEX_SCAN_CONCURRENCY;
|
||||
private int tableScanConcurrency = DEF_TABLE_SCAN_CONCURRENCY;
|
||||
private CommandPri commandPriority = DEF_COMMAND_PRIORITY;
|
||||
private IsolationLevel isolationLevel = DEF_ISOLATION_LEVEL;
|
||||
private int maxRequestKeyRangeSize = MAX_REQUEST_KEY_RANGE_SIZE;
|
||||
private boolean showRowId = DEF_SHOW_ROWID;
|
||||
private String dbPrefix = DEF_DB_PREFIX;
|
||||
|
||||
public static TiConfiguration createDefault(String pdAddrsStr) {
|
||||
Objects.requireNonNull(pdAddrsStr, "pdAddrsStr is null");
|
||||
TiConfiguration conf = new TiConfiguration();
|
||||
conf.pdAddrs = strToHostAndPort(pdAddrsStr);
|
||||
return conf;
|
||||
}
|
||||
|
||||
private static List<HostAndPort> strToHostAndPort(String addressStr) {
|
||||
Objects.requireNonNull(addressStr);
|
||||
String[] addrs = addressStr.split(",");
|
||||
ImmutableList.Builder<HostAndPort> addrsBuilder = ImmutableList.builder();
|
||||
for (String addr : addrs) {
|
||||
addrsBuilder.add(HostAndPort.fromString(addr));
|
||||
}
|
||||
return addrsBuilder.build();
|
||||
}
|
||||
|
||||
public int getTimeout() {
|
||||
return timeout;
|
||||
}
|
||||
|
||||
public TiConfiguration setTimeout(int timeout) {
|
||||
this.timeout = timeout;
|
||||
return this;
|
||||
}
|
||||
|
||||
public TimeUnit getTimeoutUnit() {
|
||||
return timeoutUnit;
|
||||
}
|
||||
|
||||
public TimeUnit getMetaReloadPeriodUnit() {
|
||||
return metaReloadUnit;
|
||||
}
|
||||
|
||||
public TiConfiguration setMetaReloadPeriodUnit(TimeUnit timeUnit) {
|
||||
this.metaReloadUnit = timeUnit;
|
||||
return this;
|
||||
}
|
||||
|
||||
public TiConfiguration setMetaReloadPeriod(int metaReloadPeriod) {
|
||||
this.metaReloadPeriod = metaReloadPeriod;
|
||||
return this;
|
||||
}
|
||||
|
||||
public int getMetaReloadPeriod() {
|
||||
return metaReloadPeriod;
|
||||
}
|
||||
|
||||
public TiConfiguration setTimeoutUnit(TimeUnit timeoutUnit) {
|
||||
this.timeoutUnit = timeoutUnit;
|
||||
return this;
|
||||
}
|
||||
|
||||
public List<HostAndPort> getPdAddrs() {
|
||||
return pdAddrs;
|
||||
}
|
||||
|
||||
public int getScanBatchSize() {
|
||||
return DEF_SCAN_BATCH_SIZE;
|
||||
}
|
||||
|
||||
boolean isIgnoreTruncate() {
|
||||
return ignoreTruncate;
|
||||
}
|
||||
|
||||
public TiConfiguration setIgnoreTruncate(boolean ignoreTruncate) {
|
||||
this.ignoreTruncate = ignoreTruncate;
|
||||
return this;
|
||||
}
|
||||
|
||||
boolean isTruncateAsWarning() {
|
||||
return truncateAsWarning;
|
||||
}
|
||||
|
||||
public TiConfiguration setTruncateAsWarning(boolean truncateAsWarning) {
|
||||
this.truncateAsWarning = truncateAsWarning;
|
||||
return this;
|
||||
}
|
||||
|
||||
public int getMaxFrameSize() {
|
||||
return maxFrameSize;
|
||||
}
|
||||
|
||||
public TiConfiguration setMaxFrameSize(int maxFrameSize) {
|
||||
this.maxFrameSize = maxFrameSize;
|
||||
return this;
|
||||
}
|
||||
|
||||
public int getIndexScanBatchSize() {
|
||||
return indexScanBatchSize;
|
||||
}
|
||||
|
||||
public void setIndexScanBatchSize(int indexScanBatchSize) {
|
||||
this.indexScanBatchSize = indexScanBatchSize;
|
||||
}
|
||||
|
||||
public int getIndexScanConcurrency() {
|
||||
return indexScanConcurrency;
|
||||
}
|
||||
|
||||
public void setIndexScanConcurrency(int indexScanConcurrency) {
|
||||
this.indexScanConcurrency = indexScanConcurrency;
|
||||
}
|
||||
|
||||
public int getTableScanConcurrency() {
|
||||
return tableScanConcurrency;
|
||||
}
|
||||
|
||||
public void setTableScanConcurrency(int tableScanConcurrency) {
|
||||
this.tableScanConcurrency = tableScanConcurrency;
|
||||
}
|
||||
|
||||
public CommandPri getCommandPriority() {
|
||||
return commandPriority;
|
||||
}
|
||||
|
||||
public void setCommandPriority(CommandPri commandPriority) {
|
||||
this.commandPriority = commandPriority;
|
||||
}
|
||||
|
||||
public IsolationLevel getIsolationLevel() {
|
||||
return isolationLevel;
|
||||
}
|
||||
|
||||
public void setIsolationLevel(IsolationLevel isolationLevel) {
|
||||
this.isolationLevel = isolationLevel;
|
||||
}
|
||||
|
||||
public int getMaxRequestKeyRangeSize() {
|
||||
return maxRequestKeyRangeSize;
|
||||
}
|
||||
|
||||
public void setMaxRequestKeyRangeSize(int maxRequestKeyRangeSize) {
|
||||
if (maxRequestKeyRangeSize <= 0) {
|
||||
throw new IllegalArgumentException("Key range size cannot be less than 1");
|
||||
}
|
||||
this.maxRequestKeyRangeSize = maxRequestKeyRangeSize;
|
||||
}
|
||||
|
||||
public void setShowRowId(boolean flag) {
|
||||
this.showRowId = flag;
|
||||
}
|
||||
|
||||
public boolean ifShowRowId() {
|
||||
return showRowId;
|
||||
}
|
||||
|
||||
public String getDBPrefix() {
|
||||
return dbPrefix;
|
||||
}
|
||||
|
||||
public void setDBPrefix(String dbPrefix) {
|
||||
this.dbPrefix = dbPrefix;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,187 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv;
|
||||
|
||||
import com.google.common.net.HostAndPort;
|
||||
import com.google.common.util.concurrent.ThreadFactoryBuilder;
|
||||
import com.pingcap.tikv.catalog.Catalog;
|
||||
import com.pingcap.tikv.event.CacheInvalidateEvent;
|
||||
import com.pingcap.tikv.meta.TiTimestamp;
|
||||
import com.pingcap.tikv.region.RegionManager;
|
||||
import com.pingcap.tikv.util.ConcreteBackOffer;
|
||||
import io.grpc.ManagedChannel;
|
||||
import io.grpc.ManagedChannelBuilder;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Function;
|
||||
|
||||
public class TiSession implements AutoCloseable {
|
||||
private static final Map<String, ManagedChannel> connPool = new HashMap<>();
|
||||
private final TiConfiguration conf;
|
||||
private Function<CacheInvalidateEvent, Void> cacheInvalidateCallback;
|
||||
// below object creation is either heavy or making connection (pd), pending for lazy loading
|
||||
private volatile RegionManager regionManager;
|
||||
private volatile PDClient client;
|
||||
private volatile Catalog catalog;
|
||||
private volatile ExecutorService indexScanThreadPool;
|
||||
private volatile ExecutorService tableScanThreadPool;
|
||||
|
||||
public TiSession(TiConfiguration conf) {
|
||||
this.conf = conf;
|
||||
}
|
||||
|
||||
public TiConfiguration getConf() {
|
||||
return conf;
|
||||
}
|
||||
|
||||
public TiTimestamp getTimestamp() {
|
||||
return getPDClient().getTimestamp(ConcreteBackOffer.newTsoBackOff());
|
||||
}
|
||||
|
||||
public Snapshot createSnapshot() {
|
||||
return new Snapshot(getTimestamp(), this);
|
||||
}
|
||||
|
||||
public Snapshot createSnapshot(TiTimestamp ts) {
|
||||
return new Snapshot(ts, this);
|
||||
}
|
||||
|
||||
public PDClient getPDClient() {
|
||||
PDClient res = client;
|
||||
if (res == null) {
|
||||
synchronized (this) {
|
||||
if (client == null) {
|
||||
client = PDClient.createRaw(this);
|
||||
}
|
||||
res = client;
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
public Catalog getCatalog() {
|
||||
Catalog res = catalog;
|
||||
if (res == null) {
|
||||
synchronized (this) {
|
||||
if (catalog == null) {
|
||||
catalog =
|
||||
new Catalog(
|
||||
this::createSnapshot,
|
||||
conf.getMetaReloadPeriod(),
|
||||
conf.getMetaReloadPeriodUnit(),
|
||||
conf.ifShowRowId(),
|
||||
conf.getDBPrefix());
|
||||
}
|
||||
res = catalog;
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
public synchronized RegionManager getRegionManager() {
|
||||
RegionManager res = regionManager;
|
||||
if (res == null) {
|
||||
synchronized (this) {
|
||||
if (regionManager == null) {
|
||||
regionManager = new RegionManager(getPDClient());
|
||||
}
|
||||
res = regionManager;
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
public synchronized ManagedChannel getChannel(String addressStr) {
|
||||
ManagedChannel channel = connPool.get(addressStr);
|
||||
if (channel == null) {
|
||||
HostAndPort address;
|
||||
try {
|
||||
address = HostAndPort.fromString(addressStr);
|
||||
} catch (Exception e) {
|
||||
throw new IllegalArgumentException("failed to form address");
|
||||
}
|
||||
|
||||
// Channel should be lazy without actual connection until first call
|
||||
// So a coarse grain lock is ok here
|
||||
channel =
|
||||
ManagedChannelBuilder.forAddress(address.getHostText(), address.getPort())
|
||||
.maxInboundMessageSize(conf.getMaxFrameSize())
|
||||
.usePlaintext(true)
|
||||
.idleTimeout(60, TimeUnit.SECONDS)
|
||||
.build();
|
||||
connPool.put(addressStr, channel);
|
||||
}
|
||||
return channel;
|
||||
}
|
||||
|
||||
public ExecutorService getThreadPoolForIndexScan() {
|
||||
ExecutorService res = indexScanThreadPool;
|
||||
if (res == null) {
|
||||
synchronized (this) {
|
||||
if (indexScanThreadPool == null) {
|
||||
indexScanThreadPool =
|
||||
Executors.newFixedThreadPool(
|
||||
conf.getIndexScanConcurrency(),
|
||||
new ThreadFactoryBuilder().setDaemon(true).build());
|
||||
}
|
||||
res = indexScanThreadPool;
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
public ExecutorService getThreadPoolForTableScan() {
|
||||
ExecutorService res = tableScanThreadPool;
|
||||
if (res == null) {
|
||||
synchronized (this) {
|
||||
if (tableScanThreadPool == null) {
|
||||
tableScanThreadPool =
|
||||
Executors.newFixedThreadPool(
|
||||
conf.getTableScanConcurrency(),
|
||||
new ThreadFactoryBuilder().setDaemon(true).build());
|
||||
}
|
||||
res = tableScanThreadPool;
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
public static TiSession create(TiConfiguration conf) {
|
||||
return new TiSession(conf);
|
||||
}
|
||||
|
||||
public Function<CacheInvalidateEvent, Void> getCacheInvalidateCallback() {
|
||||
return cacheInvalidateCallback;
|
||||
}
|
||||
|
||||
/**
|
||||
* This is used for setting call back function to invalidate cache information
|
||||
*
|
||||
* @param callBackFunc callback function
|
||||
*/
|
||||
public void injectCallBackFunc(Function<CacheInvalidateEvent, Void> callBackFunc) {
|
||||
this.cacheInvalidateCallback = callBackFunc;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws Exception {
|
||||
getThreadPoolForTableScan().shutdownNow();
|
||||
getThreadPoolForIndexScan().shutdownNow();
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,226 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.catalog;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.google.common.util.concurrent.ThreadFactoryBuilder;
|
||||
import com.pingcap.tikv.Snapshot;
|
||||
import com.pingcap.tikv.meta.TiDBInfo;
|
||||
import com.pingcap.tikv.meta.TiTableInfo;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.ScheduledExecutorService;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.stream.Collectors;
|
||||
import org.apache.log4j.Logger;
|
||||
|
||||
public class Catalog implements AutoCloseable {
|
||||
private Supplier<Snapshot> snapshotProvider;
|
||||
private ScheduledExecutorService service;
|
||||
private CatalogCache metaCache;
|
||||
private final boolean showRowId;
|
||||
private final String dbPrefix;
|
||||
private final Logger logger = Logger.getLogger(this.getClass());
|
||||
|
||||
@Override
|
||||
public void close() throws Exception {
|
||||
if (service != null) {
|
||||
service.shutdown();
|
||||
}
|
||||
}
|
||||
|
||||
private static class CatalogCache {
|
||||
|
||||
private CatalogCache(CatalogTransaction transaction, String dbPrefix, boolean loadTables) {
|
||||
this.transaction = transaction;
|
||||
this.dbPrefix = dbPrefix;
|
||||
this.dbCache = loadDatabases(loadTables);
|
||||
this.tableCache = new ConcurrentHashMap<>();
|
||||
this.currentVersion = transaction.getLatestSchemaVersion();
|
||||
}
|
||||
|
||||
private final Map<String, TiDBInfo> dbCache;
|
||||
private final ConcurrentHashMap<TiDBInfo, Map<String, TiTableInfo>> tableCache;
|
||||
private CatalogTransaction transaction;
|
||||
private long currentVersion;
|
||||
private final String dbPrefix;
|
||||
|
||||
public CatalogTransaction getTransaction() {
|
||||
return transaction;
|
||||
}
|
||||
|
||||
public long getVersion() {
|
||||
return currentVersion;
|
||||
}
|
||||
|
||||
public TiDBInfo getDatabase(String name) {
|
||||
Objects.requireNonNull(name, "name is null");
|
||||
return dbCache.get(name.toLowerCase());
|
||||
}
|
||||
|
||||
public List<TiDBInfo> listDatabases() {
|
||||
return ImmutableList.copyOf(dbCache.values());
|
||||
}
|
||||
|
||||
public List<TiTableInfo> listTables(TiDBInfo db) {
|
||||
Map<String, TiTableInfo> tableMap = tableCache.get(db);
|
||||
if (tableMap == null) {
|
||||
tableMap = loadTables(db);
|
||||
}
|
||||
return ImmutableList.copyOf(tableMap.values());
|
||||
}
|
||||
|
||||
public TiTableInfo getTable(TiDBInfo db, String tableName) {
|
||||
Map<String, TiTableInfo> tableMap = tableCache.get(db);
|
||||
if (tableMap == null) {
|
||||
tableMap = loadTables(db);
|
||||
}
|
||||
return tableMap.get(tableName.toLowerCase());
|
||||
}
|
||||
|
||||
private Map<String, TiTableInfo> loadTables(TiDBInfo db) {
|
||||
List<TiTableInfo> tables = transaction.getTables(db.getId());
|
||||
ImmutableMap.Builder<String, TiTableInfo> builder = ImmutableMap.builder();
|
||||
for (TiTableInfo table : tables) {
|
||||
builder.put(table.getName().toLowerCase(), table);
|
||||
}
|
||||
Map<String, TiTableInfo> tableMap = builder.build();
|
||||
tableCache.put(db, tableMap);
|
||||
return tableMap;
|
||||
}
|
||||
|
||||
private Map<String, TiDBInfo> loadDatabases(boolean loadTables) {
|
||||
HashMap<String, TiDBInfo> newDBCache = new HashMap<>();
|
||||
|
||||
List<TiDBInfo> databases = transaction.getDatabases();
|
||||
databases.forEach(
|
||||
db -> {
|
||||
TiDBInfo newDBInfo = db.rename(dbPrefix + db.getName());
|
||||
newDBCache.put(newDBInfo.getName().toLowerCase(), newDBInfo);
|
||||
if (loadTables) {
|
||||
loadTables(newDBInfo);
|
||||
}
|
||||
});
|
||||
return newDBCache;
|
||||
}
|
||||
}
|
||||
|
||||
public Catalog(
|
||||
Supplier<Snapshot> snapshotProvider,
|
||||
int refreshPeriod,
|
||||
TimeUnit periodUnit,
|
||||
boolean showRowId,
|
||||
String dbPrefix) {
|
||||
this.snapshotProvider = Objects.requireNonNull(snapshotProvider, "Snapshot Provider is null");
|
||||
this.showRowId = showRowId;
|
||||
this.dbPrefix = dbPrefix;
|
||||
metaCache = new CatalogCache(new CatalogTransaction(snapshotProvider.get()), dbPrefix, false);
|
||||
service =
|
||||
Executors.newSingleThreadScheduledExecutor(
|
||||
new ThreadFactoryBuilder().setDaemon(true).build());
|
||||
service.scheduleAtFixedRate(
|
||||
() -> {
|
||||
// Wrap this with a try catch block in case schedule update fails
|
||||
try {
|
||||
reloadCache();
|
||||
} catch (Exception e) {
|
||||
logger.warn("Reload Cache failed", e);
|
||||
}
|
||||
},
|
||||
refreshPeriod,
|
||||
refreshPeriod,
|
||||
periodUnit);
|
||||
}
|
||||
|
||||
public void reloadCache(boolean loadTables) {
|
||||
Snapshot snapshot = snapshotProvider.get();
|
||||
CatalogTransaction newTrx = new CatalogTransaction(snapshot);
|
||||
long latestVersion = newTrx.getLatestSchemaVersion();
|
||||
if (latestVersion > metaCache.getVersion()) {
|
||||
metaCache = new CatalogCache(newTrx, dbPrefix, loadTables);
|
||||
}
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
public void reloadCache() {
|
||||
reloadCache(false);
|
||||
}
|
||||
|
||||
public List<TiDBInfo> listDatabases() {
|
||||
return metaCache.listDatabases();
|
||||
}
|
||||
|
||||
public List<TiTableInfo> listTables(TiDBInfo database) {
|
||||
Objects.requireNonNull(database, "database is null");
|
||||
if (showRowId) {
|
||||
return metaCache
|
||||
.listTables(database)
|
||||
.stream()
|
||||
.map(TiTableInfo::copyTableWithRowId)
|
||||
.collect(Collectors.toList());
|
||||
} else {
|
||||
return metaCache.listTables(database);
|
||||
}
|
||||
}
|
||||
|
||||
public TiDBInfo getDatabase(String dbName) {
|
||||
Objects.requireNonNull(dbName, "dbName is null");
|
||||
return metaCache.getDatabase(dbName);
|
||||
}
|
||||
|
||||
public TiTableInfo getTable(String dbName, String tableName) {
|
||||
TiDBInfo database = getDatabase(dbName);
|
||||
if (database == null) {
|
||||
return null;
|
||||
}
|
||||
return getTable(database, tableName);
|
||||
}
|
||||
|
||||
public TiTableInfo getTable(TiDBInfo database, String tableName) {
|
||||
Objects.requireNonNull(database, "database is null");
|
||||
Objects.requireNonNull(tableName, "tableName is null");
|
||||
TiTableInfo table = metaCache.getTable(database, tableName);
|
||||
if (showRowId) {
|
||||
return table.copyTableWithRowId();
|
||||
} else {
|
||||
return table;
|
||||
}
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
public TiTableInfo getTable(TiDBInfo database, long tableId) {
|
||||
Objects.requireNonNull(database, "database is null");
|
||||
Collection<TiTableInfo> tables = listTables(database);
|
||||
for (TiTableInfo table : tables) {
|
||||
if (table.getId() == tableId) {
|
||||
if (showRowId) {
|
||||
return table.copyTableWithRowId();
|
||||
} else {
|
||||
return table;
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,183 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.catalog;
|
||||
|
||||
import static com.google.common.base.Preconditions.checkArgument;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonParseException;
|
||||
import com.fasterxml.jackson.databind.JsonMappingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.protobuf.ByteString;
|
||||
import com.pingcap.tikv.Snapshot;
|
||||
import com.pingcap.tikv.codec.Codec.BytesCodec;
|
||||
import com.pingcap.tikv.codec.Codec.IntegerCodec;
|
||||
import com.pingcap.tikv.codec.CodecDataInput;
|
||||
import com.pingcap.tikv.codec.CodecDataOutput;
|
||||
import com.pingcap.tikv.codec.KeyUtils;
|
||||
import com.pingcap.tikv.exception.TiClientInternalException;
|
||||
import com.pingcap.tikv.kvproto.Kvrpcpb;
|
||||
import com.pingcap.tikv.meta.TiDBInfo;
|
||||
import com.pingcap.tikv.meta.TiTableInfo;
|
||||
import com.pingcap.tikv.util.Pair;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import org.apache.log4j.Logger;
|
||||
|
||||
public class CatalogTransaction {
|
||||
protected static final Logger logger = Logger.getLogger(Catalog.class);
|
||||
private final Snapshot snapshot;
|
||||
private final byte[] prefix;
|
||||
|
||||
private static final byte[] META_PREFIX = new byte[] {'m'};
|
||||
|
||||
private static final byte HASH_DATA_FLAG = 'h';
|
||||
private static final byte STR_DATA_FLAG = 's';
|
||||
|
||||
private static ByteString KEY_DB = ByteString.copyFromUtf8("DBs");
|
||||
private static ByteString KEY_TABLE = ByteString.copyFromUtf8("Table");
|
||||
private static ByteString KEY_SCHEMA_VERSION = ByteString.copyFromUtf8("SchemaVersionKey");
|
||||
|
||||
private static final String ENCODED_DB_PREFIX = "DB";
|
||||
|
||||
public CatalogTransaction(Snapshot snapshot) {
|
||||
this.snapshot = snapshot;
|
||||
this.prefix = META_PREFIX;
|
||||
}
|
||||
|
||||
private void encodeStringDataKey(CodecDataOutput cdo, byte[] key) {
|
||||
cdo.write(prefix);
|
||||
BytesCodec.writeBytes(cdo, key);
|
||||
IntegerCodec.writeULong(cdo, STR_DATA_FLAG);
|
||||
}
|
||||
|
||||
private void encodeHashDataKey(CodecDataOutput cdo, byte[] key, byte[] field) {
|
||||
encodeHashDataKeyPrefix(cdo, key);
|
||||
BytesCodec.writeBytes(cdo, field);
|
||||
}
|
||||
|
||||
private void encodeHashDataKeyPrefix(CodecDataOutput cdo, byte[] key) {
|
||||
cdo.write(prefix);
|
||||
BytesCodec.writeBytes(cdo, key);
|
||||
IntegerCodec.writeULong(cdo, HASH_DATA_FLAG);
|
||||
}
|
||||
|
||||
private Pair<ByteString, ByteString> decodeHashDataKey(ByteString rawKey) {
|
||||
checkArgument(
|
||||
KeyUtils.hasPrefix(rawKey, ByteString.copyFrom(prefix)),
|
||||
"invalid encoded hash data key prefix: " + new String(prefix));
|
||||
CodecDataInput cdi = new CodecDataInput(rawKey.toByteArray());
|
||||
cdi.skipBytes(prefix.length);
|
||||
byte[] key = BytesCodec.readBytes(cdi);
|
||||
long typeFlag = IntegerCodec.readULong(cdi);
|
||||
if (typeFlag != HASH_DATA_FLAG) {
|
||||
throw new TiClientInternalException("Invalid hash data flag: " + typeFlag);
|
||||
}
|
||||
byte[] field = BytesCodec.readBytes(cdi);
|
||||
return Pair.create(ByteString.copyFrom(key), ByteString.copyFrom(field));
|
||||
}
|
||||
|
||||
private ByteString hashGet(ByteString key, ByteString field) {
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
encodeHashDataKey(cdo, key.toByteArray(), field.toByteArray());
|
||||
return snapshot.get(cdo.toByteString());
|
||||
}
|
||||
|
||||
private ByteString bytesGet(ByteString key) {
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
encodeStringDataKey(cdo, key.toByteArray());
|
||||
return snapshot.get(cdo.toByteString());
|
||||
}
|
||||
|
||||
private List<Pair<ByteString, ByteString>> hashGetFields(ByteString key) {
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
encodeHashDataKeyPrefix(cdo, key.toByteArray());
|
||||
ByteString encodedKey = cdo.toByteString();
|
||||
|
||||
Iterator<Kvrpcpb.KvPair> iterator = snapshot.scan(encodedKey);
|
||||
List<Pair<ByteString, ByteString>> fields = new ArrayList<>();
|
||||
while (iterator.hasNext()) {
|
||||
Kvrpcpb.KvPair kv = iterator.next();
|
||||
if (!KeyUtils.hasPrefix(kv.getKey(), encodedKey)) {
|
||||
break;
|
||||
}
|
||||
fields.add(Pair.create(decodeHashDataKey(kv.getKey()).second, kv.getValue()));
|
||||
}
|
||||
|
||||
return fields;
|
||||
}
|
||||
|
||||
private static ByteString encodeDatabaseID(long id) {
|
||||
return ByteString.copyFrom(String.format("%s:%d", ENCODED_DB_PREFIX, id).getBytes());
|
||||
}
|
||||
|
||||
public long getLatestSchemaVersion() {
|
||||
ByteString versionBytes = bytesGet(KEY_SCHEMA_VERSION);
|
||||
CodecDataInput cdi = new CodecDataInput(versionBytes.toByteArray());
|
||||
return Long.parseLong(new String(cdi.toByteArray(), StandardCharsets.UTF_8));
|
||||
}
|
||||
|
||||
public List<TiDBInfo> getDatabases() {
|
||||
List<Pair<ByteString, ByteString>> fields = hashGetFields(KEY_DB);
|
||||
ImmutableList.Builder<TiDBInfo> builder = ImmutableList.builder();
|
||||
for (Pair<ByteString, ByteString> pair : fields) {
|
||||
builder.add(parseFromJson(pair.second, TiDBInfo.class));
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
public TiDBInfo getDatabase(long id) {
|
||||
ByteString dbKey = encodeDatabaseID(id);
|
||||
ByteString json = hashGet(KEY_DB, dbKey);
|
||||
if (json == null || json.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
return parseFromJson(json, TiDBInfo.class);
|
||||
}
|
||||
|
||||
public List<TiTableInfo> getTables(long dbId) {
|
||||
ByteString dbKey = encodeDatabaseID(dbId);
|
||||
List<Pair<ByteString, ByteString>> fields = hashGetFields(dbKey);
|
||||
ImmutableList.Builder<TiTableInfo> builder = ImmutableList.builder();
|
||||
for (Pair<ByteString, ByteString> pair : fields) {
|
||||
if (KeyUtils.hasPrefix(pair.first, KEY_TABLE)) {
|
||||
builder.add(parseFromJson(pair.second, TiTableInfo.class));
|
||||
}
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
public static <T> T parseFromJson(ByteString json, Class<T> cls) {
|
||||
Objects.requireNonNull(json, "json is null");
|
||||
Objects.requireNonNull(cls, "cls is null");
|
||||
|
||||
logger.debug(String.format("Parse Json %s : %s", cls.getSimpleName(), json.toStringUtf8()));
|
||||
ObjectMapper mapper = new ObjectMapper();
|
||||
try {
|
||||
return mapper.readValue(json.toStringUtf8(), cls);
|
||||
} catch (JsonParseException | JsonMappingException e) {
|
||||
String errMsg =
|
||||
String.format(
|
||||
"Invalid JSON value for Type %s: %s\n", cls.getSimpleName(), json.toStringUtf8());
|
||||
throw new TiClientInternalException(errMsg, e);
|
||||
} catch (Exception e1) {
|
||||
throw new TiClientInternalException("Error parsing Json", e1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,656 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.codec;
|
||||
|
||||
import static com.google.common.base.Preconditions.checkArgument;
|
||||
|
||||
import com.pingcap.tikv.exception.InvalidCodecFormatException;
|
||||
import gnu.trove.list.array.TIntArrayList;
|
||||
import java.math.BigDecimal;
|
||||
import java.sql.Date;
|
||||
import java.util.Arrays;
|
||||
import org.joda.time.*;
|
||||
|
||||
public class Codec {

  // One-byte type-prefix flags written ahead of each encoded value so a
  // decoder knows how the following bytes are laid out.
  // NOTE(review): these values appear to mirror TiDB's server-side codec
  // flags — confirm before changing any of them.
  public static final int NULL_FLAG = 0;
  public static final int BYTES_FLAG = 1;
  public static final int COMPACT_BYTES_FLAG = 2;
  public static final int INT_FLAG = 3;
  public static final int UINT_FLAG = 4;
  public static final int FLOATING_FLAG = 5;
  public static final int DECIMAL_FLAG = 6;
  public static final int DURATION_FLAG = 7;
  public static final int VARINT_FLAG = 8;
  public static final int UVARINT_FLAG = 9;
  public static final int JSON_FLAG = 10;
  public static final int MAX_FLAG = 250;

  /**
   * Checks whether a type flag marks an encoded NULL value.
   *
   * @param flag the one-byte type flag read from an encoded buffer
   * @return true if the flag is {@link #NULL_FLAG}
   */
  public static boolean isNullFlag(int flag) {
    return flag == NULL_FLAG;
  }
|
||||
|
||||
public static class IntegerCodec {
|
||||
private static final long SIGN_MASK = ~Long.MAX_VALUE;
|
||||
|
||||
private static long flipSignBit(long v) {
|
||||
return v ^ SIGN_MASK;
|
||||
}
|
||||
|
||||
/**
|
||||
* Encoding a long value to byte buffer with type flag at the beginning
|
||||
*
|
||||
* @param cdo For outputting data in bytes array
|
||||
* @param lVal The data to encode
|
||||
* @param comparable If the output should be memory comparable without decoding. In real TiDB
|
||||
* use case, if used in Key encoding, we output memory comparable format otherwise not
|
||||
*/
|
||||
public static void writeLongFully(CodecDataOutput cdo, long lVal, boolean comparable) {
|
||||
if (comparable) {
|
||||
cdo.writeByte(INT_FLAG);
|
||||
writeLong(cdo, lVal);
|
||||
} else {
|
||||
cdo.writeByte(VARINT_FLAG);
|
||||
writeVarLong(cdo, lVal);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Encoding a unsigned long value to byte buffer with type flag at the beginning
|
||||
*
|
||||
* @param cdo For outputting data in bytes array
|
||||
* @param lVal The data to encode, note that long is treated as unsigned
|
||||
* @param comparable If the output should be memory comparable without decoding. In real TiDB
|
||||
* use case, if used in Key encoding, we output memory comparable format otherwise not
|
||||
*/
|
||||
public static void writeULongFully(CodecDataOutput cdo, long lVal, boolean comparable) {
|
||||
if (comparable) {
|
||||
cdo.writeByte(UINT_FLAG);
|
||||
writeULong(cdo, lVal);
|
||||
} else {
|
||||
cdo.writeByte(UVARINT_FLAG);
|
||||
writeUVarLong(cdo, lVal);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode long value without type flag at the beginning The signed bit is flipped for memory
|
||||
* comparable purpose
|
||||
*
|
||||
* @param cdo For outputting data in bytes array
|
||||
* @param lVal The data to encode
|
||||
*/
|
||||
public static void writeLong(CodecDataOutput cdo, long lVal) {
|
||||
cdo.writeLong(flipSignBit(lVal));
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode long value without type flag at the beginning
|
||||
*
|
||||
* @param cdo For outputting data in bytes array
|
||||
* @param lVal The data to encode
|
||||
*/
|
||||
public static void writeULong(CodecDataOutput cdo, long lVal) {
|
||||
cdo.writeLong(lVal);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode var-length long, same as go's binary.PutVarint
|
||||
*
|
||||
* @param cdo For outputting data in bytes array
|
||||
* @param value The data to encode
|
||||
*/
|
||||
static void writeVarLong(CodecDataOutput cdo, long value) {
|
||||
long ux = value << 1;
|
||||
if (value < 0) {
|
||||
ux = ~ux;
|
||||
}
|
||||
writeUVarLong(cdo, ux);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode Data as var-length long, the same as go's binary.PutUvarint
|
||||
*
|
||||
* @param cdo For outputting data in bytes array
|
||||
* @param value The data to encode
|
||||
*/
|
||||
static void writeUVarLong(CodecDataOutput cdo, long value) {
|
||||
while ((value - 0x80) >= 0) {
|
||||
cdo.writeByte((byte) value | 0x80);
|
||||
value >>>= 7;
|
||||
}
|
||||
cdo.writeByte((byte) value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode as signed long, assuming encoder flips signed bit for memory comparable
|
||||
*
|
||||
* @param cdi source of data
|
||||
* @return decoded signed long value
|
||||
*/
|
||||
public static long readLong(CodecDataInput cdi) {
|
||||
return flipSignBit(cdi.readLong());
|
||||
}
|
||||
|
||||
public static long readPartialLong(CodecDataInput cdi) {
|
||||
return flipSignBit(cdi.readPartialLong());
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode as unsigned long without any binary manipulation
|
||||
*
|
||||
* @param cdi source of data
|
||||
* @return decoded unsigned long value
|
||||
*/
|
||||
public static long readULong(CodecDataInput cdi) {
|
||||
return cdi.readLong();
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode as var-length long, the same as go's binary.Varint
|
||||
*
|
||||
* @param cdi source of data
|
||||
* @return decoded signed long value
|
||||
*/
|
||||
public static long readVarLong(CodecDataInput cdi) {
|
||||
long ux = readUVarLong(cdi);
|
||||
long x = ux >>> 1;
|
||||
if ((ux & 1) != 0) {
|
||||
x = ~x;
|
||||
}
|
||||
return x;
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode as var-length unsigned long, the same as go's binary.Uvarint
|
||||
*
|
||||
* @param cdi source of data
|
||||
* @return decoded unsigned long value
|
||||
*/
|
||||
public static long readUVarLong(CodecDataInput cdi) {
|
||||
long x = 0;
|
||||
int s = 0;
|
||||
for (int i = 0; !cdi.eof(); i++) {
|
||||
long b = cdi.readUnsignedByte();
|
||||
if ((b - 0x80) < 0) {
|
||||
if (i > 9 || i == 9 && b > 1) {
|
||||
throw new InvalidCodecFormatException("readUVarLong overflow");
|
||||
}
|
||||
return x | b << s;
|
||||
}
|
||||
x |= (b & 0x7f) << s;
|
||||
s += 7;
|
||||
}
|
||||
throw new InvalidCodecFormatException("readUVarLong encountered unfinished data");
|
||||
}
|
||||
}
|
||||
|
||||
  public static class BytesCodec {

    // Memory-comparable encoding works on 8-byte groups, each followed by a
    // one-byte marker, so every encoded chunk occupies GRP_SIZE + 1 bytes.
    private static final int GRP_SIZE = 8;
    // All-zero padding appended to the final, partially filled group.
    private static final byte[] PADS = new byte[GRP_SIZE];
    // Marker base: the marker byte written is MARKER minus the pad count.
    private static final int MARKER = 0xFF;
    // Expected value of every padding byte in the non-reversed form.
    private static final byte PAD = (byte) 0x0;

    /** Writes the bytes verbatim, with no length prefix or framing. */
    public static void writeBytesRaw(CodecDataOutput cdo, byte[] data) {
      cdo.write(data);
    }

    /** Writes the BYTES type flag, then the group-encoded (memory-comparable) bytes. */
    public static void writeBytesFully(CodecDataOutput cdo, byte[] data) {
      cdo.write(Codec.BYTES_FLAG);
      BytesCodec.writeBytes(cdo, data);
    }

    // writeBytes guarantees the encoded value is in ascending order for comparison,
    // encoding with the following rule:
    //  [group1][marker1]...[groupN][markerN]
    //  group is 8 bytes slice which is padding with 0.
    //  marker is `0xFF - padding 0 count`
    // For example:
    //   [] -> [0, 0, 0, 0, 0, 0, 0, 0, 247]
    //   [1, 2, 3] -> [1, 2, 3, 0, 0, 0, 0, 0, 250]
    //   [1, 2, 3, 0] -> [1, 2, 3, 0, 0, 0, 0, 0, 251]
    //   [1, 2, 3, 4, 5, 6, 7, 8] -> [1, 2, 3, 4, 5, 6, 7, 8, 255, 0, 0, 0, 0, 0, 0, 0, 0, 247]
    // Refer: https://github.com/facebook/mysql-5.6/wiki/MyRocks-record-format#memcomparable-format
    public static void writeBytes(CodecDataOutput cdo, byte[] data) {
      // `<=` is deliberate: when data.length is a multiple of GRP_SIZE
      // (including 0) an extra, fully padded group is still emitted; its
      // non-zero pad count is what terminates the stream for the decoder.
      for (int i = 0; i <= data.length; i += GRP_SIZE) {
        int remain = data.length - i;
        int padCount = 0;
        if (remain >= GRP_SIZE) {
          cdo.write(data, i, GRP_SIZE);
        } else {
          padCount = GRP_SIZE - remain;
          cdo.write(data, i, data.length - i);
          cdo.write(PADS, 0, padCount);
        }
        cdo.write((byte) (MARKER - padCount));
      }
    }

    /** Writes the COMPACT_BYTES type flag, then a varint length and the raw bytes. */
    public static void writeCompactBytesFully(CodecDataOutput cdo, byte[] data) {
      cdo.write(Codec.COMPACT_BYTES_FLAG);
      writeCompactBytes(cdo, data);
    }

    /**
     * Write bytes in a compact form: a varint-encoded length followed by the raw bytes.
     *
     * @param cdo destination of data.
     * @param data is value that will be written into cdo.
     */
    static void writeCompactBytes(CodecDataOutput cdo, byte[] data) {
      int length = data.length;
      IntegerCodec.writeVarLong(cdo, length);
      cdo.write(data);
    }

    // readBytes decodes bytes which is encoded by EncodeBytes before,
    // returns the decoded value; the input is left positioned after the
    // terminal group.
    public static byte[] readBytes(CodecDataInput cdi) {
      return readBytes(cdi, false);
    }

    /** Reads a varint length followed by that many raw bytes (compact form). */
    public static byte[] readCompactBytes(CodecDataInput cdi) {
      int size = (int) IntegerCodec.readVarLong(cdi);
      return readCompactBytes(cdi, size);
    }

    private static byte[] readCompactBytes(CodecDataInput cdi, int size) {
      byte[] data = new byte[size];
      for (int i = 0; i < size; i++) {
        data[i] = cdi.readByte();
      }
      return data;
    }

    /**
     * Decodes group-encoded bytes written by {@link #writeBytes}. When {@code reverse} is set the
     * data was written bitwise-inverted for descending order, so the pad byte is 0xFF, the marker
     * directly encodes the pad count, and the output is flipped back before returning.
     */
    private static byte[] readBytes(CodecDataInput cdi, boolean reverse) {
      CodecDataOutput cdo = new CodecDataOutput();
      while (true) {
        byte[] groupBytes = new byte[GRP_SIZE + 1];

        cdi.readFully(groupBytes, 0, GRP_SIZE + 1);
        byte[] group = Arrays.copyOfRange(groupBytes, 0, GRP_SIZE);

        int padCount;
        int marker = Byte.toUnsignedInt(groupBytes[GRP_SIZE]);

        if (reverse) {
          padCount = marker;
        } else {
          padCount = MARKER - marker;
        }

        checkArgument(padCount <= GRP_SIZE);
        int realGroupSize = GRP_SIZE - padCount;
        cdo.write(group, 0, realGroupSize);

        // A non-zero pad count marks the final group of the value.
        if (padCount != 0) {
          byte padByte = PAD;
          if (reverse) {
            padByte = (byte) MARKER;
          }
          // Check validity of padding bytes.
          for (int i = realGroupSize; i < group.length; i++) {
            byte b = group[i];
            checkArgument(padByte == b);
          }
          break;
        }
      }
      byte[] bytes = cdo.toBytes();
      if (reverse) {
        for (int i = 0; i < bytes.length; i++) {
          bytes[i] = (byte) ~bytes[i];
        }
      }
      return bytes;
    }
  }
|
||||
|
||||
public static class RealCodec {
|
||||
|
||||
private static final long signMask = 0x8000000000000000L;
|
||||
|
||||
/**
|
||||
* Decode as float
|
||||
*
|
||||
* @param cdi source of data
|
||||
* @return decoded unsigned long value
|
||||
*/
|
||||
public static double readDouble(CodecDataInput cdi) {
|
||||
long u = IntegerCodec.readULong(cdi);
|
||||
if (u < 0) {
|
||||
u &= Long.MAX_VALUE;
|
||||
} else {
|
||||
u = ~u;
|
||||
}
|
||||
return Double.longBitsToDouble(u);
|
||||
}
|
||||
|
||||
private static long encodeDoubleToCmpLong(double val) {
|
||||
long u = Double.doubleToRawLongBits(val);
|
||||
if (val >= 0) {
|
||||
u |= signMask;
|
||||
} else {
|
||||
u = ~u;
|
||||
}
|
||||
return u;
|
||||
}
|
||||
|
||||
public static void writeDoubleFully(CodecDataOutput cdo, double val) {
|
||||
cdo.writeByte(FLOATING_FLAG);
|
||||
writeDouble(cdo, val);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encoding a double value to byte buffer
|
||||
*
|
||||
* @param cdo For outputting data in bytes array
|
||||
* @param val The data to encode
|
||||
*/
|
||||
public static void writeDouble(CodecDataOutput cdo, double val) {
|
||||
IntegerCodec.writeULong(cdo, encodeDoubleToCmpLong(val));
|
||||
}
|
||||
}
|
||||
|
||||
  public static class DecimalCodec {

    /**
     * read a decimal value from CodecDataInput
     *
     * @param cdi cdi is source data.
     * @return the decoded value as a BigDecimal
     * @throws IllegalArgumentException if fewer than 3 bytes remain (precision byte, frac byte,
     *     and at least one digit byte are required)
     */
    public static BigDecimal readDecimal(CodecDataInput cdi) {
      if (cdi.available() < 3) {
        throw new IllegalArgumentException("insufficient bytes to read value");
      }

      // 64 should be larger enough for avoiding unnecessary growth.
      TIntArrayList data = new TIntArrayList(64);
      int precision = cdi.readUnsignedByte();
      int frac = cdi.readUnsignedByte();
      // Read up to precision + frac digit bytes (or until the buffer ends);
      // this may over-read past the decimal's actual binary length.
      int length = precision + frac;
      int curPos = cdi.size() - cdi.available();
      for (int i = 0; i < length; i++) {
        if (cdi.eof()) {
          break;
        }
        data.add(cdi.readUnsignedByte());
      }

      MyDecimal dec = new MyDecimal();
      // binSize is presumably the number of bytes fromBin actually consumed
      // (TODO confirm in MyDecimal); mark/reset repositions the stream to sit
      // exactly past the decimal, compensating for the over-read above.
      int binSize = dec.fromBin(precision, frac, data.toArray());
      cdi.mark(curPos + binSize);
      cdi.reset();
      return dec.toDecimal();
    }

    /**
     * write a MyDecimal value into CodecDataOutput
     *
     * @param cdo cdo is destination data.
     * @param dec is decimal value that will be written into cdo.
     */
    static void writeDecimal(CodecDataOutput cdo, MyDecimal dec) {
      int[] data = dec.toBin(dec.precision(), dec.frac());
      cdo.writeByte(dec.precision());
      cdo.writeByte(dec.frac());
      for (int aData : data) {
        // Only the low 8 bits of each element are significant.
        cdo.writeByte(aData & 0xFF);
      }
    }

    /** Writes the DECIMAL type flag followed by the encoded decimal. */
    public static void writeDecimalFully(CodecDataOutput cdo, BigDecimal val) {
      cdo.writeByte(DECIMAL_FLAG);
      writeDecimal(cdo, val);
    }

    /**
     * Encoding a BigDecimal value to byte buffer (no type flag).
     *
     * @param cdo For outputting data in bytes array
     * @param val The data to encode
     */
    public static void writeDecimal(CodecDataOutput cdo, BigDecimal val) {
      MyDecimal dec = new MyDecimal();
      dec.fromString(val.toPlainString());
      writeDecimal(cdo, dec);
    }
  }
|
||||
|
||||
  public static class DateTimeCodec {

    /**
     * Encode a DateTime to a packed long converting to specific timezone
     *
     * @param dateTime dateTime that need to be encoded.
     * @param tz timezone used for converting to localDateTime
     * @return a packed long.
     */
    static long toPackedLong(DateTime dateTime, DateTimeZone tz) {
      LocalDateTime localDateTime = dateTime.withZone(tz).toLocalDateTime();
      return toPackedLong(
          localDateTime.getYear(),
          localDateTime.getMonthOfYear(),
          localDateTime.getDayOfMonth(),
          localDateTime.getHourOfDay(),
          localDateTime.getMinuteOfHour(),
          localDateTime.getSecondOfMinute(),
          // Joda only tracks milliseconds; scale to the microsecond field.
          localDateTime.getMillisOfSecond() * 1000);
    }

    /**
     * Encode a date/time parts to a packed long.
     *
     * <p>Bit layout, low to high: 24 bits of microseconds, then 17 bits of
     * time-of-day ({@code hour<<12 | minute<<6 | second}), then the date as
     * {@code (year*13 + month)<<5 | day}.
     *
     * @return a packed long.
     */
    static long toPackedLong(
        int year, int month, int day, int hour, int minute, int second, int micro) {
      long ymd = (year * 13 + month) << 5 | day;
      long hms = hour << 12 | minute << 6 | second;
      return ((ymd << 17 | hms) << 24) | micro;
    }

    /**
     * Read datetime from packed Long which contains all parts of a datetime namely, year, month,
     * day and hour, min and sec, millisec. The original representation does not indicate any
     * timezone information In Timestamp type, it should be interpreted as UTC while in DateType it
     * is interpreted as local timezone
     *
     * @param packed long value that packs date / time parts
     * @param tz timezone to interpret datetime parts
     * @return decoded DateTime using provided timezone, or null for a zero packed value
     */
    static DateTime fromPackedLong(long packed, DateTimeZone tz) {
      // TODO: As for JDBC behavior, it can be configured to "round" or "toNull"
      // for now we didn't pass in session so we do a toNull behavior
      if (packed == 0) {
        return null;
      }
      // Unpack in the reverse order of toPackedLong.
      long ymdhms = packed >> 24;
      long ymd = ymdhms >> 17;
      int day = (int) (ymd & ((1 << 5) - 1));
      long ym = ymd >> 5;
      int month = (int) (ym % 13);
      int year = (int) (ym / 13);

      int hms = (int) (ymdhms & ((1 << 17) - 1));
      int second = hms & ((1 << 6) - 1);
      int minute = (hms >> 6) & ((1 << 6) - 1);
      int hour = hms >> 12;
      int microsec = (int) (packed % (1 << 24));

      try {
        return new DateTime(year, month, day, hour, minute, second, microsec / 1000, tz);
      } catch (IllegalInstantException e) {
        // The wall-clock time does not exist in tz (presumably a DST gap);
        // fall back to midnight of that day plus the time-of-day in millis.
        LocalDateTime localDateTime =
            new LocalDateTime(year, month, day, hour, minute, second, microsec / 1000);
        DateTime dt = localDateTime.toLocalDate().toDateTimeAtStartOfDay(tz);
        long millis = dt.getMillis() + localDateTime.toLocalTime().getMillisOfDay();
        return new DateTime(millis, tz);
      }
    }

    /**
     * Encode DateTime as packed long converting into specified timezone All timezone conversion
     * should be done beforehand
     *
     * @param cdo encoding output
     * @param dateTime value to encode
     * @param tz timezone used to converting local time
     */
    public static void writeDateTimeFully(CodecDataOutput cdo, DateTime dateTime, DateTimeZone tz) {
      long val = DateTimeCodec.toPackedLong(dateTime, tz);
      IntegerCodec.writeULongFully(cdo, val, true);
    }

    /**
     * Encode DateTime as packed long converting into specified timezone All timezone conversion
     * should be done beforehand The encoded value has no data type flag
     *
     * @param cdo encoding output
     * @param dateTime value to encode
     * @param tz timezone used to converting local time
     */
    public static void writeDateTimeProto(CodecDataOutput cdo, DateTime dateTime, DateTimeZone tz) {
      long val = DateTimeCodec.toPackedLong(dateTime, tz);
      IntegerCodec.writeULong(cdo, val);
    }

    /**
     * Read datetime from packed Long encoded as unsigned var-len integer converting into specified
     * timezone
     *
     * @see DateTimeCodec#fromPackedLong(long, DateTimeZone)
     * @param cdi codec buffer input
     * @param tz timezone to interpret datetime parts
     * @return decoded DateTime using provided timezone
     */
    public static DateTime readFromUVarInt(CodecDataInput cdi, DateTimeZone tz) {
      return DateTimeCodec.fromPackedLong(IntegerCodec.readUVarLong(cdi), tz);
    }

    /**
     * Read datetime from packed Long as unsigned fixed-len integer
     *
     * @see DateTimeCodec#fromPackedLong(long, DateTimeZone)
     * @param cdi codec buffer input
     * @param tz timezone to interpret datetime parts
     * @return decoded DateTime using provided timezone
     */
    public static DateTime readFromUInt(CodecDataInput cdi, DateTimeZone tz) {
      return DateTimeCodec.fromPackedLong(IntegerCodec.readULong(cdi), tz);
    }
  }
|
||||
|
||||
  public static class DateCodec {

    /**
     * Encode a UTC Date to a packed long converting to specific timezone
     *
     * @param date date that need to be encoded.
     * @param tz timezone used for converting to localDate
     * @return a packed long.
     */
    static long toPackedLong(Date date, DateTimeZone tz) {
      return toPackedLong(date.getTime(), tz);
    }

    /** Converts epoch milliseconds to the local date in {@code tz}, then packs it. */
    static long toPackedLong(long utcMillsTs, DateTimeZone tz) {
      LocalDate date = new LocalDate(utcMillsTs, tz);
      return toPackedLong(date);
    }

    /** Packs a LocalDate's year/month/day fields into the date layout. */
    static long toPackedLong(LocalDate date) {
      return Codec.DateCodec.toPackedLong(
          date.getYear(), date.getMonthOfYear(), date.getDayOfMonth());
    }

    /**
     * Encode a date part to a packed long.
     *
     * <p>Same {@code (year*13 + month)<<5 | day} layout as DateTimeCodec, shifted left 41 bits
     * (17 time bits + 24 microsecond bits), leaving the time fields zero.
     *
     * @return a packed long.
     */
    static long toPackedLong(int year, int month, int day) {
      long ymd = (year * 13 + month) << 5 | day;
      return ymd << 41;
    }

    /**
     * Unpacks the date part of a packed long.
     *
     * @param packed packed long produced by {@link #toPackedLong(int, int, int)}
     * @return the decoded date, or null for a zero packed value
     */
    static LocalDate fromPackedLong(long packed) {
      // TODO: As for JDBC behavior, it can be configured to "round" or "toNull"
      // for now we didn't pass in session so we do a toNull behavior
      if (packed == 0) {
        return null;
      }
      long ymd = packed >> 41;
      int day = (int) (ymd & ((1 << 5) - 1)); // low 5 bits hold the day
      long ym = ymd >> 5;
      int month = (int) (ym % 13);
      int year = (int) (ym / 13);

      // A null chronology selects Joda's default (ISO).
      return new LocalDate(year, month, day, null);
    }

    /**
     * Encode Date as packed long converting into specified timezone All timezone conversion should
     * be done beforehand
     *
     * @param cdo encoding output
     * @param date value to encode
     * @param tz timezone used to converting local time
     */
    public static void writeDateFully(CodecDataOutput cdo, Date date, DateTimeZone tz) {
      long val = DateCodec.toPackedLong(date, tz);
      IntegerCodec.writeULongFully(cdo, val, true);
    }

    /**
     * Encode Date as packed long converting into specified timezone All timezone conversion should
     * be done beforehand The encoded value has no data type flag
     *
     * @param cdo encoding output
     * @param date value to encode
     * @param tz timezone used to converting local time
     */
    public static void writeDateProto(CodecDataOutput cdo, Date date, DateTimeZone tz) {
      long val = DateCodec.toPackedLong(date, tz);
      IntegerCodec.writeULong(cdo, val);
    }

    /**
     * Read date from packed Long encoded as unsigned var-len integer converting into specified
     * timezone
     *
     * @see DateCodec#fromPackedLong(long)
     * @param cdi codec buffer input
     * @return decoded LocalDate
     */
    public static LocalDate readFromUVarInt(CodecDataInput cdi) {
      return DateCodec.fromPackedLong(IntegerCodec.readUVarLong(cdi));
    }

    /**
     * Read date from packed Long as unsigned fixed-len integer
     *
     * @see DateCodec#fromPackedLong(long)
     * @param cdi codec buffer input
     * @return decoded LocalDate
     */
    public static LocalDate readFromUInt(CodecDataInput cdi) {
      return DateCodec.fromPackedLong(IntegerCodec.readULong(cdi));
    }
  }
|
||||
}
|
||||
|
|
@ -0,0 +1,321 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.codec;
|
||||
|
||||
import com.google.protobuf.ByteString;
|
||||
import java.io.*;
|
||||
|
||||
public class CodecDataInput implements DataInput {
  /**
   * A copy of ByteArrayInputStream without synchronization for faster decode.
   *
   * @see ByteArrayInputStream
   */
  private class UnSyncByteArrayInputStream extends InputStream {
    protected byte buf[]; // backing data, shared with the caller (not copied)
    protected int pos; // index of the next byte to read
    protected int mark = 0; // position restored by reset()
    protected int count; // index one past the last valid byte

    UnSyncByteArrayInputStream(byte buf[]) {
      this.buf = buf;
      this.pos = 0;
      this.count = buf.length;
    }

    public UnSyncByteArrayInputStream(byte buf[], int offset, int length) {
      this.buf = buf;
      this.pos = offset;
      this.count = Math.min(offset + length, buf.length);
      this.mark = offset;
    }

    // Returns the next byte as 0-255, or -1 at end of data.
    public int read() {
      return (pos < count) ? (buf[pos++] & 0xff) : -1;
    }

    public int read(byte b[], int off, int len) {
      if (b == null) {
        throw new NullPointerException();
      } else if (off < 0 || len < 0 || len > b.length - off) {
        throw new IndexOutOfBoundsException();
      }

      if (pos >= count) {
        return -1; // already exhausted
      }

      int avail = count - pos;
      if (len > avail) {
        len = avail; // clamp to what is actually left
      }
      if (len <= 0) {
        return 0;
      }
      System.arraycopy(buf, pos, b, off, len);
      pos += len;
      return len;
    }

    public long skip(long n) {
      long k = count - pos;
      if (n < k) {
        k = n < 0 ? 0 : n; // never skip backwards
      }

      pos += k;
      return k;
    }

    public int available() {
      return count - pos;
    }

    public boolean markSupported() {
      return true;
    }

    // InputStream semantics: remember the current position. The
    // readAheadLimit argument is irrelevant for an in-memory stream.
    public void mark(int readAheadLimit) {
      mark = pos;
    }

    public void reset() {
      pos = mark;
    }

    public void close() throws IOException {}
  }
|
||||
|
||||
  // DataInputStream view used for all primitive reads.
  private final DataInputStream inputStream;
  // The raw in-memory stream, kept so position/mark/reset can be managed.
  private final UnSyncByteArrayInputStream backingStream;
  // The full input; never copied, so size() and toByteArray() are cheap.
  private final byte[] backingBuffer;

  public CodecDataInput(ByteString data) {
    this(data.toByteArray());
  }

  public CodecDataInput(byte[] buf) {
    backingBuffer = buf;
    // Decimal decoding (MyDecimal) may read past the end of the value; the
    // caller then marks the correct end position and resets to it. The
    // override below makes mark(int) store an ABSOLUTE position rather than
    // the current one (as InputStream#mark would), enabling that pattern.
    backingStream =
        new UnSyncByteArrayInputStream(buf) {
          @Override
          public void mark(int givenPos) {
            mark = givenPos;
          }
        };
    inputStream = new DataInputStream(backingStream);
  }
|
||||
|
||||
@Override
|
||||
public void readFully(byte[] b) {
|
||||
try {
|
||||
inputStream.readFully(b);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFully(byte[] b, int off, int len) {
|
||||
try {
|
||||
inputStream.readFully(b, off, len);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int skipBytes(int n) {
|
||||
try {
|
||||
return inputStream.skipBytes(n);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean readBoolean() {
|
||||
try {
|
||||
return inputStream.readBoolean();
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte readByte() {
|
||||
try {
|
||||
return inputStream.readByte();
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int readUnsignedByte() {
|
||||
try {
|
||||
return inputStream.readUnsignedByte();
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public short readShort() {
|
||||
try {
|
||||
return inputStream.readShort();
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int readUnsignedShort() {
|
||||
try {
|
||||
return inputStream.readUnsignedShort();
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
public int readPartialUnsignedShort() {
|
||||
try {
|
||||
byte readBuffer[] = new byte[2];
|
||||
inputStream.read(readBuffer, 0, 2);
|
||||
return ((readBuffer[0] & 0xff) << 8) + ((readBuffer[1] & 0xff) << 0);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public char readChar() {
|
||||
try {
|
||||
return inputStream.readChar();
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int readInt() {
|
||||
try {
|
||||
return inputStream.readInt();
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long readLong() {
|
||||
try {
|
||||
return inputStream.readLong();
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
public final long readPartialLong() {
|
||||
try {
|
||||
byte readBuffer[] = new byte[8];
|
||||
inputStream.read(readBuffer, 0, 8);
|
||||
return (((long) readBuffer[0] << 56)
|
||||
+ ((long) (readBuffer[1] & 255) << 48)
|
||||
+ ((long) (readBuffer[2] & 255) << 40)
|
||||
+ ((long) (readBuffer[3] & 255) << 32)
|
||||
+ ((long) (readBuffer[4] & 255) << 24)
|
||||
+ ((readBuffer[5] & 255) << 16)
|
||||
+ ((readBuffer[6] & 255) << 8)
|
||||
+ ((readBuffer[7] & 255) << 0));
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public float readFloat() {
|
||||
try {
|
||||
return inputStream.readFloat();
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public double readDouble() {
|
||||
try {
|
||||
return inputStream.readDouble();
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String readLine() {
|
||||
try {
|
||||
return inputStream.readLine();
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String readUTF() {
|
||||
try {
|
||||
return inputStream.readUTF();
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
public int peekByte() {
|
||||
mark(currentPos());
|
||||
int b = readByte() & 0xFF;
|
||||
reset();
|
||||
return b;
|
||||
}
|
||||
|
||||
public int currentPos() {
|
||||
return size() - available();
|
||||
}
|
||||
|
||||
public void mark(int givenPos) {
|
||||
this.backingStream.mark(givenPos);
|
||||
}
|
||||
|
||||
public void reset() {
|
||||
this.backingStream.reset();
|
||||
}
|
||||
|
||||
public boolean eof() {
|
||||
return backingStream.available() == 0;
|
||||
}
|
||||
|
||||
public int size() {
|
||||
return backingBuffer.length;
|
||||
}
|
||||
|
||||
public int available() {
|
||||
return backingStream.available();
|
||||
}
|
||||
|
||||
public byte[] toByteArray() {
|
||||
return backingBuffer;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,176 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.codec;
|
||||
|
||||
import com.google.protobuf.ByteString;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.DataOutput;
|
||||
import java.io.DataOutputStream;
|
||||
|
||||
// A trivial implementation supposed to be replaced
|
||||
public class CodecDataOutput implements DataOutput {
|
||||
private DataOutputStream s;
|
||||
// TODO: Switch to ByteBuffer if possible, or a chain of ByteBuffer
|
||||
private ByteArrayOutputStream byteArray;
|
||||
|
||||
public CodecDataOutput() {
|
||||
byteArray = new ByteArrayOutputStream();
|
||||
s = new DataOutputStream(byteArray);
|
||||
}
|
||||
|
||||
public CodecDataOutput(int size) {
|
||||
byteArray = new ByteArrayOutputStream(size);
|
||||
s = new DataOutputStream(byteArray);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(int b) {
|
||||
try {
|
||||
s.write(b);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(byte b[]) {
|
||||
try {
|
||||
s.write(b);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(byte[] b, int off, int len) {
|
||||
try {
|
||||
s.write(b, off, len);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeBoolean(boolean v) {
|
||||
try {
|
||||
s.writeBoolean(v);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeByte(int v) {
|
||||
try {
|
||||
s.writeByte(v);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeShort(int v) {
|
||||
try {
|
||||
s.writeShort(v);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeChar(int v) {
|
||||
try {
|
||||
s.writeChar(v);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeInt(int v) {
|
||||
try {
|
||||
s.writeInt(v);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeLong(long v) {
|
||||
try {
|
||||
s.writeLong(v);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeFloat(float v) {
|
||||
try {
|
||||
s.writeFloat(v);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeDouble(double v) {
|
||||
try {
|
||||
s.writeDouble(v);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeBytes(String v) {
|
||||
try {
|
||||
s.writeBytes(v);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeChars(String v) {
|
||||
try {
|
||||
s.writeChars(v);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeUTF(String v) {
|
||||
try {
|
||||
s.writeUTF(v);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
public byte[] toBytes() {
|
||||
return byteArray.toByteArray();
|
||||
}
|
||||
|
||||
public ByteString toByteString() {
|
||||
return ByteString.copyFrom(byteArray.toByteArray());
|
||||
}
|
||||
|
||||
public void reset() {
|
||||
this.byteArray.reset();
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,54 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.codec;
|
||||
|
||||
import com.google.common.primitives.UnsignedBytes;
|
||||
import com.google.protobuf.ByteString;
|
||||
import com.pingcap.tikv.kvproto.Coprocessor;
|
||||
|
||||
public class KeyUtils {
|
||||
|
||||
public static String formatBytes(byte[] bytes) {
|
||||
if (bytes == null) return "null";
|
||||
StringBuilder sb = new StringBuilder();
|
||||
for (int i = 0; i < bytes.length; i++) {
|
||||
int unsignedByte = UnsignedBytes.toInt(bytes[i]);
|
||||
sb.append(unsignedByte);
|
||||
if (i != bytes.length - 1) {
|
||||
sb.append(",");
|
||||
}
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
public static String formatBytes(ByteString bytes) {
|
||||
if (bytes == null) return "null";
|
||||
return formatBytes(bytes.toByteArray());
|
||||
}
|
||||
|
||||
public static String formatBytes(Coprocessor.KeyRange keyRange) {
|
||||
return "[[" + formatBytes(keyRange.getStart()) + "], [" + formatBytes(keyRange.getEnd()) + "])";
|
||||
}
|
||||
|
||||
public static boolean hasPrefix(ByteString str, ByteString prefix) {
|
||||
for (int i = 0; i < prefix.size(); i++) {
|
||||
if (str.byteAt(i) != prefix.byteAt(i)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,829 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.codec;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import java.math.BigDecimal;
|
||||
import java.util.Arrays;
|
||||
|
||||
// TODO: We shouldn't allow empty MyDecimal
|
||||
// TODO: It seems MyDecimal to BigDecimal is very slow
|
||||
public class MyDecimal {
  // how many decimal digits one 32-bit word stores
  private static final int digitsPerWord = 9;
  // MyDecimal can hold at most 9 words (81 decimal digits)
  private static final int wordBufLen = 9;
  // a word occupies 4 bytes in the serialized binary format
  private static final int wordSize = 4;
  private static final int ten0 = 1;
  private static final int ten1 = 10;
  private static final int ten2 = 100;
  private static final int ten3 = 1000;
  private static final int ten4 = 10000;
  private static final int ten5 = 100000;
  private static final int ten6 = 1000000;
  private static final int ten7 = 10000000;
  private static final int ten8 = 100000000;
  private static final int ten9 = 1000000000;
  private static final int digMask = ten8;
  private static final int wordBase = ten9;
  private static final int wordMax = wordBase - 1;
  private static final int[] powers10 =
      new int[] {ten0, ten1, ten2, ten3, ten4, ten5, ten6, ten7, ten8, ten9};

  // A MyDecimal holds 9 words.
  private static final int maxWordBufLen = 9;
  private static final int maxFraction = 30;
  // bytes needed to store 0..9 decimal digits in the binary format (see toBin)
  private static final int[] dig2bytes = new int[] {0, 1, 1, 2, 2, 3, 3, 4, 4, 4};

  // The following are fields of MyDecimal
  // number of digits before the decimal point
  private int digitsInt;
  // number of digits after the decimal point
  private int digitsFrac;
  // fraction digits requested for the encoded result
  private int resultFrac;
  private boolean negative;
  // base-10^9 digits, most significant word first
  private int[] wordBuf = new int[maxWordBufLen];

  /*
   * Returns total precision of this decimal. Basically, it is the sum of digitsInt and
   * digitsFrac, but special cases such as 000.001 need to be taken care of:
   * precision reflects the actual effective precision without leading zeros.
   */
  public int precision() {
    int frac = this.digitsFrac;
    // removeLeadingZeros() returns an array whose second element is the effective digitsInt
    int digitsInt = this.removeLeadingZeros()[1];
    int precision = digitsInt + frac;
    // a value with no effective digits (i.e. zero) still has precision 1
    if (precision == 0) {
      precision = 1;
    }
    return precision;
  }

  /**
   * Returns the number of fraction digits (digits after "."). frac() reflects the actual
   * effective fraction without trailing zeros.
   */
  public int frac() {
    return digitsFrac;
  }

  /**
   * Parses a decimal value from a double (via its canonical string form).
   *
   * @param value a double value
   */
  public void fromDecimal(double value) {
    String s = Double.toString(value);
    this.fromString(s);
  }

  /**
   * Parses a decimal from its binary representation for the given precision and frac.
   *
   * @param precision total number of digits this decimal carries
   * @param frac number of fraction digits
   * @param bin binary representation of a decimal value (unsigned bytes stored as ints)
   * @return the number of bytes consumed from {@code bin}
   * @throws IllegalArgumentException if the input is empty or malformed
   */
  public int fromBin(int precision, int frac, int[] bin) {
    if (bin.length == 0) {
      throw new IllegalArgumentException("Bad Float Number to parse");
    }

    int digitsInt = precision - frac;
    int wordsInt = digitsInt / digitsPerWord;
    int leadingDigits = digitsInt - wordsInt * digitsPerWord;
    int wordsFrac = frac / digitsPerWord;
    int trailingDigits = frac - wordsFrac * digitsPerWord;
    int wordsIntTo = wordsInt;
    if (leadingDigits > 0) {
      wordsIntTo++;
    }
    int wordsFracTo = wordsFrac;
    if (trailingDigits > 0) {
      wordsFracTo++;
    }

    int binIdx = 0;
    // the sign bit of the first byte is 1 for non-negative values (see toBin); mask is 0 for
    // positive numbers and -1 (all ones) for negative ones so that x ^ mask un-inverts bytes
    int mask = -1;
    int sign = bin[binIdx] & 0x80;
    if (sign > 0) {
      mask = 0;
    }
    int binSize = decimalBinSize(precision, frac);
    // work on a copy: the first byte must have its sign bit flipped back
    int[] dCopy;
    dCopy = Arrays.copyOf(bin, binSize);
    dCopy[0] ^= 0x80;
    bin = dCopy;

    // clamp word counts to the buffer size, recording whether we overflowed or truncated
    int oldWordsIntTo = wordsIntTo;
    boolean overflow = false;
    boolean truncated = false;
    if (wordsIntTo + wordsFracTo > wordBufLen) {
      if (wordsIntTo > wordBufLen) {
        wordsIntTo = wordBufLen;
        wordsFracTo = 0;
        overflow = true;
      } else {
        wordsIntTo = wordsInt;
        wordsFracTo = wordBufLen - wordsInt;
        truncated = true;
      }
    }

    if (overflow || truncated) {
      if (wordsIntTo < oldWordsIntTo) {
        // skip the integer bytes that no longer fit
        binIdx += dig2bytes[leadingDigits] + (wordsInt - wordsIntTo) * wordSize;
      } else {
        trailingDigits = 0;
        wordsFrac = wordsFracTo;
      }
    }

    this.negative = mask != 0;
    this.digitsInt = (byte) (wordsInt * digitsPerWord + leadingDigits);
    this.digitsFrac = (byte) (wordsFrac * digitsPerWord + trailingDigits);

    int wordIdx = 0;
    // partial leading word (fewer than digitsPerWord integer digits)
    if (leadingDigits > 0) {
      int i = dig2bytes[leadingDigits];
      int x = readWord(bin, i, binIdx);
      binIdx += i;
      this.wordBuf[wordIdx] = (x ^ mask) > 0 ? x ^ mask : (x ^ mask) & 0xFF;
      if (this.wordBuf[wordIdx] >= powers10[leadingDigits + 1]) {
        throw new IllegalArgumentException("BadNumber");
      }
      if (this.wordBuf[wordIdx] != 0) {
        wordIdx++;
      } else {
        this.digitsInt -= leadingDigits;
      }
    }
    // full integer words
    for (int stop = binIdx + wordsInt * wordSize; binIdx < stop; binIdx += wordSize) {
      this.wordBuf[wordIdx] = (readWord(bin, 4, binIdx) ^ mask);
      if (this.wordBuf[wordIdx] > wordMax) {
        throw new IllegalArgumentException("BadNumber");
      }
      if (wordIdx > 0 || this.wordBuf[wordIdx] != 0) {
        wordIdx++;
      } else {
        // drop leading all-zero words from the effective integer digit count
        this.digitsInt -= digitsPerWord;
      }
    }

    // full fraction words
    for (int stop = binIdx + wordsFrac * wordSize; binIdx < stop; binIdx += wordSize) {
      int x = readWord(bin, 4, binIdx);
      this.wordBuf[wordIdx] = (x ^ mask) > 0 ? x ^ mask : (x ^ mask) & 0xFF;
      if (this.wordBuf[wordIdx] > wordMax) {
        throw new IllegalArgumentException("BadNumber");
      }
      wordIdx++;
    }

    // partial trailing word (fewer than digitsPerWord fraction digits); scale it up so the
    // stored word always represents digitsPerWord digits
    if (trailingDigits > 0) {
      int i = dig2bytes[trailingDigits];
      int x = readWord(bin, i, binIdx);
      this.wordBuf[wordIdx] =
          ((x ^ mask) > 0 ? x ^ mask : (x ^ mask) & 0xFF)
              * powers10[digitsPerWord - trailingDigits];
      if (this.wordBuf[wordIdx] > wordMax) {
        throw new IllegalArgumentException("BadNumber");
      }
      wordIdx++;
    }

    this.resultFrac = frac;
    return binSize;
  }

  /** Returns a BigDecimal with the same value as this MyDecimal. */
  public BigDecimal toDecimal() {
    return new BigDecimal(toString());
  }

  /** Returns this decimal as a double. */
  public double toDouble() {
    // BUG FIX: the original parsed with Float.parseFloat, silently rounding the value to
    // single precision (~7 significant digits) before widening to double. Parse as double.
    return Double.parseDouble(toString());
  }

  /**
   * Skips leading zero words/digits such as in 00.001.
   *
   * @return a two-element array: [0] the index of the first significant word,
   *     [1] the effective number of integer digits
   */
  private int[] removeLeadingZeros() {
    int wordIdx = 0;
    int digitsInt = this.digitsInt;
    int i = ((digitsInt - 1) % digitsPerWord) + 1;
    for (; digitsInt > 0 && this.wordBuf[wordIdx] == 0; ) {
      digitsInt -= i;
      i = digitsPerWord;
      wordIdx++;
    }
    if (digitsInt > 0) {
      digitsInt -= countLeadingZeroes((digitsInt - 1) % digitsPerWord, this.wordBuf[wordIdx]);
    } else {
      digitsInt = 0;
    }
    int[] res = new int[2];
    res[0] = wordIdx;
    res[1] = digitsInt;
    return res;
  }

  /**
   * Counts the number of leading zero digits inside a word. For 00.001 it returns two.
   *
   * @param i index into powers10 of the highest possible digit position
   * @param word the word to examine (must be non-zero when i can reach 0)
   */
  private int countLeadingZeroes(int i, int word) {
    int leading = 0;
    for (; word < powers10[i]; ) {
      i--;
      leading++;
    }
    return leading;
  }

  /** Returns the smaller of two ints. */
  private int min(int a, int b) {
    if (a > b) return b;
    else return a;
  }

  /** Returns how many words are needed to hold the given number of digits. */
  private int digitsToWords(int digits) {
    return (digits + digitsPerWord - 1) / digitsPerWord;
  }

  /**
   * Reads a big-endian, sign-extended word of the given byte size from an array.
   *
   * @param b source data: unsigned bytes stored as ints
   * @param size word size in bytes (1..4)
   * @param start index to start reading from
   */
  // Kept public for unit testing (was annotated @VisibleForTesting).
  public static int readWord(int[] b, int size, int start) {
    int x = 0;
    switch (size) {
      case 1:
        x = (byte) b[start];
        break;
      case 2:
        x = (((byte) b[start]) << 8) + (b[start + 1] & 0xFF);
        break;
      case 3:
        // manual sign extension for the 3-byte case
        int sign = b[start] & 128;
        if (sign > 0) {
          x = 0xFF << 24 | (b[start] << 16) | (b[start + 1] << 8) | (b[start + 2]);
        } else {
          x = b[start] << 16 | (b[start + 1] << 8) | b[start + 2];
        }
        break;
      case 4:
        x = b[start + 3] + (b[start + 2] << 8) + (b[start + 1] << 16) + (b[start] << 24);
        break;
    }
    return x;
  }

  /**
   * Parses a decimal value from a string.
   *
   * @param s a decimal in string form, e.g. "-123.45"
   * @throws IllegalArgumentException if the string contains no digits
   */
  // Kept public for unit testing (was annotated @VisibleForTesting).
  public void fromString(String s) {
    char[] sCharArray = s.toCharArray();
    fromCharArray(sCharArray);
  }

  // helper function for fromString
  private void fromCharArray(char[] str) {
    int startIdx = 0;
    // skip leading spaces; parsing starts at the first non-space character
    for (; startIdx < str.length; startIdx++) {
      if (!Character.isSpaceChar(str[startIdx])) {
        break;
      }
    }

    // BUG FIX: the original tested str.length == 0, so an empty-after-spaces input (e.g. "   ")
    // fell through and indexed past the end of the array below. Test the scan position instead;
    // this also covers the empty-string case.
    if (startIdx == str.length) {
      throw new IllegalArgumentException("BadNumber");
    }

    // skip an optional sign and record it:
    // [-, 1, 2, 3] / [+, 1, 2, 3] — the sign goes into the negative field.
    switch (str[startIdx]) {
      case '-':
        this.negative = true;
        startIdx++;
        break;
      case '+':
        startIdx++;
        break;
    }
    // scan the integer digits
    int strIdx = startIdx;
    for (; strIdx < str.length && Character.isDigit(str[strIdx]); ) {
      strIdx++;
    }
    // subtract startIdx because strIdx counts from the start of the array, not from the
    // first digit (a sign may have been skipped)
    int digitsInt = strIdx - startIdx;
    int digitsFrac;
    int endIdx;
    if (strIdx < str.length && str[strIdx] == '.') {
      endIdx = strIdx + 1;
      // scan the fraction digits to find the end of the number
      for (; endIdx < str.length && Character.isDigit(str[endIdx]); ) {
        endIdx++;
      }
      digitsFrac = endIdx - strIdx - 1;
    } else {
      digitsFrac = 0;
      endIdx = strIdx;
    }

    if (digitsInt + digitsFrac == 0) {
      throw new IllegalArgumentException("BadNumber");
    }
    int wordsInt = digitsToWords(digitsInt);
    int wordsFrac = digitsToWords(digitsFrac);

    // clamp the word counts to the buffer (overflow drops fraction first, then truncates)
    boolean overflow = false;
    boolean truncated = false;
    if (wordsInt + wordsFrac > wordBufLen) {
      if (wordsInt > wordBufLen) {
        wordsInt = wordBufLen;
        wordsFrac = 0;
        overflow = true;
      } else {
        wordsFrac = wordBufLen - wordsInt;
        truncated = true;
      }
    }

    if (overflow || truncated) {
      digitsFrac = wordsFrac * digitsPerWord;
      if (overflow) {
        digitsInt = wordsInt * digitsPerWord;
      }
    }
    this.digitsInt = digitsInt;
    this.digitsFrac = digitsFrac;

    // fill integer words right-to-left, packing digitsPerWord digits per word
    int wordIdx = wordsInt;
    int strIdxTmp = strIdx;
    int word = 0;
    int innerIdx = 0;
    for (; digitsInt > 0; ) {
      digitsInt--;
      strIdx--;
      word += (str[strIdx] - '0') * powers10[innerIdx];
      innerIdx++;
      if (innerIdx == digitsPerWord) {
        wordIdx--;
        this.wordBuf[wordIdx] = word;
        word = 0;
        innerIdx = 0;
      }
    }

    if (innerIdx != 0) {
      wordIdx--;
      this.wordBuf[wordIdx] = word;
    }

    // fill fraction words left-to-right, starting just after the integer words
    wordIdx = wordsInt;
    strIdx = strIdxTmp;
    word = 0;
    innerIdx = 0;

    for (; digitsFrac > 0; ) {
      digitsFrac--;
      strIdx++;
      word = (str[strIdx] - '0') + word * 10;
      innerIdx++;
      if (innerIdx == digitsPerWord) {
        this.wordBuf[wordIdx] = word;
        wordIdx++;
        word = 0;
        innerIdx = 0;
      }
    }
    if (innerIdx != 0) {
      // scale a partial trailing word so it represents a full digitsPerWord digits
      this.wordBuf[wordIdx] = word * powers10[digitsPerWord - innerIdx];
    }

    // normalize the sign: -0000 is just 0
    boolean allZero = true;
    for (int i = 0; i < wordBufLen; i++) {
      if (this.wordBuf[i] != 0) {
        allZero = false;
        break;
      }
    }
    if (allZero) {
      this.negative = false;
    }

    this.resultFrac = this.digitsFrac;
  }

  // Parses a string to an int (sign-aware, stops at the first non-digit).
  // NOTE(review): currently unused within this class.
  private int strToLong(String str) {
    str = str.trim();
    if (str.isEmpty()) {
      return 0;
    }
    boolean negative = false;
    int i = 0;
    if (str.charAt(i) == '-') {
      negative = true;
      i++;
    } else if (str.charAt(i) == '+') {
      i++;
    }

    int r = 0;
    for (; i < str.length(); i++) {
      if (!Character.isDigit(str.charAt(i))) {
        break;
      }
      r = r * 10 + (str.charAt(i) - '0');
    }

    if (negative) {
      r = -r;
    }
    return r;
  }

  /** Returns the decimal as a plain string, e.g. "-0.001". */
  public String toString() {
    char[] str;
    int digitsFrac = this.digitsFrac;
    int[] res = removeLeadingZeros();
    int wordStartIdx = res[0];
    int digitsInt = res[1];
    if (digitsInt + digitsFrac == 0) {
      // the value is zero: render a single '0'
      digitsInt = 1;
      wordStartIdx = 0;
    }

    int digitsIntLen = digitsInt;
    if (digitsIntLen == 0) {
      digitsIntLen = 1;
    }
    int digitsFracLen = digitsFrac;
    int length = digitsIntLen + digitsFracLen;
    if (this.negative) {
      length++;
    }
    if (digitsFrac > 0) {
      length++; // the '.' separator
    }
    str = new char[length];
    int strIdx = 0;
    if (this.negative) {
      str[strIdx] = '-';
      strIdx++;
    }
    int fill = 0;
    if (digitsFrac > 0) {
      // write '.' and the fraction digits, word by word, most significant digit first
      int fracIdx = strIdx + digitsIntLen;
      fill = digitsFracLen - digitsFrac;
      int wordIdx = wordStartIdx + digitsToWords(digitsInt);
      str[fracIdx] = '.';
      fracIdx++;
      for (; digitsFrac > 0; digitsFrac -= digitsPerWord) {
        int x = this.wordBuf[wordIdx];
        wordIdx++;
        for (int i = min(digitsFrac, digitsPerWord); i > 0; i--) {
          int y = x / digMask;
          str[fracIdx] = (char) (y + '0');
          fracIdx++;
          x -= y * digMask;
          x *= 10;
        }
      }
      for (; fill > 0; fill--) {
        str[fracIdx] = '0';
        fracIdx++;
      }
    }
    fill = digitsIntLen - digitsInt;
    if (digitsInt == 0) {
      fill--; /* symbol 0 before digital point */
    }
    for (; fill > 0; fill--) {
      str[strIdx] = '0';
      strIdx++;
    }
    if (digitsInt > 0) {
      // write the integer digits right-to-left, word by word
      strIdx += digitsInt;
      int wordIdx = wordStartIdx + digitsToWords(digitsInt);
      for (; digitsInt > 0; digitsInt -= digitsPerWord) {
        wordIdx--;
        int x = this.wordBuf[wordIdx];
        for (int i = min(digitsInt, digitsPerWord); i > 0; i--) {
          int y = x / 10;
          strIdx--;
          str[strIdx] = (char) ('0' + (x - y * 10));
          x = y;
        }
      }
    } else {
      str[strIdx] = '0';
    }

    return new String(str);
  }

  // Upper bound on the string length of this decimal (sign + '.' + slack).
  // NOTE(review): currently unused within this class.
  private int stringSize() {
    return digitsInt + digitsFrac + 3;
  }

  /**
   * Returns the integer part of this decimal as a long, truncating the fraction and
   * saturating at Long.MIN_VALUE / Long.MAX_VALUE on overflow.
   */
  public long toLong() {
    long x = 0;
    int wordIdx = 0;
    for (int i = this.digitsInt; i > 0; i -= digitsPerWord) {
      /*
      Attention: trick!
      we're calculating -|from| instead of |from| here
      because |LONGLONG_MIN| > LONGLONG_MAX
      so we can convert -9223372036854775808 correctly
      */
      long y = x;
      x = x * wordBase - (long) this.wordBuf[wordIdx];
      wordIdx++;
      if (y < Long.MIN_VALUE / wordBase || x > y) {
        /*
        the decimal is bigger than any possible integer
        return border integer depending on the sign
        */
        if (this.negative) {
          return Long.MIN_VALUE;
        }
        return Long.MAX_VALUE;
      }
    }

    /* boundary case: 9223372036854775808 */
    if (!this.negative && x == Long.MIN_VALUE) {
      return Long.MAX_VALUE;
    }

    if (!this.negative) {
      x = -x;
    }
    for (int i = this.digitsFrac; i > 0; i -= digitsPerWord) {
      if (this.wordBuf[wordIdx] != 0) {
        return x;
      }
      wordIdx++;
    }
    return x;
  }

  // decimalBinSize returns the size of the array needed to hold a binary representation
  // of a decimal with the given precision and frac.
  private int decimalBinSize(int precision, int frac) {
    int digitsInt = precision - frac;
    int wordsInt = digitsInt / digitsPerWord;
    int wordsFrac = frac / digitsPerWord;
    int xInt = digitsInt - wordsInt * digitsPerWord;
    int xFrac = frac - wordsFrac * digitsPerWord;
    return wordsInt * wordSize + dig2bytes[xInt] + wordsFrac * wordSize + dig2bytes[xFrac];
  }

  /**
   * Converts this decimal to its binary fixed-length representation. Two representations of
   * the same {precision, frac} can be compared with memcmp with the correct -1/0/+1 result.
   *
   * <p>Layout: every full group of digitsPerWord integer digits is stored as a 4-byte
   * big-endian word; the leading partial group of digitsInt % digitsPerWord digits is stored
   * in the reduced number of bytes given by dig2bytes. The fraction part is stored the same
   * way (full words first, then the trailing partial group). If the number is negative every
   * byte is inverted, and finally the very first bit of the result is flipped so that
   * unsigned byte-wise comparison orders negatives before positives.
   *
   * <p>Example: 1234567890.1234 is held internally as the 3 words [1, 234567890, 123400000];
   * with precision=14, frac=4 it encodes as 81 0D FB 38 D2 04 D2 (7 bytes instead of 12:
   * the leading digit fits in 1 byte and the trailing 1234 fits in 2).
   * For -1234567890.1234 the encoding is the byte-wise inversion 7E F2 04 C7 2D FB 2D.
   *
   * @param precision precision for the encoded value
   * @param frac number of fraction digits for the encoded value
   * @return an int array of unsigned bytes representing the decimal
   * @throws IllegalArgumentException if precision/frac are out of range
   */
  public int[] toBin(int precision, int frac) {
    if (precision > digitsPerWord * maxWordBufLen
        || precision < 0
        || frac > maxFraction
        || frac < 0) {
      throw new IllegalArgumentException("BadNumber");
    }

    // mask is -1 for negative values so that x ^ mask inverts every byte
    int mask = 0;
    if (this.negative) {
      mask = -1;
    }

    int digitsInt = precision - frac;
    int wordsInt = digitsInt / digitsPerWord;
    int leadingDigits = digitsInt - wordsInt * digitsPerWord;
    int wordsFrac = frac / digitsPerWord;
    int trailingDigits = frac - wordsFrac * digitsPerWord;

    // this should be one of 0, 1, 2, 3, 4
    int wordsFracFrom = this.digitsFrac / digitsPerWord;
    int trailingDigitsFrom = this.digitsFrac - wordsFracFrom * digitsPerWord;
    int intSize = wordsInt * wordSize + dig2bytes[leadingDigits];
    int fracSize = wordsFrac * wordSize + dig2bytes[trailingDigits];
    int fracSizeFrom = wordsFracFrom * wordSize + dig2bytes[trailingDigitsFrom];
    int originIntSize = intSize;
    int originFracSize = fracSize;
    int[] bin = new int[intSize + fracSize];
    int binIdx = 0;
    int[] res = this.removeLeadingZeros();
    int wordIdxFrom = res[0];
    int digitsIntFrom = res[1];
    if (digitsIntFrom + fracSizeFrom == 0) {
      mask = 0;
      digitsInt = 1;
    }

    int wordsIntFrom = digitsIntFrom / digitsPerWord;
    int leadingDigitsFrom = digitsIntFrom - wordsIntFrom * digitsPerWord;
    int iSizeFrom = wordsIntFrom * wordSize + dig2bytes[leadingDigitsFrom];

    if (digitsInt < digitsIntFrom) {
      // requested integer width is narrower than the stored value: drop high words
      wordIdxFrom += (wordsIntFrom - wordsInt);
      if (leadingDigitsFrom > 0) {
        wordIdxFrom++;
      }

      if (leadingDigits > 0) {
        wordIdxFrom--;
      }

      wordsIntFrom = wordsInt;
      leadingDigitsFrom = leadingDigits;
      // TODO overflow here
    } else if (intSize > iSizeFrom) {
      // pad the high end with "zero" bytes (inverted for negatives)
      for (; intSize > iSizeFrom; ) {
        intSize--;
        bin[binIdx] = mask & 0xff;
        binIdx++;
      }
    }

    // when fracSize is smaller than fracSizeFrom, the output is truncated
    if (fracSize < fracSizeFrom) {
      wordsFracFrom = wordsFrac;
      trailingDigitsFrom = trailingDigits;
      // TODO truncated
    } else if (fracSize > fracSizeFrom && trailingDigitsFrom > 0) {
      if (wordsFrac == wordsFracFrom) {
        trailingDigitsFrom = trailingDigits;
        fracSize = fracSizeFrom;
      } else {
        wordsFracFrom++;
        trailingDigitsFrom = 0;
      }
    }

    // leading partial integer word
    if (leadingDigitsFrom > 0) {
      int i = dig2bytes[leadingDigitsFrom];
      int x = (this.wordBuf[wordIdxFrom] % powers10[leadingDigitsFrom]) ^ mask;
      wordIdxFrom++;
      writeWord(bin, x, i, binIdx);
      binIdx += i;
    }

    // full integer + fraction words
    for (int stop = wordIdxFrom + wordsIntFrom + wordsFracFrom;
        wordIdxFrom < stop;
        binIdx += wordSize) {
      int x = this.wordBuf[wordIdxFrom] ^ mask;
      wordIdxFrom++;
      writeWord(bin, x, 4, binIdx);
    }

    // trailing partial fraction word
    if (trailingDigitsFrom > 0) {
      int x;
      int i = dig2bytes[trailingDigitsFrom];
      int lim = trailingDigits;
      if (wordsFracFrom < wordsFrac) {
        lim = digitsPerWord;
      }

      // widen trailingDigitsFrom as far as it fits in the same byte count
      for (; trailingDigitsFrom < lim && dig2bytes[trailingDigitsFrom] == i; ) {
        trailingDigitsFrom++;
      }
      x = (this.wordBuf[wordIdxFrom] / powers10[digitsPerWord - trailingDigitsFrom]) ^ mask;
      writeWord(bin, x, i, binIdx);
      binIdx += i;
    }

    // pad the low end with "zero" bytes if the requested fraction is wider than stored
    if (fracSize > fracSizeFrom) {
      int binIdxEnd = originIntSize + originFracSize;
      for (; fracSize > fracSizeFrom && binIdx < binIdxEnd; ) {
        fracSize--;
        bin[binIdx] = mask & 0xff;
        binIdx++;
      }
    }
    // flip the sign bit so unsigned byte comparison orders negatives before positives
    bin[0] ^= 0x80;
    return bin;
  }

  // Writes a big-endian word of the given byte size into the buffer as unsigned bytes.
  private void writeWord(int[] b, int word, int size, int start) {
    switch (size) {
      case 1:
        b[start] = word & 0xFF;
        break;
      case 2:
        b[start] = (word >>> 8) & 0xFF;
        b[start + 1] = word & 0xFF;
        break;
      case 3:
        b[start] = (word >>> 16) & 0xFF;
        b[start + 1] = (word >>> 8) & 0xFF;
        b[start + 2] = word & 0xFF;
        break;
      case 4:
        b[start] = (word >>> 24) & 0xFF;
        b[start + 1] = (word >>> 16) & 0xFF;
        b[start + 2] = (word >>> 8) & 0xFF;
        b[start + 3] = word & 0xFF;
        break;
    }
  }

  /** Clears this instance (resets it to zero; word contents are left stale but unreferenced). */
  public void clear() {
    this.digitsFrac = 0;
    this.digitsInt = 0;
    this.negative = false;
  }
}
|
||||
|
|
@ -0,0 +1,101 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.event;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
public class CacheInvalidateEvent implements Serializable {
  /** Kind of cached data this event invalidates. */
  public enum CacheType implements Serializable {
    REGION_STORE,
    REQ_FAILED,
    LEADER
  }

  private final long regionId;
  private final long storeId;
  private boolean invalidateRegion;
  private boolean invalidateStore;
  private final CacheType cacheType;

  /**
   * Creates an invalidation event for a region/store pair.
   *
   * @param regionId id of the affected region
   * @param storeId id of the affected store
   * @param updateRegion whether the region cache entry should be refreshed
   * @param updateStore whether the store cache entry should be refreshed
   * @param type kind of cached data being invalidated
   */
  public CacheInvalidateEvent(
      long regionId, long storeId, boolean updateRegion, boolean updateStore, CacheType type) {
    this.regionId = regionId;
    this.storeId = storeId;
    this.cacheType = type;
    // BUG FIX: assign the flags directly instead of calling the public invalidateRegion()/
    // invalidateStore() methods — invoking overridable methods from a constructor is unsafe
    // if this class is ever subclassed. Behavior for this class is identical.
    this.invalidateRegion = updateRegion;
    this.invalidateStore = updateStore;
  }

  public long getRegionId() {
    return regionId;
  }

  public long getStoreId() {
    return storeId;
  }

  /**
   * Equality is defined by region id, store id and cache type; the two invalidate flags are
   * deliberately not compared.
   */
  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    } else if (obj instanceof CacheInvalidateEvent) {
      CacheInvalidateEvent event = (CacheInvalidateEvent) obj;
      return event.getRegionId() == getRegionId()
          && event.getStoreId() == getStoreId()
          && event.getCacheType() == getCacheType();
    }
    return false;
  }

  @Override
  public int hashCode() {
    // Mixes exactly the fields compared in equals(): storeId, regionId and cacheType.
    // (The compound assignments implicitly narrow the long ids to int.)
    int result = 1106;
    result += result * 31 + getStoreId();
    result += result * 31 + getRegionId();
    result += result * 31 + getCacheType().name().hashCode();
    return result;
  }

  /** Marks the region cache entry as needing a refresh. */
  public void invalidateRegion() {
    invalidateRegion = true;
  }

  /** Marks the store cache entry as needing a refresh. */
  public void invalidateStore() {
    invalidateStore = true;
  }

  public boolean shouldUpdateRegion() {
    return invalidateRegion;
  }

  public boolean shouldUpdateStore() {
    return invalidateStore;
  }

  public CacheType getCacheType() {
    return cacheType;
  }

  @Override
  public String toString() {
    return String.format("RegionId=%d,StoreId=%d,Type=%s", regionId, storeId, cacheType.name());
  }
}
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.exception;
|
||||
|
||||
/** Thrown when a value cannot be cast to the requested type. */
public class CastingException extends RuntimeException {
  /** Wraps the underlying failure, preserving it as the cause. */
  public CastingException(Exception e) {
    super(e);
  }

  /** @param msg description of the failed cast */
  public CastingException(String msg) {
    super(msg);
  }

  /**
   * Added for exception chaining, consistent with the other exception types in
   * this package (e.g. TiClientInternalException, TypeException).
   *
   * @param msg description of the failed cast
   * @param t underlying cause, preserved for diagnostics
   */
  public CastingException(String msg, Throwable t) {
    super(msg, t);
  }
}
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.exception;
|
||||
|
||||
/** Thrown when building or serving a DAG request fails. */
public class DAGRequestException extends RuntimeException {
  /** @param msg description of the DAG request failure */
  public DAGRequestException(String msg) {
    super(msg);
  }

  /**
   * Added for exception chaining, consistent with the other exception types in
   * this package.
   *
   * @param msg description of the DAG request failure
   * @param t underlying cause, preserved for diagnostics
   */
  public DAGRequestException(String msg, Throwable t) {
    super(msg, t);
  }
}
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.exception;
|
||||
|
||||
/** Generic wrapper for failures at the gRPC layer. */
public class GrpcException extends RuntimeException {
  /** Wraps the underlying failure, preserving it as the cause. */
  public GrpcException(Exception e) {
    super(e);
  }

  /** @param msg description of the failure */
  public GrpcException(String msg) {
    super(msg);
  }

  /**
   * @param msg description of the failure
   * @param e underlying cause, preserved for diagnostics
   */
  public GrpcException(String msg, Exception e) {
    super(msg, e);
  }
}
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.exception;
|
||||
|
||||
/** Thrown to signal that an unsupported type was encountered and is being skipped. */
public class IgnoreUnsupportedTypeException extends RuntimeException {
  /** @param msg description of the unsupported type */
  public IgnoreUnsupportedTypeException(String msg) {
    super(msg);
  }

  /**
   * Added for exception chaining, consistent with the other exception types in
   * this package.
   *
   * @param msg description of the unsupported type
   * @param t underlying cause, preserved for diagnostics
   */
  public IgnoreUnsupportedTypeException(String msg, Throwable t) {
    super(msg, t);
  }
}
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.exception;
|
||||
|
||||
/** Thrown when encoded data does not match the expected codec format. */
public class InvalidCodecFormatException extends RuntimeException {
  /** @param msg description of the malformed encoding */
  public InvalidCodecFormatException(String msg) {
    super(msg);
  }

  /**
   * Added for exception chaining, consistent with the other exception types in
   * this package.
   *
   * @param msg description of the malformed encoding
   * @param t underlying cause, preserved for diagnostics
   */
  public InvalidCodecFormatException(String msg, Throwable t) {
    super(msg, t);
  }
}
|
||||
|
|
@ -0,0 +1,35 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.exception;
|
||||
|
||||
import com.pingcap.tikv.kvproto.Kvrpcpb;
|
||||
|
||||
/** Wraps a KV-level key error (Kvrpcpb.KeyError) returned by TiKV. */
public class KeyException extends RuntimeException {
  // Raw protobuf error from the server; null when built from a plain message.
  private final Kvrpcpb.KeyError keyErr;

  /** @param errMsg human-readable description; no protobuf error is attached */
  public KeyException(String errMsg) {
    super(errMsg);
    keyErr = null;
  }

  /**
   * @param keyErr protobuf error payload returned by TiKV; note the exception
   *     carries no message in this case
   */
  public KeyException(Kvrpcpb.KeyError keyErr) {
    this.keyErr = keyErr;
  }

  /** Returns the protobuf error, or null if constructed from a message only. */
  public Kvrpcpb.KeyError getKeyErr() {
    return keyErr;
  }
}
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.exception;
|
||||
|
||||
import com.pingcap.tikv.kvproto.Errorpb.Error;
|
||||
|
||||
/** Wraps a region-level error (Errorpb.Error) returned by TiKV. */
public class RegionException extends RuntimeException {
  // Raw protobuf region error; note no message or cause is set on super.
  private final Error regionErr;

  /** @param regionErr protobuf region error payload from the server */
  public RegionException(Error regionErr) {
    this.regionErr = regionErr;
  }

  /** Returns the protobuf region error this exception wraps. */
  public Error getRegionErr() {
    return regionErr;
  }
}
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
/*
|
||||
* Copyright 2018 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.exception;
|
||||
|
||||
/** Thrown when a region-scoped task fails; always carries the underlying cause. */
public class RegionTaskException extends RuntimeException {
  /**
   * @param msg description of the failed task
   * @param throwable underlying cause, preserved for diagnostics
   */
  public RegionTaskException(String msg, Throwable throwable) {
    super(msg, throwable);
  }
}
|
||||
|
|
@ -0,0 +1,37 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.exception;
|
||||
|
||||
import com.pingcap.tidb.tipb.Error;
|
||||
|
||||
/** Wraps a coprocessor (tipb) error returned for a select request. */
public class SelectException extends RuntimeException {
  // Raw tipb error from the response; null when only a message is known.
  private final Error err;

  /**
   * @param err tipb error payload from the coprocessor response
   * @param msg human-readable description
   */
  public SelectException(Error err, String msg) {
    super(msg);
    this.err = err;
  }

  // TODO: improve this
  /** @param msg human-readable description; no tipb error is attached */
  public SelectException(String msg) {
    super(msg);
    this.err = null;
  }

  /** Returns the tipb error, or null if constructed from a message only. */
  public Error getError() {
    return err;
  }
}
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.exception;
|
||||
|
||||
/** Signals an internal client error (a bug or broken invariant in the client itself). */
public class TiClientInternalException extends RuntimeException {
  /** @param msg description of the internal error */
  public TiClientInternalException(String msg) {
    super(msg);
  }

  /**
   * @param msg description of the internal error
   * @param t underlying cause, preserved for diagnostics
   */
  public TiClientInternalException(String msg, Throwable t) {
    super(msg, t);
  }
}
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.exception;
|
||||
|
||||
/** Thrown when an expression is malformed or cannot be processed. */
public class TiExpressionException extends RuntimeException {
  /** @param msg description of the expression problem */
  public TiExpressionException(String msg) {
    super(msg);
  }

  /**
   * @param msg description of the expression problem
   * @param t underlying cause, preserved for diagnostics
   */
  public TiExpressionException(String msg, Throwable t) {
    super(msg, t);
  }
}
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.exception;
|
||||
|
||||
/** Thrown on data-type errors (e.g. conversion or codec type mismatches). */
public class TypeException extends RuntimeException {
  /** @param msg description of the type error */
  public TypeException(String msg) {
    super(msg);
  }

  /**
   * @param msg description of the type error
   * @param t underlying cause, preserved for diagnostics
   */
  public TypeException(String msg, Throwable t) {
    super(msg, t);
  }
}
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.exception;
|
||||
|
||||
/** Thrown when a data type is not supported by the client. */
public class UnsupportedTypeException extends RuntimeException {
  /** @param msg description of the unsupported type */
  public UnsupportedTypeException(String msg) {
    super(msg);
  }

  /**
   * Added for exception chaining, consistent with the other exception types in
   * this package.
   *
   * @param msg description of the unsupported type
   * @param t underlying cause, preserved for diagnostics
   */
  public UnsupportedTypeException(String msg, Throwable t) {
    super(msg, t);
  }
}
|
||||
|
|
@ -0,0 +1,87 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.google.common.base.Joiner;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
public class AggregateFunction implements Expression {
|
||||
public enum FunctionType {
|
||||
Sum,
|
||||
Count,
|
||||
Min,
|
||||
Max,
|
||||
First
|
||||
}
|
||||
|
||||
private final FunctionType type;
|
||||
private final Expression argument;
|
||||
|
||||
public static AggregateFunction newCall(FunctionType type, Expression argument) {
|
||||
return new AggregateFunction(type, argument);
|
||||
}
|
||||
|
||||
private AggregateFunction(FunctionType type, Expression argument) {
|
||||
this.type = requireNonNull(type, "function type is null");
|
||||
this.argument = requireNonNull(argument, "function argument is null");
|
||||
}
|
||||
|
||||
public FunctionType getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public Expression getArgument() {
|
||||
return argument;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Expression> getChildren() {
|
||||
return ImmutableList.of(argument);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(Visitor<R, C> visitor, C context) {
|
||||
return visitor.visit(this, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (!(other instanceof AggregateFunction)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
AggregateFunction that = (AggregateFunction) other;
|
||||
return type == that.type && Objects.equals(argument, that.argument);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(type, argument);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format(
|
||||
"%s(%s)", getType(), Joiner.on(",").useForNull("NULL").join(getChildren()));
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,120 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression;
|
||||
|
||||
import static com.pingcap.tikv.expression.ArithmeticBinaryExpression.Type.*;
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
public class ArithmeticBinaryExpression implements Expression {
|
||||
public enum Type {
|
||||
PLUS,
|
||||
MINUS,
|
||||
MULTIPLY,
|
||||
DIVIDE,
|
||||
BIT_AND,
|
||||
BIT_OR,
|
||||
BIT_XOR
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression plus(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(PLUS, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression minus(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(MINUS, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression multiply(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(MULTIPLY, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression divide(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(DIVIDE, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression bitAnd(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(BIT_AND, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression bitOr(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(BIT_OR, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression bitXor(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(BIT_XOR, left, right);
|
||||
}
|
||||
|
||||
private final Expression left;
|
||||
private final Expression right;
|
||||
private final Type compType;
|
||||
|
||||
public ArithmeticBinaryExpression(Type type, Expression left, Expression right) {
|
||||
this.left = requireNonNull(left, "left expression is null");
|
||||
this.right = requireNonNull(right, "right expression is null");
|
||||
this.compType = requireNonNull(type, "type is null");
|
||||
}
|
||||
|
||||
public Expression getLeft() {
|
||||
return left;
|
||||
}
|
||||
|
||||
public Expression getRight() {
|
||||
return right;
|
||||
}
|
||||
|
||||
public Type getCompType() {
|
||||
return compType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Expression> getChildren() {
|
||||
return ImmutableList.of(left, right);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(Visitor<R, C> visitor, C context) {
|
||||
return visitor.visit(this, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (!(other instanceof ArithmeticBinaryExpression)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
ArithmeticBinaryExpression that = (ArithmeticBinaryExpression) other;
|
||||
return (compType == that.compType)
|
||||
&& Objects.equals(left, that.left)
|
||||
&& Objects.equals(right, that.right);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(compType, left, right);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("[%s %s %s]", getLeft(), getCompType(), getRight());
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,53 @@
|
|||
/*
|
||||
*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
 * Holds a set of expression names that must not be pushed down, parsed from a
 * comma-separated configuration string.
 */
public class Blacklist {
  private final Set<String> unsupported = new HashSet<>();

  /**
   * @param string comma-separated list of unsupported names; may be null.
   *     Entries are trimmed and blank entries are ignored.
   */
  Blacklist(String string) {
    if (string != null) {
      for (String one : string.split(",")) {
        String trimmedExprName = one.trim();
        if (!trimmedExprName.isEmpty()) {
          // reuse the already-trimmed value instead of trimming a second time
          unsupported.add(trimmedExprName);
        }
      }
    }
  }

  /** Returns true if the given expression name is blacklisted. */
  boolean isUnsupported(String name) {
    return unsupported.contains(name);
  }

  /** Returns true if the class's simple name is blacklisted; cls must not be null. */
  boolean isUnsupported(Class<?> cls) {
    return isUnsupported(requireNonNull(cls).getSimpleName());
  }

  @Override
  public String toString() {
    // String.join is the idiomatic equivalent of
    // unsupported.stream().collect(Collectors.joining(","))
    return String.join(",", unsupported);
  }
}
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression;
|
||||
|
||||
import static com.google.common.base.Preconditions.checkNotNull;
|
||||
|
||||
import com.pingcap.tikv.expression.visitor.ProtoConverter;
|
||||
import java.io.Serializable;
|
||||
|
||||
public class ByItem implements Serializable {
|
||||
private Expression expr;
|
||||
private boolean desc;
|
||||
|
||||
public static ByItem create(Expression expr, boolean desc) {
|
||||
return new ByItem(expr, desc);
|
||||
}
|
||||
|
||||
private ByItem(Expression expr, boolean desc) {
|
||||
checkNotNull(expr, "Expr cannot be null for ByItem");
|
||||
|
||||
this.expr = expr;
|
||||
this.desc = desc;
|
||||
}
|
||||
|
||||
public com.pingcap.tidb.tipb.ByItem toProto(Object context) {
|
||||
com.pingcap.tidb.tipb.ByItem.Builder builder = com.pingcap.tidb.tipb.ByItem.newBuilder();
|
||||
return builder.setExpr(ProtoConverter.toProto(expr, context)).setDesc(desc).build();
|
||||
}
|
||||
|
||||
public Expression getExpr() {
|
||||
return expr;
|
||||
}
|
||||
|
||||
public boolean isDesc() {
|
||||
return desc;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("[%s %s]", expr.toString(), desc ? "DESC" : "ASC");
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,143 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.pingcap.tikv.exception.TiClientInternalException;
|
||||
import com.pingcap.tikv.exception.TiExpressionException;
|
||||
import com.pingcap.tikv.meta.TiColumnInfo;
|
||||
import com.pingcap.tikv.meta.TiTableInfo;
|
||||
import com.pingcap.tikv.types.DataType;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
public class ColumnRef implements Expression {
|
||||
public static ColumnRef create(String name, TiTableInfo table) {
|
||||
for (TiColumnInfo columnInfo : table.getColumns()) {
|
||||
if (columnInfo.matchName(name)) {
|
||||
return new ColumnRef(columnInfo.getName(), columnInfo, table);
|
||||
}
|
||||
}
|
||||
throw new TiExpressionException(
|
||||
String.format("Column name %s not found in table %s", name, table));
|
||||
}
|
||||
|
||||
public static ColumnRef create(String name) {
|
||||
return new ColumnRef(name);
|
||||
}
|
||||
|
||||
private final String name;
|
||||
|
||||
private TiColumnInfo columnInfo;
|
||||
private TiTableInfo tableInfo;
|
||||
|
||||
public ColumnRef(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public ColumnRef(String name, TiColumnInfo columnInfo, TiTableInfo tableInfo) {
|
||||
this.name = name;
|
||||
this.columnInfo = columnInfo;
|
||||
this.tableInfo = tableInfo;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void resolve(TiTableInfo table) {
|
||||
TiColumnInfo columnInfo = null;
|
||||
for (TiColumnInfo col : table.getColumns()) {
|
||||
if (col.matchName(name)) {
|
||||
columnInfo = col;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (columnInfo == null) {
|
||||
throw new TiExpressionException(
|
||||
String.format("No Matching column %s from table %s", name, table.getName()));
|
||||
}
|
||||
|
||||
if (columnInfo.getId() == 0) {
|
||||
throw new TiExpressionException("Zero Id is not a referable column id");
|
||||
}
|
||||
|
||||
this.tableInfo = table;
|
||||
this.columnInfo = columnInfo;
|
||||
}
|
||||
|
||||
public TiColumnInfo getColumnInfo() {
|
||||
if (columnInfo == null) {
|
||||
throw new TiClientInternalException(String.format("ColumnRef [%s] is unbound", name));
|
||||
}
|
||||
return columnInfo;
|
||||
}
|
||||
|
||||
public DataType getType() {
|
||||
return getColumnInfo().getType();
|
||||
}
|
||||
|
||||
public TiTableInfo getTableInfo() {
|
||||
return tableInfo;
|
||||
}
|
||||
|
||||
public boolean isResolved() {
|
||||
return tableInfo != null && columnInfo != null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object another) {
|
||||
if (this == another) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (another instanceof ColumnRef) {
|
||||
ColumnRef that = (ColumnRef) another;
|
||||
if (isResolved() && that.isResolved()) {
|
||||
return Objects.equals(columnInfo, that.columnInfo)
|
||||
&& Objects.equals(tableInfo, that.tableInfo);
|
||||
} else {
|
||||
return name.equalsIgnoreCase(that.name);
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
if (isResolved()) {
|
||||
return Objects.hash(tableInfo, columnInfo);
|
||||
} else {
|
||||
return Objects.hashCode(name);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("[%s]", getName());
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Expression> getChildren() {
|
||||
return ImmutableList.of();
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(Visitor<R, C> visitor, C context) {
|
||||
return visitor.visit(this, context);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,199 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression;
|
||||
|
||||
import static com.google.common.base.Preconditions.checkArgument;
|
||||
import static com.pingcap.tikv.expression.ComparisonBinaryExpression.Type.*;
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.pingcap.tikv.exception.TiExpressionException;
|
||||
import com.pingcap.tikv.key.TypedKey;
|
||||
import com.pingcap.tikv.types.DataType;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
|
||||
public class ComparisonBinaryExpression implements Expression {
|
||||
/** Comparison operators supported by this expression. */
public enum Type {
  EQUAL,
  NOT_EQUAL,
  LESS_THAN,
  LESS_EQUAL,
  GREATER_THAN,
  GREATER_EQUAL
}

/** Builds {@code left = right}. */
public static ComparisonBinaryExpression equal(Expression left, Expression right) {
  return new ComparisonBinaryExpression(EQUAL, left, right);
}

/** Builds {@code left != right}. */
public static ComparisonBinaryExpression notEqual(Expression left, Expression right) {
  return new ComparisonBinaryExpression(NOT_EQUAL, left, right);
}

/** Builds {@code left < right}. */
public static ComparisonBinaryExpression lessThan(Expression left, Expression right) {
  return new ComparisonBinaryExpression(LESS_THAN, left, right);
}

/** Builds {@code left <= right}. */
public static ComparisonBinaryExpression lessEqual(Expression left, Expression right) {
  return new ComparisonBinaryExpression(LESS_EQUAL, left, right);
}

/** Builds {@code left > right}. */
public static ComparisonBinaryExpression greaterThan(Expression left, Expression right) {
  return new ComparisonBinaryExpression(GREATER_THAN, left, right);
}

/** Builds {@code left >= right}. */
public static ComparisonBinaryExpression greaterEqual(Expression left, Expression right) {
  return new ComparisonBinaryExpression(GREATER_EQUAL, left, right);
}
|
||||
|
||||
/**
 * View of a predicate in the canonical shape {@code ColumnRef <op> Constant};
 * the operand shapes are enforced in the constructor. Lazily converts the
 * constant operand into a TypedKey.
 */
public static class NormalizedPredicate {
  private final ComparisonBinaryExpression pred;
  // Cache for the typed literal; built on first getTypedLiteral call.
  private TypedKey key;

  NormalizedPredicate(ComparisonBinaryExpression pred) {
    checkArgument(pred.getLeft() instanceof ColumnRef);
    checkArgument(pred.getRight() instanceof Constant);
    this.pred = pred;
  }

  /** Returns the column side of the predicate. */
  public ColumnRef getColumnRef() {
    return (ColumnRef) pred.getLeft();
  }

  /** Returns the constant side of the predicate. */
  public Constant getValue() {
    return (Constant) pred.getRight();
  }

  /** Returns the comparison operator. */
  public Type getType() {
    return pred.getComparisonType();
  }

  /** Returns the constant encoded as a TypedKey with unspecified prefix length. */
  public TypedKey getTypedLiteral() {
    return getTypedLiteral(DataType.UNSPECIFIED_LEN);
  }

  /**
   * Returns the constant encoded as a TypedKey.
   *
   * <p>NOTE(review): the result is cached after the first call, so a later call
   * with a different {@code prefixLength} returns the stale cached key — confirm
   * callers never vary prefixLength on the same instance.
   *
   * @param prefixLength prefix length used when encoding the key
   */
  public TypedKey getTypedLiteral(int prefixLength) {
    if (key == null) {
      key = TypedKey.toTypedKey(getValue().getValue(), getColumnRef().getType(), prefixLength);
    }
    return key;
  }
}
|
||||
|
||||
// Operands and operator; fixed at construction.
private final Expression left;
private final Expression right;
private final Type compType;
// Cached result of normalize(); transient so the cache is never serialized.
private transient Optional<NormalizedPredicate> normalizedPredicate;

/**
 * @param type comparison operator; must not be null
 * @param left left operand; must not be null
 * @param right right operand; must not be null
 */
public ComparisonBinaryExpression(Type type, Expression left, Expression right) {
  this.left = requireNonNull(left, "left expression is null");
  this.right = requireNonNull(right, "right expression is null");
  this.compType = requireNonNull(type, "type is null");
}

/** The two operands, left first. */
@Override
public List<Expression> getChildren() {
  return ImmutableList.of(left, right);
}

@Override
public <R, C> R accept(Visitor<R, C> visitor, C context) {
  return visitor.visit(this, context);
}

public Expression getLeft() {
  return left;
}

public Expression getRight() {
  return right;
}

public Type getComparisonType() {
  return compType;
}
|
||||
|
||||
public NormalizedPredicate normalize() {
|
||||
if (normalizedPredicate != null) {
|
||||
return normalizedPredicate.orElseGet(null);
|
||||
}
|
||||
if (getLeft() instanceof Constant && getRight() instanceof ColumnRef) {
|
||||
Constant left = (Constant) getLeft();
|
||||
ColumnRef right = (ColumnRef) getRight();
|
||||
Type newType;
|
||||
switch (getComparisonType()) {
|
||||
case EQUAL:
|
||||
newType = EQUAL;
|
||||
break;
|
||||
case LESS_EQUAL:
|
||||
newType = GREATER_EQUAL;
|
||||
break;
|
||||
case LESS_THAN:
|
||||
newType = GREATER_THAN;
|
||||
break;
|
||||
case GREATER_EQUAL:
|
||||
newType = LESS_EQUAL;
|
||||
break;
|
||||
case GREATER_THAN:
|
||||
newType = LESS_THAN;
|
||||
break;
|
||||
case NOT_EQUAL:
|
||||
newType = NOT_EQUAL;
|
||||
break;
|
||||
default:
|
||||
throw new TiExpressionException(
|
||||
String.format(
|
||||
"PredicateNormalizer is not able to process type %s", getComparisonType()));
|
||||
}
|
||||
ComparisonBinaryExpression newExpression =
|
||||
new ComparisonBinaryExpression(newType, right, left);
|
||||
normalizedPredicate = Optional.of(new NormalizedPredicate(newExpression));
|
||||
return normalizedPredicate.get();
|
||||
} else if (getRight() instanceof Constant && getLeft() instanceof ColumnRef) {
|
||||
normalizedPredicate = Optional.of(new NormalizedPredicate(this));
|
||||
return normalizedPredicate.get();
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("[%s %s %s]", getLeft(), getComparisonType(), getRight());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (!(other instanceof ComparisonBinaryExpression)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
ComparisonBinaryExpression that = (ComparisonBinaryExpression) other;
|
||||
return (compType == that.compType)
|
||||
&& Objects.equals(left, that.left)
|
||||
&& Objects.equals(right, that.right);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(compType, left, right);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,127 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.pingcap.tikv.exception.TiExpressionException;
|
||||
import com.pingcap.tikv.types.*;
|
||||
import java.math.BigDecimal;
|
||||
import java.sql.Date;
|
||||
import java.sql.Timestamp;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import org.joda.time.DateTime;
|
||||
|
||||
// Refactor needed.
|
||||
// Refer to https://github.com/pingcap/tipb/blob/master/go-tipb/expression.pb.go
|
||||
// TODO: This might need a refactor to accept an DataType?
|
||||
public class Constant implements Expression {
|
||||
private final Object value;
|
||||
private DataType type;
|
||||
|
||||
public static Constant create(Object value, DataType type) {
|
||||
return new Constant(value, type);
|
||||
}
|
||||
|
||||
public static Constant create(Object value) {
|
||||
return new Constant(value, null);
|
||||
}
|
||||
|
||||
public Constant(Object value, DataType type) {
|
||||
this.value = value;
|
||||
this.type = (type == null && value != null) ? getDefaultType(value) : type;
|
||||
}
|
||||
|
||||
protected static boolean isIntegerType(Object value) {
|
||||
return value instanceof Long
|
||||
|| value instanceof Integer
|
||||
|| value instanceof Short
|
||||
|| value instanceof Byte;
|
||||
}
|
||||
|
||||
private static DataType getDefaultType(Object value) {
|
||||
if (value == null) {
|
||||
throw new TiExpressionException("NULL constant has no type");
|
||||
} else if (isIntegerType(value)) {
|
||||
return IntegerType.BIGINT;
|
||||
} else if (value instanceof String) {
|
||||
return StringType.VARCHAR;
|
||||
} else if (value instanceof Float) {
|
||||
return RealType.FLOAT;
|
||||
} else if (value instanceof Double) {
|
||||
return RealType.DOUBLE;
|
||||
} else if (value instanceof BigDecimal) {
|
||||
return DecimalType.DECIMAL;
|
||||
} else if (value instanceof DateTime) {
|
||||
return DateTimeType.DATETIME;
|
||||
} else if (value instanceof Date) {
|
||||
return DateType.DATE;
|
||||
} else if (value instanceof Timestamp) {
|
||||
return TimestampType.TIMESTAMP;
|
||||
} else if (value instanceof byte[]) {
|
||||
return BytesType.TEXT;
|
||||
} else {
|
||||
throw new TiExpressionException(
|
||||
"Constant type not supported:" + value.getClass().getSimpleName());
|
||||
}
|
||||
}
|
||||
|
||||
public void setType(DataType type) {
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
public Object getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
public DataType getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
if (value == null) {
|
||||
return "null";
|
||||
}
|
||||
if (value instanceof String) {
|
||||
return String.format("\"%s\"", value);
|
||||
}
|
||||
return value.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (other instanceof Constant) {
|
||||
return Objects.equals(value, ((Constant) other).value);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hashCode(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Expression> getChildren() {
|
||||
return ImmutableList.of();
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(Visitor<R, C> visitor, C context) {
|
||||
return visitor.visit(this, context);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.List;
|
||||
|
||||
public interface Expression extends Serializable {
|
||||
List<Expression> getChildren();
|
||||
|
||||
<R, C> R accept(Visitor<R, C> visitor, C context);
|
||||
}
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression;
|
||||
|
||||
public class ExpressionBlacklist extends Blacklist {
|
||||
|
||||
public ExpressionBlacklist(String exprsString) {
|
||||
super(exprsString);
|
||||
}
|
||||
|
||||
public boolean isUnsupportedPushdownExpr(Class<?> cls) {
|
||||
return isUnsupported(cls);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,67 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
public class IsNull implements Expression {
|
||||
private Expression expression;
|
||||
|
||||
public IsNull(Expression expression) {
|
||||
this.expression = requireNonNull(expression, "expression is null");
|
||||
}
|
||||
|
||||
public Expression getExpression() {
|
||||
return expression;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Expression> getChildren() {
|
||||
return ImmutableList.of(expression);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(Visitor<R, C> visitor, C context) {
|
||||
return visitor.visit(this, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("IsNull(%s)", getExpression());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (!(other instanceof IsNull)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
IsNull that = (IsNull) other;
|
||||
return Objects.equals(expression, that.expression);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hashCode(expression);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,87 @@
|
|||
// NOTE(review): this class is hand-written, not generated. The previous
// "Generated by the protocol buffer compiler. DO NOT EDIT! / source: expression.proto"
// header was a copy/paste leftover and should be replaced with the standard license header.
|
||||
|
||||
package com.pingcap.tikv.expression;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
public class LogicalBinaryExpression implements Expression {
|
||||
public enum Type {
|
||||
AND,
|
||||
OR,
|
||||
XOR
|
||||
}
|
||||
|
||||
public static LogicalBinaryExpression and(Expression left, Expression right) {
|
||||
return new LogicalBinaryExpression(Type.AND, left, right);
|
||||
}
|
||||
|
||||
public static LogicalBinaryExpression or(Expression left, Expression right) {
|
||||
return new LogicalBinaryExpression(Type.OR, left, right);
|
||||
}
|
||||
|
||||
public static LogicalBinaryExpression xor(Expression left, Expression right) {
|
||||
return new LogicalBinaryExpression(Type.XOR, left, right);
|
||||
}
|
||||
|
||||
public LogicalBinaryExpression(Type type, Expression left, Expression right) {
|
||||
this.left = requireNonNull(left, "left expression is null");
|
||||
this.right = requireNonNull(right, "right expression is null");
|
||||
this.compType = requireNonNull(type, "type is null");
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Expression> getChildren() {
|
||||
return ImmutableList.of(getLeft(), getRight());
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(Visitor<R, C> visitor, C context) {
|
||||
return visitor.visit(this, context);
|
||||
}
|
||||
|
||||
public Expression getLeft() {
|
||||
return left;
|
||||
}
|
||||
|
||||
public Expression getRight() {
|
||||
return right;
|
||||
}
|
||||
|
||||
public Type getCompType() {
|
||||
return compType;
|
||||
}
|
||||
|
||||
private final Expression left;
|
||||
private final Expression right;
|
||||
private final Type compType;
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (!(other instanceof LogicalBinaryExpression)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
LogicalBinaryExpression that = (LogicalBinaryExpression) other;
|
||||
return (compType == that.compType)
|
||||
&& Objects.equals(left, that.left)
|
||||
&& Objects.equals(right, that.right);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(compType, left, right);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("[%s %s %s]", getLeft(), getCompType(), getRight());
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,72 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
public class Not implements Expression {
|
||||
|
||||
public static Not not(Expression expression) {
|
||||
return new Not(expression);
|
||||
}
|
||||
|
||||
private Expression expression;
|
||||
|
||||
public Not(Expression expression) {
|
||||
this.expression = requireNonNull(expression, "expression is null");
|
||||
}
|
||||
|
||||
public Expression getExpression() {
|
||||
return expression;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Expression> getChildren() {
|
||||
return ImmutableList.of(expression);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(Visitor<R, C> visitor, C context) {
|
||||
return visitor.visit(this, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("Not(%s)", getExpression());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (!(other instanceof Not)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Not that = (Not) other;
|
||||
return Objects.equals(expression, that.expression);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hashCode(expression);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,145 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression;
|
||||
|
||||
import static com.pingcap.tikv.expression.StringRegExpression.Type.*;
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.pingcap.tikv.key.TypedKey;
|
||||
import com.pingcap.tikv.types.DataType;
|
||||
import com.pingcap.tikv.types.IntegerType;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
public class StringRegExpression implements Expression {
|
||||
public enum Type {
|
||||
STARTS_WITH,
|
||||
CONTAINS,
|
||||
ENDS_WITH,
|
||||
LIKE
|
||||
}
|
||||
|
||||
public static StringRegExpression startsWith(Expression left, Expression right) {
|
||||
Expression reg =
|
||||
Constant.create(((Constant) right).getValue() + "%", ((Constant) right).getType());
|
||||
return new StringRegExpression(STARTS_WITH, left, right, reg);
|
||||
}
|
||||
|
||||
public static StringRegExpression contains(Expression left, Expression right) {
|
||||
Expression reg =
|
||||
Constant.create("%" + ((Constant) right).getValue() + "%", ((Constant) right).getType());
|
||||
return new StringRegExpression(CONTAINS, left, right, reg);
|
||||
}
|
||||
|
||||
public static StringRegExpression endsWith(Expression left, Expression right) {
|
||||
Expression reg =
|
||||
Constant.create("%" + ((Constant) right).getValue(), ((Constant) right).getType());
|
||||
return new StringRegExpression(ENDS_WITH, left, right, reg);
|
||||
}
|
||||
|
||||
public static StringRegExpression like(Expression left, Expression right) {
|
||||
return new StringRegExpression(LIKE, left, right, right);
|
||||
}
|
||||
|
||||
private transient TypedKey key;
|
||||
|
||||
public ColumnRef getColumnRef() {
|
||||
return (ColumnRef) getLeft();
|
||||
}
|
||||
|
||||
public Constant getValue() {
|
||||
return (Constant) getRight();
|
||||
}
|
||||
|
||||
public TypedKey getTypedLiteral() {
|
||||
return getTypedLiteral(DataType.UNSPECIFIED_LEN);
|
||||
}
|
||||
|
||||
public TypedKey getTypedLiteral(int prefixLength) {
|
||||
if (key == null) {
|
||||
key = TypedKey.toTypedKey(getValue().getValue(), getColumnRef().getType(), prefixLength);
|
||||
}
|
||||
return key;
|
||||
}
|
||||
|
||||
private final Expression left;
|
||||
private final Expression right;
|
||||
private final Expression reg;
|
||||
private final Type regType;
|
||||
|
||||
public StringRegExpression(Type type, Expression left, Expression right, Expression reg) {
|
||||
this.left = requireNonNull(left, "left expression is null");
|
||||
this.right = requireNonNull(right, "right expression is null");
|
||||
this.regType = requireNonNull(type, "type is null");
|
||||
this.reg = requireNonNull(reg, "reg string is null");
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Expression> getChildren() {
|
||||
// For LIKE statement, an extra ESCAPE parameter is required as the third parameter for
|
||||
// ScalarFunc.
|
||||
// However in Spark ESCAPE is not supported so we simply set this value to zero.
|
||||
return ImmutableList.of(left, reg, Constant.create(0, IntegerType.BIGINT));
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(Visitor<R, C> visitor, C context) {
|
||||
return visitor.visit(this, context);
|
||||
}
|
||||
|
||||
public Expression getLeft() {
|
||||
return left;
|
||||
}
|
||||
|
||||
public Expression getRight() {
|
||||
return right;
|
||||
}
|
||||
|
||||
public Type getRegType() {
|
||||
return regType;
|
||||
}
|
||||
|
||||
public Expression getReg() {
|
||||
return reg;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("[%s %s %s reg: %s]", getLeft(), getRegType(), getRight(), getReg());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (!(other instanceof StringRegExpression)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
StringRegExpression that = (StringRegExpression) other;
|
||||
return (regType == that.regType)
|
||||
&& Objects.equals(left, that.left)
|
||||
&& Objects.equals(left, that.right)
|
||||
&& Objects.equals(reg, that.reg);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(regType, left, right, reg);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,67 @@
|
|||
/*
|
||||
*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression;
|
||||
|
||||
import static com.pingcap.tikv.types.MySQLType.*;
|
||||
|
||||
import com.pingcap.tikv.types.MySQLType;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
public class TypeBlacklist extends Blacklist {
|
||||
private static final Map<MySQLType, String> typeToMySQLMap = initialTypeMap();
|
||||
|
||||
private static HashMap<MySQLType, String> initialTypeMap() {
|
||||
HashMap<MySQLType, String> map = new HashMap<>();
|
||||
map.put(TypeDecimal, "decimal");
|
||||
map.put(TypeTiny, "tinyint");
|
||||
map.put(TypeShort, "smallint");
|
||||
map.put(TypeLong, "int");
|
||||
map.put(TypeFloat, "float");
|
||||
map.put(TypeDouble, "double");
|
||||
map.put(TypeNull, "null");
|
||||
map.put(TypeTimestamp, "timestamp");
|
||||
map.put(TypeLonglong, "bigint");
|
||||
map.put(TypeInt24, "mediumint");
|
||||
map.put(TypeDate, "date");
|
||||
map.put(TypeDuration, "time");
|
||||
map.put(TypeDatetime, "datetime");
|
||||
map.put(TypeYear, "year");
|
||||
map.put(TypeNewDate, "date");
|
||||
map.put(TypeVarchar, "varchar");
|
||||
map.put(TypeJSON, "json");
|
||||
map.put(TypeNewDecimal, "decimal");
|
||||
map.put(TypeEnum, "enum");
|
||||
map.put(TypeSet, "set");
|
||||
map.put(TypeTinyBlob, "tinytext");
|
||||
map.put(TypeMediumBlob, "mediumtext");
|
||||
map.put(TypeLongBlob, "longtext");
|
||||
map.put(TypeBlob, "text");
|
||||
map.put(TypeVarString, "varString");
|
||||
map.put(TypeString, "string");
|
||||
return map;
|
||||
}
|
||||
|
||||
public TypeBlacklist(String typesString) {
|
||||
super(typesString);
|
||||
}
|
||||
|
||||
public boolean isUnsupportedType(MySQLType sqlType) {
|
||||
return isUnsupported(typeToMySQLMap.getOrDefault(sqlType, ""));
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,38 @@
|
|||
/*
|
||||
*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression;
|
||||
|
||||
public abstract class Visitor<R, C> {
|
||||
protected abstract R visit(ColumnRef node, C context);
|
||||
|
||||
protected abstract R visit(ComparisonBinaryExpression node, C context);
|
||||
|
||||
protected abstract R visit(StringRegExpression node, C context);
|
||||
|
||||
protected abstract R visit(ArithmeticBinaryExpression node, C context);
|
||||
|
||||
protected abstract R visit(LogicalBinaryExpression node, C context);
|
||||
|
||||
protected abstract R visit(Constant node, C context);
|
||||
|
||||
protected abstract R visit(AggregateFunction node, C context);
|
||||
|
||||
protected abstract R visit(IsNull node, C context);
|
||||
|
||||
protected abstract R visit(Not node, C context);
|
||||
}
|
||||
|
|
@ -0,0 +1,42 @@
|
|||
/*
|
||||
* Copyright 2018 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression.visitor;
|
||||
|
||||
import com.pingcap.tikv.expression.ColumnRef;
|
||||
import com.pingcap.tikv.expression.Expression;
|
||||
|
||||
public class ColumnMatcher extends DefaultVisitor<Boolean, Void> {
|
||||
private final ColumnRef columnRef;
|
||||
|
||||
private ColumnMatcher(ColumnRef exp) {
|
||||
this.columnRef = exp;
|
||||
}
|
||||
|
||||
public static Boolean match(ColumnRef col, Expression expression) {
|
||||
ColumnMatcher matcher = new ColumnMatcher(col);
|
||||
return expression.accept(matcher, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Boolean process(Expression node, Void context) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Boolean visit(ColumnRef node, Void context) {
|
||||
return node.getColumnInfo().matchName(columnRef.getName());
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,72 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression.visitor;
|
||||
|
||||
import com.pingcap.tikv.expression.*;
|
||||
|
||||
public class DefaultVisitor<R, C> extends Visitor<R, C> {
|
||||
protected R process(Expression node, C context) {
|
||||
for (Expression expr : node.getChildren()) {
|
||||
expr.accept(this, context);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected R visit(ColumnRef node, C context) {
|
||||
return process(node, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected R visit(ComparisonBinaryExpression node, C context) {
|
||||
return process(node, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected R visit(StringRegExpression node, C context) {
|
||||
return process(node, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected R visit(ArithmeticBinaryExpression node, C context) {
|
||||
return process(node, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected R visit(LogicalBinaryExpression node, C context) {
|
||||
return process(node, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected R visit(Constant node, C context) {
|
||||
return process(node, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected R visit(AggregateFunction node, C context) {
|
||||
return process(node, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected R visit(IsNull node, C context) {
|
||||
return process(node, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected R visit(Not node, C context) {
|
||||
return process(node, context);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,224 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression.visitor;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.pingcap.tikv.exception.TiExpressionException;
|
||||
import com.pingcap.tikv.expression.*;
|
||||
import com.pingcap.tikv.expression.AggregateFunction.FunctionType;
|
||||
import com.pingcap.tikv.types.DataType;
|
||||
import com.pingcap.tikv.types.DecimalType;
|
||||
import com.pingcap.tikv.types.IntegerType;
|
||||
import com.pingcap.tikv.types.RealType;
|
||||
import com.pingcap.tikv.util.Pair;
|
||||
import java.util.IdentityHashMap;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Validate and infer expression type Collected results are returned getTypeMap For now we don't do
|
||||
* any type promotion and only coerce from left to right.
|
||||
*/
|
||||
public class ExpressionTypeCoercer extends Visitor<Pair<DataType, Double>, DataType> {
|
||||
private final IdentityHashMap<Expression, DataType> typeMap = new IdentityHashMap<>();
|
||||
private static final double MAX_CREDIBILITY = 1.0;
|
||||
private static final double MIN_CREDIBILITY = 0.1;
|
||||
private static final double COLUMN_REF_CRED = MAX_CREDIBILITY;
|
||||
private static final double CONSTANT_CRED = MIN_CREDIBILITY;
|
||||
private static final double LOGICAL_OP_CRED = MAX_CREDIBILITY;
|
||||
private static final double COMPARISON_OP_CRED = MAX_CREDIBILITY;
|
||||
private static final double SRING_REG_OP_CRED = MAX_CREDIBILITY;
|
||||
private static final double FUNCTION_CRED = MAX_CREDIBILITY;
|
||||
private static final double ISNULL_CRED = MAX_CREDIBILITY;
|
||||
private static final double NOT_CRED = MAX_CREDIBILITY;
|
||||
|
||||
public IdentityHashMap<Expression, DataType> getTypeMap() {
|
||||
return typeMap;
|
||||
}
|
||||
|
||||
public static DataType inferType(Expression expression) {
|
||||
ExpressionTypeCoercer inf = new ExpressionTypeCoercer();
|
||||
return inf.infer(expression);
|
||||
}
|
||||
|
||||
public DataType infer(Expression expression) {
|
||||
requireNonNull(expression, "expression is null");
|
||||
return expression.accept(this, null).first;
|
||||
}
|
||||
|
||||
public void infer(List<? extends Expression> expressions) {
|
||||
requireNonNull(expressions, "expressions is null");
|
||||
expressions.forEach(expr -> expr.accept(this, null));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Pair<DataType, Double> visit(ColumnRef node, DataType targetType) {
|
||||
DataType type = node.getType();
|
||||
if (targetType != null && !targetType.equals(type)) {
|
||||
throw new TiExpressionException(String.format("Column %s cannot be %s", node, targetType));
|
||||
}
|
||||
typeMap.put(node, type);
|
||||
return Pair.create(type, COLUMN_REF_CRED);
|
||||
}
|
||||
|
||||
  // Try to coerceType if needed
  // A column reference is source of coerce and constant is the subject to coerce
  // targetType null means no coerce needed from parent and choose the highest credibility result
  protected Pair<DataType, Double> coerceType(DataType targetType, Expression... nodes) {
    if (nodes.length == 0) {
      throw new TiExpressionException("failed to verify empty node list");
    }
    if (targetType == null) {
      // Pass 1: visit each operand unconstrained and keep the result with the
      // highest credibility; its type becomes the coercion target.
      Pair<DataType, Double> baseline = nodes[0].accept(this, null);
      for (int i = 1; i < nodes.length; i++) {
        Pair<DataType, Double> current = nodes[i].accept(this, null);
        if (current.second > baseline.second) {
          baseline = current;
        }
      }
      // Pass 2: re-visit every operand with the winning type so low-credibility
      // nodes (e.g. constants) get re-typed. accept() mutates typeMap, so the
      // two-pass order is essential.
      for (Expression node : nodes) {
        node.accept(this, baseline.first);
      }
      return baseline;
    } else {
      // Parent dictated the type: push it down to every operand and report the
      // strongest credibility observed among them.
      double credibility = -1;
      for (Expression node : nodes) {
        Pair<DataType, Double> result = node.accept(this, targetType);
        if (result.second > credibility) {
          credibility = result.second;
        }
      }
      return Pair.create(targetType, credibility);
    }
  }
|
||||
@Override
|
||||
protected Pair<DataType, Double> visit(ComparisonBinaryExpression node, DataType targetType) {
|
||||
if (targetType != null && !targetType.equals(IntegerType.BOOLEAN)) {
|
||||
throw new TiExpressionException(String.format("Comparison result cannot be %s", targetType));
|
||||
}
|
||||
if (!typeMap.containsKey(node)) {
|
||||
coerceType(null, node.getLeft(), node.getRight());
|
||||
typeMap.put(node, IntegerType.BOOLEAN);
|
||||
}
|
||||
return Pair.create(IntegerType.BOOLEAN, COMPARISON_OP_CRED);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Pair<DataType, Double> visit(StringRegExpression node, DataType targetType) {
|
||||
if (targetType != null && !targetType.equals(IntegerType.BOOLEAN)) {
|
||||
throw new TiExpressionException(String.format("Comparison result cannot be %s", targetType));
|
||||
}
|
||||
if (!typeMap.containsKey(node)) {
|
||||
coerceType(null, node.getLeft(), node.getRight());
|
||||
typeMap.put(node, IntegerType.BOOLEAN);
|
||||
}
|
||||
return Pair.create(IntegerType.BOOLEAN, SRING_REG_OP_CRED);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Pair<DataType, Double> visit(ArithmeticBinaryExpression node, DataType targetType) {
|
||||
Pair<DataType, Double> result = coerceType(targetType, node.getLeft(), node.getRight());
|
||||
typeMap.put(node, result.first);
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Pair<DataType, Double> visit(LogicalBinaryExpression node, DataType targetType) {
|
||||
if (targetType != null && !targetType.equals(IntegerType.BOOLEAN)) {
|
||||
throw new TiExpressionException(String.format("Comparison result cannot be %s", targetType));
|
||||
}
|
||||
if (!typeMap.containsKey(node)) {
|
||||
coerceType(null, node.getLeft(), node.getRight());
|
||||
typeMap.put(node, IntegerType.BOOLEAN);
|
||||
}
|
||||
return Pair.create(IntegerType.BOOLEAN, LOGICAL_OP_CRED);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Pair<DataType, Double> visit(Constant node, DataType targetType) {
|
||||
if (targetType == null) {
|
||||
return Pair.create(node.getType(), CONSTANT_CRED);
|
||||
} else {
|
||||
node.setType(targetType);
|
||||
typeMap.put(node, targetType);
|
||||
return Pair.create(targetType, CONSTANT_CRED);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Pair<DataType, Double> visit(AggregateFunction node, DataType targetType) {
|
||||
FunctionType fType = node.getType();
|
||||
coerceType(null, node.getArgument());
|
||||
switch (fType) {
|
||||
case Count:
|
||||
{
|
||||
if (targetType != null && targetType.equals(IntegerType.BIGINT)) {
|
||||
throw new TiExpressionException(String.format("Count cannot be %s", targetType));
|
||||
}
|
||||
typeMap.put(node, IntegerType.BIGINT);
|
||||
return Pair.create(targetType, FUNCTION_CRED);
|
||||
}
|
||||
case Sum:
|
||||
{
|
||||
if (targetType != null && targetType.equals(DecimalType.DECIMAL)) {
|
||||
throw new TiExpressionException(String.format("Sum cannot be %s", targetType));
|
||||
}
|
||||
DataType colType = node.getArgument().accept(this, null).first;
|
||||
if (colType instanceof RealType) {
|
||||
typeMap.put(node, RealType.DOUBLE);
|
||||
} else {
|
||||
typeMap.put(node, DecimalType.DECIMAL);
|
||||
}
|
||||
return Pair.create(targetType, FUNCTION_CRED);
|
||||
}
|
||||
case First:
|
||||
case Max:
|
||||
case Min:
|
||||
{
|
||||
Pair<DataType, Double> result = coerceType(targetType, node.getArgument());
|
||||
typeMap.put(node, result.first);
|
||||
return result;
|
||||
}
|
||||
default:
|
||||
throw new TiExpressionException(String.format("Unknown function %s", fType));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Pair<DataType, Double> visit(IsNull node, DataType targetType) {
|
||||
if (targetType != null && !targetType.equals(IntegerType.BOOLEAN)) {
|
||||
throw new TiExpressionException(String.format("IsNull result cannot be %s", targetType));
|
||||
}
|
||||
if (!typeMap.containsKey(node)) {
|
||||
coerceType(null, node.getExpression());
|
||||
typeMap.put(node, IntegerType.BOOLEAN);
|
||||
}
|
||||
return Pair.create(IntegerType.BOOLEAN, ISNULL_CRED);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Pair<DataType, Double> visit(Not node, DataType targetType) {
|
||||
if (targetType != null && !targetType.equals(IntegerType.BOOLEAN)) {
|
||||
throw new TiExpressionException(String.format("Not result cannot be %s", targetType));
|
||||
}
|
||||
if (!typeMap.containsKey(node)) {
|
||||
coerceType(null, node.getExpression());
|
||||
typeMap.put(node, IntegerType.BOOLEAN);
|
||||
}
|
||||
return Pair.create(IntegerType.BOOLEAN, NOT_CRED);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,111 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression.visitor;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.pingcap.tikv.expression.*;
|
||||
import com.pingcap.tikv.expression.ComparisonBinaryExpression.NormalizedPredicate;
|
||||
import com.pingcap.tikv.meta.TiIndexColumn;
|
||||
|
||||
/**
 * Tests whether a predicate matches an index column entirely and can be converted to
 * index-related ranges. If a predicate matches only partially, it returns false.
 */
|
||||
public class IndexMatcher extends DefaultVisitor<Boolean, Void> {
  // When true, only exact-equality (point) predicates are accepted.
  private final boolean matchEqualTestOnly;
  private final TiIndexColumn indexColumn;

  private IndexMatcher(TiIndexColumn indexColumn, boolean matchEqualTestOnly) {
    this.matchEqualTestOnly = matchEqualTestOnly;
    this.indexColumn = requireNonNull(indexColumn, "index column is null");
  }

  /** Matcher that accepts only equality predicates (point-query ranges). */
  public static IndexMatcher equalOnlyMatcher(TiIndexColumn indexColumn) {
    return new IndexMatcher(indexColumn, true);
  }

  /** Matcher that accepts any range-convertible predicate on the index column. */
  public static IndexMatcher matcher(TiIndexColumn indexColumn) {
    return new IndexMatcher(indexColumn, false);
  }

  /** Returns true if {@code expression} fully covers this matcher's index column. */
  public boolean match(Expression expression) {
    return expression.accept(this, null);
  }

  // Default: any node type without an explicit visit overload cannot be
  // converted to index ranges.
  @Override
  protected Boolean process(Expression node, Void context) {
    return false;
  }

  @Override
  protected Boolean visit(ColumnRef node, Void context) {
    String indexColumnName = indexColumn.getName();
    return node.getColumnInfo().matchName(indexColumnName);
  }

  @Override
  protected Boolean visit(ComparisonBinaryExpression node, Void context) {
    switch (node.getComparisonType()) {
      case LESS_THAN:
      case LESS_EQUAL:
      case GREATER_THAN:
      case GREATER_EQUAL:
      case NOT_EQUAL:
        if (matchEqualTestOnly) {
          // range comparisons are not point tests
          return false;
        }
        // intentional fall-through: when range tests are allowed, non-equality
        // comparisons share the normalization logic below
      case EQUAL:
        NormalizedPredicate predicate = node.normalize();
        if (predicate == null) {
          // not of the form <column> <op> <constant>
          return false;
        }
        return predicate.getColumnRef().accept(this, context);
      default:
        return false;
    }
  }

  @Override
  protected Boolean visit(StringRegExpression node, Void context) {
    switch (node.getRegType()) {
        // If the predicate is StartsWith(col, 'a'), this predicate
        // indicates a range of ['a', +∞) which can be used by index scan
      case STARTS_WITH:
        if (matchEqualTestOnly) {
          // a prefix match is a range, never a point test
          return false;
        }
        return node.getLeft().accept(this, context);
      default:
        return false;
    }
  }

  @Override
  protected Boolean visit(LogicalBinaryExpression node, Void context) {
    switch (node.getCompType()) {
      case AND:
        if (matchEqualTestOnly) {
          // a conjunction is never a single point test
          return false;
        }
        // intentional fall-through: otherwise AND matches iff both children match
      case OR:
      case XOR:
        return node.getLeft().accept(this, context) && node.getRight().accept(this, context);
      default:
        return false;
    }
  }
}
|
||||
|
|
@ -0,0 +1,200 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression.visitor;
|
||||
|
||||
import com.google.common.collect.Range;
|
||||
import com.google.common.collect.RangeSet;
|
||||
import com.google.common.collect.TreeRangeSet;
|
||||
import com.pingcap.tikv.exception.TiExpressionException;
|
||||
import com.pingcap.tikv.expression.*;
|
||||
import com.pingcap.tikv.expression.ComparisonBinaryExpression.NormalizedPredicate;
|
||||
import com.pingcap.tikv.key.TypedKey;
|
||||
import com.pingcap.tikv.meta.TiIndexColumn;
|
||||
import com.pingcap.tikv.meta.TiIndexInfo;
|
||||
import com.pingcap.tikv.meta.TiTableInfo;
|
||||
import com.pingcap.tikv.types.DataType;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
|
||||
public class IndexRangeBuilder extends DefaultVisitor<RangeSet<TypedKey>, Void> {
|
||||
|
||||
private final Map<ColumnRef, Integer> lengths; // length of corresponding ColumnRef
|
||||
|
||||
public IndexRangeBuilder(TiTableInfo table, TiIndexInfo index) {
|
||||
Map<ColumnRef, Integer> result = new HashMap<>();
|
||||
if (table != null && index != null) {
|
||||
for (TiIndexColumn indexColumn : index.getIndexColumns()) {
|
||||
ColumnRef columnRef = ColumnRef.create(indexColumn.getName(), table);
|
||||
result.put(columnRef, (int) indexColumn.getLength());
|
||||
}
|
||||
}
|
||||
this.lengths = result;
|
||||
}
|
||||
|
||||
public Set<Range<TypedKey>> buildRange(Expression predicate) {
|
||||
Objects.requireNonNull(predicate, "predicate is null");
|
||||
return predicate.accept(this, null).asRanges();
|
||||
}
|
||||
|
||||
private static void throwOnError(Expression node) {
|
||||
final String errorFormat = "Unsupported conversion to Range: %s";
|
||||
throw new TiExpressionException(String.format(errorFormat, node));
|
||||
}
|
||||
|
||||
protected RangeSet<TypedKey> process(Expression node, Void context) {
|
||||
throwOnError(node);
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected RangeSet<TypedKey> visit(LogicalBinaryExpression node, Void context) {
|
||||
RangeSet<TypedKey> leftRanges = node.getLeft().accept(this, context);
|
||||
RangeSet<TypedKey> rightRanges = node.getRight().accept(this, context);
|
||||
switch (node.getCompType()) {
|
||||
case AND:
|
||||
for (Range<TypedKey> range : leftRanges.asRanges()) {
|
||||
rightRanges = rightRanges.subRangeSet(range);
|
||||
}
|
||||
break;
|
||||
case OR:
|
||||
rightRanges.addAll(leftRanges);
|
||||
break;
|
||||
case XOR:
|
||||
// AND
|
||||
RangeSet<TypedKey> intersection = rightRanges;
|
||||
for (Range<TypedKey> range : leftRanges.asRanges()) {
|
||||
intersection = intersection.subRangeSet(range);
|
||||
}
|
||||
// full set
|
||||
rightRanges.addAll(leftRanges);
|
||||
rightRanges.removeAll(intersection);
|
||||
break;
|
||||
default:
|
||||
throwOnError(node);
|
||||
}
|
||||
return rightRanges;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected RangeSet<TypedKey> visit(ComparisonBinaryExpression node, Void context) {
|
||||
NormalizedPredicate predicate = node.normalize();
|
||||
if (predicate == null) {
|
||||
throwOnError(node);
|
||||
}
|
||||
// In order to match a prefix index, we have to cut the literal by prefix length.
|
||||
// e.g., for table t:
|
||||
// CREATE TABLE `t` {
|
||||
// `b` VARCHAR(10) DEFAULT NULL,
|
||||
// KEY `prefix_index` (`b`(2))
|
||||
// }
|
||||
//
|
||||
// b(2) > "bbc" -> ["bb", +∞)
|
||||
// b(2) >= "bbc" -> ["bb", +∞)
|
||||
// b(2) < "bbc" -> (-∞, "bb"]
|
||||
// b(2) <= "bbc" -> (-∞, "bb"]
|
||||
// b(2) = "bbc" -> ["bb", "bb"]
|
||||
// b(2) > "b" -> ["b", +∞)
|
||||
// b(2) >= "b" -> ["b", +∞)
|
||||
// b(2) < "b" -> (-∞, "b"]
|
||||
// b(2) <= "b" -> (-∞, "b"]
|
||||
//
|
||||
// For varchar, `b`(2) will take first two characters(bytes) as prefix index.
|
||||
// TODO: Note that TiDB only supports UTF-8, we need to check if prefix index behave differently
|
||||
// under other encoding methods
|
||||
int prefixLen = lengths.getOrDefault(predicate.getColumnRef(), DataType.UNSPECIFIED_LEN);
|
||||
TypedKey literal = predicate.getTypedLiteral(prefixLen);
|
||||
RangeSet<TypedKey> ranges = TreeRangeSet.create();
|
||||
|
||||
if (prefixLen != DataType.UNSPECIFIED_LEN) {
|
||||
// With prefix length specified, the filter is loosen and so should the ranges
|
||||
switch (predicate.getType()) {
|
||||
case GREATER_THAN:
|
||||
case GREATER_EQUAL:
|
||||
ranges.add(Range.atLeast(literal));
|
||||
break;
|
||||
case LESS_THAN:
|
||||
case LESS_EQUAL:
|
||||
ranges.add(Range.atMost(literal));
|
||||
break;
|
||||
case EQUAL:
|
||||
ranges.add(Range.singleton(literal));
|
||||
break;
|
||||
case NOT_EQUAL:
|
||||
// Should return full range because prefix index predicate for NOT_EQUAL
|
||||
// will be split into an NOT_EQUAL filter and a full range scan
|
||||
ranges.add(Range.all());
|
||||
break;
|
||||
default:
|
||||
throwOnError(node);
|
||||
}
|
||||
} else {
|
||||
switch (predicate.getType()) {
|
||||
case GREATER_THAN:
|
||||
ranges.add(Range.greaterThan(literal));
|
||||
break;
|
||||
case GREATER_EQUAL:
|
||||
ranges.add(Range.atLeast(literal));
|
||||
break;
|
||||
case LESS_THAN:
|
||||
ranges.add(Range.lessThan(literal));
|
||||
break;
|
||||
case LESS_EQUAL:
|
||||
ranges.add(Range.atMost(literal));
|
||||
break;
|
||||
case EQUAL:
|
||||
ranges.add(Range.singleton(literal));
|
||||
break;
|
||||
case NOT_EQUAL:
|
||||
ranges.add(Range.lessThan(literal));
|
||||
ranges.add(Range.greaterThan(literal));
|
||||
break;
|
||||
default:
|
||||
throwOnError(node);
|
||||
}
|
||||
}
|
||||
return ranges;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected RangeSet<TypedKey> visit(StringRegExpression node, Void context) {
|
||||
ColumnRef columnRef = node.getColumnRef();
|
||||
// In order to match a prefix index, we have to cut the literal by prefix length.
|
||||
// e.g., for table t:
|
||||
// CREATE TABLE `t` {
|
||||
// `c1` VARCHAR(10) DEFAULT NULL,
|
||||
// KEY `prefix_index` (`c`(2))
|
||||
// }
|
||||
// when the predicate is `c1` LIKE 'abc%', the index range should be ['ab', 'ab'].
|
||||
// when the predicate is `c1` LIKE 'a%', the index range should be ['a', 'b').
|
||||
// for varchar, `c1`(2) will take first two characters(bytes) as prefix index.
|
||||
// TODO: Note that TiDB only supports UTF-8, we need to check if prefix index behave differently
|
||||
// under other encoding methods
|
||||
int prefixLen = lengths.getOrDefault(columnRef, DataType.UNSPECIFIED_LEN);
|
||||
TypedKey literal = node.getTypedLiteral(prefixLen);
|
||||
RangeSet<TypedKey> ranges = TreeRangeSet.create();
|
||||
|
||||
switch (node.getRegType()) {
|
||||
case STARTS_WITH:
|
||||
ranges.add(Range.atLeast(literal).intersection(Range.lessThan(literal.next(prefixLen))));
|
||||
break;
|
||||
default:
|
||||
throwOnError(node);
|
||||
}
|
||||
return ranges;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression.visitor;
|
||||
|
||||
import com.pingcap.tikv.expression.ColumnRef;
|
||||
import com.pingcap.tikv.expression.Expression;
|
||||
import com.pingcap.tikv.meta.TiTableInfo;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
public class MetaResolver extends DefaultVisitor<Void, Expression> {
|
||||
public static void resolve(Expression expression, TiTableInfo table) {
|
||||
MetaResolver resolver = new MetaResolver(table);
|
||||
resolver.resolve(expression);
|
||||
}
|
||||
|
||||
public static void resolve(List<? extends Expression> expressions, TiTableInfo table) {
|
||||
MetaResolver resolver = new MetaResolver(table);
|
||||
resolver.resolve(expressions);
|
||||
}
|
||||
|
||||
private final TiTableInfo table;
|
||||
|
||||
public MetaResolver(TiTableInfo table) {
|
||||
this.table = table;
|
||||
}
|
||||
|
||||
public void resolve(List<? extends Expression> expressions) {
|
||||
expressions.forEach(expression -> expression.accept(this, null));
|
||||
}
|
||||
|
||||
public void resolve(Expression expression) {
|
||||
Objects.requireNonNull(expression, "expression is null");
|
||||
expression.accept(this, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Void visit(ColumnRef node, Expression parent) {
|
||||
node.resolve(table);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,315 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression.visitor;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.pingcap.tidb.tipb.Expr;
|
||||
import com.pingcap.tidb.tipb.ExprType;
|
||||
import com.pingcap.tidb.tipb.FieldType;
|
||||
import com.pingcap.tidb.tipb.ScalarFuncSig;
|
||||
import com.pingcap.tikv.codec.Codec.IntegerCodec;
|
||||
import com.pingcap.tikv.codec.CodecDataOutput;
|
||||
import com.pingcap.tikv.exception.TiExpressionException;
|
||||
import com.pingcap.tikv.expression.*;
|
||||
import com.pingcap.tikv.expression.AggregateFunction.FunctionType;
|
||||
import com.pingcap.tikv.types.*;
|
||||
import com.pingcap.tikv.types.DataType.EncodeType;
|
||||
import java.util.IdentityHashMap;
|
||||
import java.util.Map;
|
||||
|
||||
public class ProtoConverter extends Visitor<Expr, Object> {
|
||||
// All concrete data type should be hooked to a type name
|
||||
private static final Map<Class<? extends DataType>, String> SCALAR_SIG_MAP =
|
||||
ImmutableMap.<Class<? extends DataType>, String>builder()
|
||||
.put(IntegerType.class, "Int")
|
||||
.put(BitType.class, "Int")
|
||||
.put(DecimalType.class, "Decimal")
|
||||
.put(RealType.class, "Real")
|
||||
.put(DateTimeType.class, "Time")
|
||||
.put(DateType.class, "Time")
|
||||
.put(TimestampType.class, "Time")
|
||||
.put(BytesType.class, "String")
|
||||
.put(StringType.class, "String")
|
||||
.build();
|
||||
|
||||
private final IdentityHashMap<Expression, DataType> typeMap;
|
||||
private final boolean validateColPosition;
|
||||
|
||||
public ProtoConverter(IdentityHashMap<Expression, DataType> typeMap) {
|
||||
this(typeMap, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Instantiate a {{@code ProtoConverter}} using a typeMap.
|
||||
*
|
||||
* @param typeMap the type map
|
||||
* @param validateColPosition whether to consider column position in this converter. By default, a
|
||||
* {{@code TiDAGRequest}} should check whether a {{@code ColumnRef}}'s position is correct in
|
||||
* it's executors. Can ignore this validation if `validateColPosition` is set to false.
|
||||
*/
|
||||
public ProtoConverter(
|
||||
IdentityHashMap<Expression, DataType> typeMap, boolean validateColPosition) {
|
||||
this.typeMap = typeMap;
|
||||
this.validateColPosition = validateColPosition;
|
||||
}
|
||||
|
||||
private DataType getType(Expression expression) {
|
||||
DataType type = typeMap.get(expression);
|
||||
if (type == null) {
|
||||
throw new TiExpressionException(String.format("Expression %s type unknown", expression));
|
||||
}
|
||||
return type;
|
||||
}
|
||||
|
||||
private String getTypeSignature(Expression expression) {
|
||||
DataType type = getType(expression);
|
||||
String typeSignature = SCALAR_SIG_MAP.get(type.getClass());
|
||||
if (typeSignature == null) {
|
||||
throw new TiExpressionException(String.format("Type %s signature unknown", type));
|
||||
}
|
||||
return typeSignature;
|
||||
}
|
||||
|
||||
public static Expr toProto(Expression expression) {
|
||||
return toProto(expression, null);
|
||||
}
|
||||
|
||||
public static Expr toProto(Expression expression, Object context) {
|
||||
ExpressionTypeCoercer coercer = new ExpressionTypeCoercer();
|
||||
coercer.infer(expression);
|
||||
ProtoConverter converter = new ProtoConverter(coercer.getTypeMap());
|
||||
return expression.accept(converter, context);
|
||||
}
|
||||
|
||||
// Generate protobuf builder with partial data encoded.
|
||||
// Scala Signature is left alone
|
||||
private Expr.Builder scalaToPartialProto(Expression node, Object context) {
|
||||
Expr.Builder builder = Expr.newBuilder();
|
||||
// Scalar function type
|
||||
builder.setTp(ExprType.ScalarFunc);
|
||||
|
||||
// Return type
|
||||
builder.setFieldType(FieldType.newBuilder().setTp(getType(node).getTypeCode()).build());
|
||||
|
||||
for (Expression child : node.getChildren()) {
|
||||
Expr exprProto = child.accept(this, context);
|
||||
builder.addChildren(exprProto);
|
||||
}
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Expr visit(LogicalBinaryExpression node, Object context) {
|
||||
ScalarFuncSig protoSig;
|
||||
switch (node.getCompType()) {
|
||||
case AND:
|
||||
protoSig = ScalarFuncSig.LogicalAnd;
|
||||
break;
|
||||
case OR:
|
||||
protoSig = ScalarFuncSig.LogicalOr;
|
||||
break;
|
||||
case XOR:
|
||||
protoSig = ScalarFuncSig.LogicalXor;
|
||||
break;
|
||||
default:
|
||||
throw new TiExpressionException(
|
||||
String.format("Unknown comparison type %s", node.getCompType()));
|
||||
}
|
||||
Expr.Builder builder = scalaToPartialProto(node, context);
|
||||
builder.setSig(protoSig);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Expr visit(ArithmeticBinaryExpression node, Object context) {
|
||||
// assume after type coerce, children should be compatible
|
||||
Expression child = node.getLeft();
|
||||
String typeSignature = getTypeSignature(child);
|
||||
ScalarFuncSig protoSig;
|
||||
switch (node.getCompType()) {
|
||||
// TODO: Add test for bitwise push down
|
||||
case BIT_AND:
|
||||
protoSig = ScalarFuncSig.BitAndSig;
|
||||
break;
|
||||
case BIT_OR:
|
||||
protoSig = ScalarFuncSig.BitOrSig;
|
||||
break;
|
||||
case BIT_XOR:
|
||||
protoSig = ScalarFuncSig.BitXorSig;
|
||||
break;
|
||||
case DIVIDE:
|
||||
protoSig = ScalarFuncSig.valueOf("Divide" + typeSignature);
|
||||
break;
|
||||
case MINUS:
|
||||
protoSig = ScalarFuncSig.valueOf("Minus" + typeSignature);
|
||||
break;
|
||||
case MULTIPLY:
|
||||
protoSig = ScalarFuncSig.valueOf("Multiply" + typeSignature);
|
||||
break;
|
||||
case PLUS:
|
||||
protoSig = ScalarFuncSig.valueOf("Plus" + typeSignature);
|
||||
break;
|
||||
default:
|
||||
throw new TiExpressionException(
|
||||
String.format("Unknown comparison type %s", node.getCompType()));
|
||||
}
|
||||
Expr.Builder builder = scalaToPartialProto(node, context);
|
||||
builder.setSig(protoSig);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Expr visit(ComparisonBinaryExpression node, Object context) {
|
||||
// assume after type coerce, children should be compatible
|
||||
Expression child = node.getLeft();
|
||||
String typeSignature = getTypeSignature(child);
|
||||
ScalarFuncSig protoSig;
|
||||
switch (node.getComparisonType()) {
|
||||
case EQUAL:
|
||||
protoSig = ScalarFuncSig.valueOf("EQ" + typeSignature);
|
||||
break;
|
||||
case GREATER_EQUAL:
|
||||
protoSig = ScalarFuncSig.valueOf("GE" + typeSignature);
|
||||
break;
|
||||
case GREATER_THAN:
|
||||
protoSig = ScalarFuncSig.valueOf("GT" + typeSignature);
|
||||
break;
|
||||
case LESS_EQUAL:
|
||||
protoSig = ScalarFuncSig.valueOf("LE" + typeSignature);
|
||||
break;
|
||||
case LESS_THAN:
|
||||
protoSig = ScalarFuncSig.valueOf("LT" + typeSignature);
|
||||
break;
|
||||
case NOT_EQUAL:
|
||||
protoSig = ScalarFuncSig.valueOf("NE" + typeSignature);
|
||||
break;
|
||||
default:
|
||||
throw new TiExpressionException(
|
||||
String.format("Unknown comparison type %s", node.getComparisonType()));
|
||||
}
|
||||
Expr.Builder builder = scalaToPartialProto(node, context);
|
||||
builder.setSig(protoSig);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Expr visit(StringRegExpression node, Object context) {
|
||||
// assume after type coerce, children should be compatible
|
||||
ScalarFuncSig protoSig;
|
||||
switch (node.getRegType()) {
|
||||
case STARTS_WITH:
|
||||
case CONTAINS:
|
||||
case ENDS_WITH:
|
||||
case LIKE:
|
||||
protoSig = ScalarFuncSig.LikeSig;
|
||||
break;
|
||||
default:
|
||||
throw new TiExpressionException(String.format("Unknown reg type %s", node.getRegType()));
|
||||
}
|
||||
Expr.Builder builder = scalaToPartialProto(node, context);
|
||||
builder.setSig(protoSig);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
protected Expr visit(ColumnRef node, Object context) {
|
||||
long position = 0;
|
||||
if (validateColPosition) {
|
||||
requireNonNull(context, "Context of a ColumnRef should not be null");
|
||||
Map<ColumnRef, Integer> colIdOffsetMap = (Map<ColumnRef, Integer>) context;
|
||||
position =
|
||||
requireNonNull(
|
||||
colIdOffsetMap.get(node), "Required column position info is not in a valid context.");
|
||||
}
|
||||
Expr.Builder builder = Expr.newBuilder();
|
||||
builder.setTp(ExprType.ColumnRef);
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
// After switching to DAG request mode, expression value
|
||||
// should be the index of table columns we provided in
|
||||
// the first executor of a DAG request.
|
||||
IntegerCodec.writeLong(cdo, position);
|
||||
builder.setVal(cdo.toByteString());
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Expr visit(Constant node, Object context) {
|
||||
Expr.Builder builder = Expr.newBuilder();
|
||||
if (node.getValue() == null) {
|
||||
builder.setTp(ExprType.Null);
|
||||
return builder.build();
|
||||
} else {
|
||||
DataType type = node.getType();
|
||||
builder.setTp(type.getProtoExprType());
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
type.encode(cdo, EncodeType.PROTO, node.getValue());
|
||||
builder.setVal(cdo.toByteString());
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Expr visit(AggregateFunction node, Object context) {
|
||||
Expr.Builder builder = Expr.newBuilder();
|
||||
|
||||
FunctionType type = node.getType();
|
||||
switch (type) {
|
||||
case Max:
|
||||
builder.setTp(ExprType.Max);
|
||||
break;
|
||||
case Sum:
|
||||
builder.setTp(ExprType.Sum);
|
||||
break;
|
||||
case Min:
|
||||
builder.setTp(ExprType.Min);
|
||||
break;
|
||||
case First:
|
||||
builder.setTp(ExprType.First);
|
||||
break;
|
||||
case Count:
|
||||
builder.setTp(ExprType.Count);
|
||||
break;
|
||||
}
|
||||
|
||||
for (Expression arg : node.getChildren()) {
|
||||
Expr exprProto = arg.accept(this, context);
|
||||
builder.addChildren(exprProto);
|
||||
}
|
||||
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Expr visit(IsNull node, Object context) {
|
||||
String typeSignature = getTypeSignature(node.getExpression());
|
||||
ScalarFuncSig protoSig = ScalarFuncSig.valueOf(typeSignature + "IsNull");
|
||||
Expr.Builder builder = scalaToPartialProto(node, context);
|
||||
builder.setSig(protoSig);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Expr visit(Not node, Object context) {
|
||||
ScalarFuncSig protoSig = ScalarFuncSig.UnaryNot;
|
||||
Expr.Builder builder = scalaToPartialProto(node, context);
|
||||
builder.setSig(protoSig);
|
||||
return builder.build();
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,65 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression.visitor;
|
||||
|
||||
import com.pingcap.tikv.expression.ComparisonBinaryExpression;
|
||||
import com.pingcap.tikv.expression.Expression;
|
||||
import com.pingcap.tikv.expression.LogicalBinaryExpression;
|
||||
|
||||
public class PseudoCostCalculator extends DefaultVisitor<Double, Void> {
|
||||
public static double calculateCost(Expression expr) {
|
||||
PseudoCostCalculator calc = new PseudoCostCalculator();
|
||||
return expr.accept(calc, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Double process(Expression node, Void context) {
|
||||
return 1.0;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Double visit(LogicalBinaryExpression node, Void context) {
|
||||
double leftCost = node.getLeft().accept(this, context);
|
||||
double rightCost = node.getLeft().accept(this, context);
|
||||
switch (node.getCompType()) {
|
||||
case AND:
|
||||
return leftCost * rightCost;
|
||||
case OR:
|
||||
case XOR:
|
||||
return leftCost + rightCost;
|
||||
default:
|
||||
return 1.0;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Double visit(ComparisonBinaryExpression node, Void context) {
|
||||
switch (node.getComparisonType()) {
|
||||
case EQUAL:
|
||||
return 0.01;
|
||||
case GREATER_EQUAL:
|
||||
case GREATER_THAN:
|
||||
case LESS_EQUAL:
|
||||
case LESS_THAN:
|
||||
// magic number for testing
|
||||
return 0.3;
|
||||
case NOT_EQUAL:
|
||||
return 0.99;
|
||||
default:
|
||||
return 1.0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,53 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.expression.visitor;
|
||||
|
||||
import com.pingcap.tikv.expression.Expression;
|
||||
import com.pingcap.tikv.expression.ExpressionBlacklist;
|
||||
|
||||
public class SupportedExpressionValidator extends DefaultVisitor<Boolean, ExpressionBlacklist> {
|
||||
private static final SupportedExpressionValidator validator = new SupportedExpressionValidator();
|
||||
|
||||
public static boolean isSupportedExpression(Expression node, ExpressionBlacklist blacklist) {
|
||||
if (!node.accept(validator, blacklist)) {
|
||||
return false;
|
||||
}
|
||||
try {
|
||||
ExpressionTypeCoercer coercer = new ExpressionTypeCoercer();
|
||||
coercer.infer(node);
|
||||
ProtoConverter protoConverter = new ProtoConverter(coercer.getTypeMap(), false);
|
||||
if (node.accept(protoConverter, null) == null) {
|
||||
return false;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Boolean process(Expression node, ExpressionBlacklist blacklist) {
|
||||
if (blacklist != null && blacklist.isUnsupportedPushdownExpr(getClass())) {
|
||||
return false;
|
||||
}
|
||||
for (Expression expr : node.getChildren()) {
|
||||
if (!expr.accept(this, blacklist)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,78 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.key;
|
||||
|
||||
import com.google.common.base.Joiner;
|
||||
import com.pingcap.tikv.codec.CodecDataOutput;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class CompoundKey extends Key {
|
||||
|
||||
private final List<Key> keys;
|
||||
|
||||
protected CompoundKey(List<Key> keys, byte[] value) {
|
||||
super(value);
|
||||
this.keys = keys;
|
||||
}
|
||||
|
||||
public static CompoundKey concat(Key lKey, Key rKey) {
|
||||
Builder builder = newBuilder();
|
||||
builder.append(lKey).append(rKey);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
public List<Key> getKeys() {
|
||||
return keys;
|
||||
}
|
||||
|
||||
public static Builder newBuilder() {
|
||||
return new Builder();
|
||||
}
|
||||
|
||||
public static class Builder {
|
||||
private final List<Key> keys = new ArrayList<>();
|
||||
|
||||
public Builder append(Key key) {
|
||||
if (key instanceof CompoundKey) {
|
||||
CompoundKey compKey = (CompoundKey) key;
|
||||
for (Key child : compKey.getKeys()) {
|
||||
append(child);
|
||||
}
|
||||
} else {
|
||||
keys.add(key);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public CompoundKey build() {
|
||||
int totalLen = 0;
|
||||
for (Key key : keys) {
|
||||
totalLen += key.getBytes().length;
|
||||
}
|
||||
CodecDataOutput cdo = new CodecDataOutput(totalLen);
|
||||
for (Key key : keys) {
|
||||
cdo.write(key.getBytes());
|
||||
}
|
||||
return new CompoundKey(keys, cdo.toBytes());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("[%s]", Joiner.on(",").useForNull("Null").join(keys));
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,72 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.key;
|
||||
|
||||
import com.google.common.base.Joiner;
|
||||
import com.pingcap.tikv.codec.Codec.IntegerCodec;
|
||||
import com.pingcap.tikv.codec.CodecDataOutput;
|
||||
import com.pingcap.tikv.exception.TypeException;
|
||||
|
||||
public class IndexKey extends Key {
|
||||
private static final byte[] IDX_PREFIX_SEP = new byte[] {'_', 'i'};
|
||||
|
||||
private final long tableId;
|
||||
private final long indexId;
|
||||
private final Key[] dataKeys;
|
||||
|
||||
private IndexKey(long tableId, long indexId, Key[] dataKeys) {
|
||||
super(encode(tableId, indexId, dataKeys));
|
||||
this.tableId = tableId;
|
||||
this.indexId = indexId;
|
||||
this.dataKeys = dataKeys;
|
||||
}
|
||||
|
||||
public static IndexKey toIndexKey(long tableId, long indexId, Key... dataKeys) {
|
||||
return new IndexKey(tableId, indexId, dataKeys);
|
||||
}
|
||||
|
||||
private static byte[] encode(long tableId, long indexId, Key[] dataKeys) {
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
cdo.write(TBL_PREFIX);
|
||||
IntegerCodec.writeLong(cdo, tableId);
|
||||
cdo.write(IDX_PREFIX_SEP);
|
||||
IntegerCodec.writeLong(cdo, indexId);
|
||||
for (Key key : dataKeys) {
|
||||
if (key == null) {
|
||||
throw new TypeException("key cannot be null");
|
||||
}
|
||||
cdo.write(key.getBytes());
|
||||
}
|
||||
return cdo.toBytes();
|
||||
}
|
||||
|
||||
public long getTableId() {
|
||||
return tableId;
|
||||
}
|
||||
|
||||
public long getIndexId() {
|
||||
return indexId;
|
||||
}
|
||||
|
||||
public Key[] getDataKeys() {
|
||||
return dataKeys;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("[%s]", Joiner.on(",").useForNull("null").join(dataKeys));
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,184 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.key;
|
||||
|
||||
import static com.pingcap.tikv.codec.KeyUtils.formatBytes;
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.google.protobuf.ByteString;
|
||||
import com.pingcap.tikv.codec.CodecDataOutput;
|
||||
import com.pingcap.tikv.types.DataType;
|
||||
import com.pingcap.tikv.util.FastByteComparisons;
|
||||
import java.util.Arrays;
|
||||
|
||||
public class Key implements Comparable<Key> {
|
||||
protected static final byte[] TBL_PREFIX = new byte[] {'t'};
|
||||
|
||||
protected final byte[] value;
|
||||
protected final int infFlag;
|
||||
|
||||
public static final Key EMPTY = createEmpty();
|
||||
public static final Key NULL = createNull();
|
||||
public static final Key MIN = createTypelessMin();
|
||||
public static final Key MAX = createTypelessMax();
|
||||
|
||||
private Key(byte[] value, boolean negative) {
|
||||
this.value = requireNonNull(value, "value is null");
|
||||
this.infFlag = (value.length == 0 ? 1 : 0) * (negative ? -1 : 1);
|
||||
}
|
||||
|
||||
protected Key(byte[] value) {
|
||||
this(value, false);
|
||||
}
|
||||
|
||||
public static Key toRawKey(ByteString bytes, boolean negative) {
|
||||
return new Key(bytes.toByteArray(), negative);
|
||||
}
|
||||
|
||||
public static Key toRawKey(ByteString bytes) {
|
||||
return new Key(bytes.toByteArray());
|
||||
}
|
||||
|
||||
public static Key toRawKey(byte[] bytes, boolean negative) {
|
||||
return new Key(bytes, negative);
|
||||
}
|
||||
|
||||
public static Key toRawKey(byte[] bytes) {
|
||||
return new Key(bytes);
|
||||
}
|
||||
|
||||
private static Key createNull() {
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
DataType.encodeNull(cdo);
|
||||
return new Key(cdo.toBytes()) {
|
||||
@Override
|
||||
public String toString() {
|
||||
return "null";
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static Key createEmpty() {
|
||||
return new Key(new byte[0]) {
|
||||
@Override
|
||||
public Key next() {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "EMPTY";
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static Key createTypelessMin() {
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
DataType.encodeIndex(cdo);
|
||||
return new Key(cdo.toBytes()) {
|
||||
@Override
|
||||
public String toString() {
|
||||
return "MIN";
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static Key createTypelessMax() {
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
DataType.encodeMaxValue(cdo);
|
||||
return new Key(cdo.toBytes()) {
|
||||
@Override
|
||||
public String toString() {
|
||||
return "MAX";
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* The next key for bytes domain It first plus one at LSB and if LSB overflows, a zero byte is
|
||||
* appended at the end Original bytes will be reused if possible
|
||||
*
|
||||
* @return encoded results
|
||||
*/
|
||||
public Key next() {
|
||||
return toRawKey(nextValue(value));
|
||||
}
|
||||
|
||||
static byte[] nextValue(byte[] value) {
|
||||
int i;
|
||||
byte[] newVal = Arrays.copyOf(value, value.length);
|
||||
for (i = newVal.length - 1; i >= 0; i--) {
|
||||
newVal[i]++;
|
||||
if (newVal[i] != 0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (i == -1) {
|
||||
return Arrays.copyOf(value, value.length + 1);
|
||||
} else {
|
||||
return newVal;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int compareTo(Key other) {
|
||||
requireNonNull(other, "other is null");
|
||||
if ((this.infFlag | other.infFlag) != 0) {
|
||||
return this.infFlag - other.infFlag;
|
||||
}
|
||||
return FastByteComparisons.compareTo(value, other.value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (other == this) {
|
||||
return true;
|
||||
}
|
||||
if (other instanceof Key) {
|
||||
return compareTo((Key) other) == 0;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Arrays.hashCode(value) * infFlag;
|
||||
}
|
||||
|
||||
public byte[] getBytes() {
|
||||
return value;
|
||||
}
|
||||
|
||||
public ByteString toByteString() {
|
||||
return ByteString.copyFrom(value);
|
||||
}
|
||||
|
||||
public int getInfFlag() {
|
||||
return infFlag;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
if (infFlag < 0) {
|
||||
return "-INF";
|
||||
} else if (infFlag > 0) {
|
||||
return "+INF";
|
||||
} else {
|
||||
return String.format("{%s}", formatBytes(value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,176 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.key;
|
||||
|
||||
import static com.pingcap.tikv.codec.Codec.IntegerCodec.writeLong;
|
||||
|
||||
import com.pingcap.tikv.codec.Codec.IntegerCodec;
|
||||
import com.pingcap.tikv.codec.CodecDataInput;
|
||||
import com.pingcap.tikv.codec.CodecDataOutput;
|
||||
import com.pingcap.tikv.exception.TiClientInternalException;
|
||||
import com.pingcap.tikv.exception.TiExpressionException;
|
||||
import com.pingcap.tikv.key.RowKey.DecodeResult.Status;
|
||||
import com.pingcap.tikv.util.FastByteComparisons;
|
||||
import java.util.Objects;
|
||||
|
||||
public class RowKey extends Key {
|
||||
private static final byte[] REC_PREFIX_SEP = new byte[] {'_', 'r'};
|
||||
|
||||
private final long tableId;
|
||||
private final long handle;
|
||||
private final boolean maxHandleFlag;
|
||||
|
||||
private RowKey(long tableId, long handle) {
|
||||
super(encode(tableId, handle));
|
||||
this.tableId = tableId;
|
||||
this.handle = handle;
|
||||
this.maxHandleFlag = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* The RowKey indicating maximum handle (its value exceeds Long.Max_Value)
|
||||
*
|
||||
* <p>Initializes an imaginary globally MAXIMUM rowKey with tableId.
|
||||
*/
|
||||
private RowKey(long tableId) {
|
||||
super(encodeBeyondMaxHandle(tableId));
|
||||
this.tableId = tableId;
|
||||
this.handle = Long.MAX_VALUE;
|
||||
this.maxHandleFlag = true;
|
||||
}
|
||||
|
||||
public static RowKey toRowKey(long tableId, long handle) {
|
||||
return new RowKey(tableId, handle);
|
||||
}
|
||||
|
||||
public static RowKey toRowKey(long tableId, TypedKey handle) {
|
||||
Object obj = handle.getValue();
|
||||
if (obj instanceof Long) {
|
||||
return new RowKey(tableId, (long) obj);
|
||||
}
|
||||
throw new TiExpressionException("Cannot encode row key with non-long type");
|
||||
}
|
||||
|
||||
public static RowKey createMin(long tableId) {
|
||||
return toRowKey(tableId, Long.MIN_VALUE);
|
||||
}
|
||||
|
||||
public static RowKey createBeyondMax(long tableId) {
|
||||
return new RowKey(tableId);
|
||||
}
|
||||
|
||||
private static byte[] encode(long tableId, long handle) {
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
encodePrefix(cdo, tableId);
|
||||
writeLong(cdo, handle);
|
||||
return cdo.toBytes();
|
||||
}
|
||||
|
||||
private static byte[] encodeBeyondMaxHandle(long tableId) {
|
||||
return nextValue(encode(tableId, Long.MAX_VALUE));
|
||||
}
|
||||
|
||||
@Override
|
||||
public RowKey next() {
|
||||
long handle = getHandle();
|
||||
boolean maxHandleFlag = getMaxHandleFlag();
|
||||
if (maxHandleFlag) {
|
||||
throw new TiClientInternalException("Handle overflow for Long MAX");
|
||||
}
|
||||
if (handle == Long.MAX_VALUE) {
|
||||
return createBeyondMax(tableId);
|
||||
}
|
||||
return new RowKey(tableId, handle + 1);
|
||||
}
|
||||
|
||||
public long getTableId() {
|
||||
return tableId;
|
||||
}
|
||||
|
||||
public long getHandle() {
|
||||
return handle;
|
||||
}
|
||||
|
||||
private boolean getMaxHandleFlag() {
|
||||
return maxHandleFlag;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return Long.toString(handle);
|
||||
}
|
||||
|
||||
private static void encodePrefix(CodecDataOutput cdo, long tableId) {
|
||||
cdo.write(TBL_PREFIX);
|
||||
writeLong(cdo, tableId);
|
||||
cdo.write(REC_PREFIX_SEP);
|
||||
}
|
||||
|
||||
public static class DecodeResult {
|
||||
public long handle;
|
||||
|
||||
public enum Status {
|
||||
MIN,
|
||||
MAX,
|
||||
EQUAL,
|
||||
LESS,
|
||||
GREATER,
|
||||
UNKNOWN_INF
|
||||
}
|
||||
|
||||
public Status status;
|
||||
}
|
||||
|
||||
public static void tryDecodeRowKey(long tableId, byte[] rowKey, DecodeResult outResult) {
|
||||
Objects.requireNonNull(rowKey, "rowKey cannot be null");
|
||||
if (rowKey.length == 0) {
|
||||
outResult.status = Status.UNKNOWN_INF;
|
||||
return;
|
||||
}
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
encodePrefix(cdo, tableId);
|
||||
byte[] tablePrefix = cdo.toBytes();
|
||||
|
||||
int res =
|
||||
FastByteComparisons.compareTo(
|
||||
tablePrefix,
|
||||
0,
|
||||
tablePrefix.length,
|
||||
rowKey,
|
||||
0,
|
||||
Math.min(rowKey.length, tablePrefix.length));
|
||||
|
||||
if (res > 0) {
|
||||
outResult.status = Status.MIN;
|
||||
return;
|
||||
}
|
||||
if (res < 0) {
|
||||
outResult.status = Status.MAX;
|
||||
return;
|
||||
}
|
||||
|
||||
CodecDataInput cdi = new CodecDataInput(rowKey);
|
||||
cdi.skipBytes(tablePrefix.length);
|
||||
if (cdi.available() == 8) {
|
||||
outResult.status = Status.EQUAL;
|
||||
} else if (cdi.available() < 8) {
|
||||
outResult.status = Status.LESS;
|
||||
} else if (cdi.available() > 8) {
|
||||
outResult.status = Status.GREATER;
|
||||
}
|
||||
outResult.handle = IntegerCodec.readPartialLong(cdi);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,84 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.key;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.pingcap.tikv.codec.CodecDataInput;
|
||||
import com.pingcap.tikv.codec.CodecDataOutput;
|
||||
import com.pingcap.tikv.exception.TypeException;
|
||||
import com.pingcap.tikv.types.DataType;
|
||||
|
||||
public class TypedKey extends Key {
|
||||
private final DataType type;
|
||||
|
||||
public TypedKey(Object val, DataType type, int prefixLength) {
|
||||
super(encodeKey(val, type, prefixLength));
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
public DataType getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public Object getValue() {
|
||||
CodecDataInput cdi = new CodecDataInput(value);
|
||||
return type.decode(cdi);
|
||||
}
|
||||
|
||||
public static TypedKey toTypedKey(Object val, DataType type) {
|
||||
return toTypedKey(val, type, DataType.UNSPECIFIED_LEN);
|
||||
}
|
||||
|
||||
/**
|
||||
* Map a typed value into TypedKey, only encoding first prefixLength bytes When prefixLength is
|
||||
* DataType.UNSPECIFIED_LEN, encode full length of value
|
||||
*
|
||||
* @param val value
|
||||
* @param type type of value
|
||||
* @param prefixLength described above
|
||||
* @return an encoded TypedKey
|
||||
*/
|
||||
public static TypedKey toTypedKey(Object val, DataType type, int prefixLength) {
|
||||
requireNonNull(type, "type is null");
|
||||
return new TypedKey(val, type, prefixLength);
|
||||
}
|
||||
|
||||
private static byte[] encodeKey(Object val, DataType type, int prefixLength) {
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
type.encodeKey(cdo, val, type, prefixLength);
|
||||
return cdo.toBytes();
|
||||
}
|
||||
|
||||
public TypedKey next(int prefixLength) {
|
||||
Object val = getValue();
|
||||
if (val instanceof String) {
|
||||
return toTypedKey(nextValue(((String) val).getBytes()), type, prefixLength);
|
||||
} else if (val instanceof byte[]) {
|
||||
return toTypedKey(nextValue(((byte[]) val)), type, prefixLength);
|
||||
} else {
|
||||
throw new TypeException(
|
||||
"Type for TypedKey in next() function must be either String or Byte array");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
CodecDataInput cdi = new CodecDataInput(value);
|
||||
Object val = type.decode(cdi);
|
||||
return String.format("%s", val);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,45 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.meta;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
/** This class is mapping TiDB's CIStr/ For internal use only. */
|
||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
||||
public class CIStr {
|
||||
private final String o; // original
|
||||
private final String l;
|
||||
|
||||
@JsonCreator
|
||||
private CIStr(@JsonProperty("O") String o, @JsonProperty("L") String l) {
|
||||
this.o = o;
|
||||
this.l = l;
|
||||
}
|
||||
|
||||
public static CIStr newCIStr(String str) {
|
||||
return new CIStr(str, str.toLowerCase());
|
||||
}
|
||||
|
||||
public String getO() {
|
||||
return o;
|
||||
}
|
||||
|
||||
public String getL() {
|
||||
return l;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,273 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.meta;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import java.util.Map;
|
||||
|
||||
public class Collation {
|
||||
public static int translate(String collation) {
|
||||
Integer code = collationMap.get(collation);
|
||||
if (code == null) {
|
||||
return DEF_COLLATION_CODE;
|
||||
}
|
||||
return code;
|
||||
}
|
||||
|
||||
public static String translate(int code) {
|
||||
String collation = collationCodeMap.get(code);
|
||||
if (collation == null) {
|
||||
return "";
|
||||
}
|
||||
return collation;
|
||||
}
|
||||
|
||||
public static final int DEF_COLLATION_CODE = 83;
|
||||
|
||||
private static final Map<String, Integer> collationMap;
|
||||
private static final Map<Integer, String> collationCodeMap;
|
||||
|
||||
static {
|
||||
collationMap =
|
||||
ImmutableMap.<String, Integer>builder()
|
||||
.put("big5_chinese_ci", 1)
|
||||
.put("latin2_czech_cs", 2)
|
||||
.put("dec8_swedish_ci", 3)
|
||||
.put("cp850_general_ci", 4)
|
||||
.put("latin1_german1_ci", 5)
|
||||
.put("hp8_english_ci", 6)
|
||||
.put("koi8r_general_ci", 7)
|
||||
.put("latin1_swedish_ci", 8)
|
||||
.put("latin2_general_ci", 9)
|
||||
.put("swe7_swedish_ci", 10)
|
||||
.put("ascii_general_ci", 11)
|
||||
.put("ujis_japanese_ci", 12)
|
||||
.put("sjis_japanese_ci", 13)
|
||||
.put("cp1251_bulgarian_ci", 14)
|
||||
.put("latin1_danish_ci", 15)
|
||||
.put("hebrew_general_ci", 16)
|
||||
.put("tis620_thai_ci", 18)
|
||||
.put("euckr_korean_ci", 19)
|
||||
.put("latin7_estonian_cs", 20)
|
||||
.put("latin2_hungarian_ci", 21)
|
||||
.put("koi8u_general_ci", 22)
|
||||
.put("cp1251_ukrainian_ci", 23)
|
||||
.put("gb2312_chinese_ci", 24)
|
||||
.put("greek_general_ci", 25)
|
||||
.put("cp1250_general_ci", 26)
|
||||
.put("latin2_croatian_ci", 27)
|
||||
.put("gbk_chinese_ci", 28)
|
||||
.put("cp1257_lithuanian_ci", 29)
|
||||
.put("latin5_turkish_ci", 30)
|
||||
.put("latin1_german2_ci", 31)
|
||||
.put("armscii8_general_ci", 32)
|
||||
.put("utf8_general_ci", 33)
|
||||
.put("cp1250_czech_cs", 34)
|
||||
.put("ucs2_general_ci", 35)
|
||||
.put("cp866_general_ci", 36)
|
||||
.put("keybcs2_general_ci", 37)
|
||||
.put("macce_general_ci", 38)
|
||||
.put("macroman_general_ci", 39)
|
||||
.put("cp852_general_ci", 40)
|
||||
.put("latin7_general_ci", 41)
|
||||
.put("latin7_general_cs", 42)
|
||||
.put("macce_bin", 43)
|
||||
.put("cp1250_croatian_ci", 44)
|
||||
.put("utf8mb4_general_ci", 45)
|
||||
.put("utf8mb4_bin", 46)
|
||||
.put("latin1_bin", 47)
|
||||
.put("latin1_general_ci", 48)
|
||||
.put("latin1_general_cs", 49)
|
||||
.put("cp1251_bin", 50)
|
||||
.put("cp1251_general_ci", 51)
|
||||
.put("cp1251_general_cs", 52)
|
||||
.put("macroman_bin", 53)
|
||||
.put("utf16_general_ci", 54)
|
||||
.put("utf16_bin", 55)
|
||||
.put("utf16le_general_ci", 56)
|
||||
.put("cp1256_general_ci", 57)
|
||||
.put("cp1257_bin", 58)
|
||||
.put("cp1257_general_ci", 59)
|
||||
.put("utf32_general_ci", 60)
|
||||
.put("utf32_bin", 61)
|
||||
.put("utf16le_bin", 62)
|
||||
.put("binary", 63)
|
||||
.put("armscii8_bin", 64)
|
||||
.put("ascii_bin", 65)
|
||||
.put("cp1250_bin", 66)
|
||||
.put("cp1256_bin", 67)
|
||||
.put("cp866_bin", 68)
|
||||
.put("dec8_bin", 69)
|
||||
.put("greek_bin", 70)
|
||||
.put("hebrew_bin", 71)
|
||||
.put("hp8_bin", 72)
|
||||
.put("keybcs2_bin", 73)
|
||||
.put("koi8r_bin", 74)
|
||||
.put("koi8u_bin", 75)
|
||||
.put("latin2_bin", 77)
|
||||
.put("latin5_bin", 78)
|
||||
.put("latin7_bin", 79)
|
||||
.put("cp850_bin", 80)
|
||||
.put("cp852_bin", 81)
|
||||
.put("swe7_bin", 82)
|
||||
.put("utf8_bin", 83)
|
||||
.put("big5_bin", 84)
|
||||
.put("euckr_bin", 85)
|
||||
.put("gb2312_bin", 86)
|
||||
.put("gbk_bin", 87)
|
||||
.put("sjis_bin", 88)
|
||||
.put("tis620_bin", 89)
|
||||
.put("ucs2_bin", 90)
|
||||
.put("ujis_bin", 91)
|
||||
.put("geostd8_general_ci", 92)
|
||||
.put("geostd8_bin", 93)
|
||||
.put("latin1_spanish_ci", 94)
|
||||
.put("cp932_japanese_ci", 95)
|
||||
.put("cp932_bin", 96)
|
||||
.put("eucjpms_japanese_ci", 97)
|
||||
.put("eucjpms_bin", 98)
|
||||
.put("cp1250_polish_ci", 99)
|
||||
.put("utf16_unicode_ci", 101)
|
||||
.put("utf16_icelandic_ci", 102)
|
||||
.put("utf16_latvian_ci", 103)
|
||||
.put("utf16_romanian_ci", 104)
|
||||
.put("utf16_slovenian_ci", 105)
|
||||
.put("utf16_polish_ci", 106)
|
||||
.put("utf16_estonian_ci", 107)
|
||||
.put("utf16_spanish_ci", 108)
|
||||
.put("utf16_swedish_ci", 109)
|
||||
.put("utf16_turkish_ci", 110)
|
||||
.put("utf16_czech_ci", 111)
|
||||
.put("utf16_danish_ci", 112)
|
||||
.put("utf16_lithuanian_ci", 113)
|
||||
.put("utf16_slovak_ci", 114)
|
||||
.put("utf16_spanish2_ci", 115)
|
||||
.put("utf16_roman_ci", 116)
|
||||
.put("utf16_persian_ci", 117)
|
||||
.put("utf16_esperanto_ci", 118)
|
||||
.put("utf16_hungarian_ci", 119)
|
||||
.put("utf16_sinhala_ci", 120)
|
||||
.put("utf16_german2_ci", 121)
|
||||
.put("utf16_croatian_ci", 122)
|
||||
.put("utf16_unicode_520_ci", 123)
|
||||
.put("utf16_vietnamese_ci", 124)
|
||||
.put("ucs2_unicode_ci", 128)
|
||||
.put("ucs2_icelandic_ci", 129)
|
||||
.put("ucs2_latvian_ci", 130)
|
||||
.put("ucs2_romanian_ci", 131)
|
||||
.put("ucs2_slovenian_ci", 132)
|
||||
.put("ucs2_polish_ci", 133)
|
||||
.put("ucs2_estonian_ci", 134)
|
||||
.put("ucs2_spanish_ci", 135)
|
||||
.put("ucs2_swedish_ci", 136)
|
||||
.put("ucs2_turkish_ci", 137)
|
||||
.put("ucs2_czech_ci", 138)
|
||||
.put("ucs2_danish_ci", 139)
|
||||
.put("ucs2_lithuanian_ci", 140)
|
||||
.put("ucs2_slovak_ci", 141)
|
||||
.put("ucs2_spanish2_ci", 142)
|
||||
.put("ucs2_roman_ci", 143)
|
||||
.put("ucs2_persian_ci", 144)
|
||||
.put("ucs2_esperanto_ci", 145)
|
||||
.put("ucs2_hungarian_ci", 146)
|
||||
.put("ucs2_sinhala_ci", 147)
|
||||
.put("ucs2_german2_ci", 148)
|
||||
.put("ucs2_croatian_ci", 149)
|
||||
.put("ucs2_unicode_520_ci", 150)
|
||||
.put("ucs2_vietnamese_ci", 151)
|
||||
.put("ucs2_general_mysql500_ci", 159)
|
||||
.put("utf32_unicode_ci", 160)
|
||||
.put("utf32_icelandic_ci", 161)
|
||||
.put("utf32_latvian_ci", 162)
|
||||
.put("utf32_romanian_ci", 163)
|
||||
.put("utf32_slovenian_ci", 164)
|
||||
.put("utf32_polish_ci", 165)
|
||||
.put("utf32_estonian_ci", 166)
|
||||
.put("utf32_spanish_ci", 167)
|
||||
.put("utf32_swedish_ci", 168)
|
||||
.put("utf32_turkish_ci", 169)
|
||||
.put("utf32_czech_ci", 170)
|
||||
.put("utf32_danish_ci", 171)
|
||||
.put("utf32_lithuanian_ci", 172)
|
||||
.put("utf32_slovak_ci", 173)
|
||||
.put("utf32_spanish2_ci", 174)
|
||||
.put("utf32_roman_ci", 175)
|
||||
.put("utf32_persian_ci", 176)
|
||||
.put("utf32_esperanto_ci", 177)
|
||||
.put("utf32_hungarian_ci", 178)
|
||||
.put("utf32_sinhala_ci", 179)
|
||||
.put("utf32_german2_ci", 180)
|
||||
.put("utf32_croatian_ci", 181)
|
||||
.put("utf32_unicode_520_ci", 182)
|
||||
.put("utf32_vietnamese_ci", 183)
|
||||
.put("utf8_unicode_ci", 192)
|
||||
.put("utf8_icelandic_ci", 193)
|
||||
.put("utf8_latvian_ci", 194)
|
||||
.put("utf8_romanian_ci", 195)
|
||||
.put("utf8_slovenian_ci", 196)
|
||||
.put("utf8_polish_ci", 197)
|
||||
.put("utf8_estonian_ci", 198)
|
||||
.put("utf8_spanish_ci", 199)
|
||||
.put("utf8_swedish_ci", 200)
|
||||
.put("utf8_turkish_ci", 201)
|
||||
.put("utf8_czech_ci", 202)
|
||||
.put("utf8_danish_ci", 203)
|
||||
.put("utf8_lithuanian_ci", 204)
|
||||
.put("utf8_slovak_ci", 205)
|
||||
.put("utf8_spanish2_ci", 206)
|
||||
.put("utf8_roman_ci", 207)
|
||||
.put("utf8_persian_ci", 208)
|
||||
.put("utf8_esperanto_ci", 209)
|
||||
.put("utf8_hungarian_ci", 210)
|
||||
.put("utf8_sinhala_ci", 211)
|
||||
.put("utf8_german2_ci", 212)
|
||||
.put("utf8_croatian_ci", 213)
|
||||
.put("utf8_unicode_520_ci", 214)
|
||||
.put("utf8_vietnamese_ci", 215)
|
||||
.put("utf8_general_mysql500_ci", 223)
|
||||
.put("utf8mb4_unicode_ci", 224)
|
||||
.put("utf8mb4_icelandic_ci", 225)
|
||||
.put("utf8mb4_latvian_ci", 226)
|
||||
.put("utf8mb4_romanian_ci", 227)
|
||||
.put("utf8mb4_slovenian_ci", 228)
|
||||
.put("utf8mb4_polish_ci", 229)
|
||||
.put("utf8mb4_estonian_ci", 230)
|
||||
.put("utf8mb4_spanish_ci", 231)
|
||||
.put("utf8mb4_swedish_ci", 232)
|
||||
.put("utf8mb4_turkish_ci", 233)
|
||||
.put("utf8mb4_czech_ci", 234)
|
||||
.put("utf8mb4_danish_ci", 235)
|
||||
.put("utf8mb4_lithuanian_ci", 236)
|
||||
.put("utf8mb4_slovak_ci", 237)
|
||||
.put("utf8mb4_spanish2_ci", 238)
|
||||
.put("utf8mb4_roman_ci", 239)
|
||||
.put("utf8mb4_persian_ci", 240)
|
||||
.put("utf8mb4_esperanto_ci", 241)
|
||||
.put("utf8mb4_hungarian_ci", 242)
|
||||
.put("utf8mb4_sinhala_ci", 243)
|
||||
.put("utf8mb4_german2_ci", 244)
|
||||
.put("utf8mb4_croatian_ci", 245)
|
||||
.put("utf8mb4_unicode_520_ci", 246)
|
||||
.put("utf8mb4_vietnamese_ci", 247)
|
||||
.build();
|
||||
|
||||
ImmutableMap.Builder<Integer, String> builder = ImmutableMap.builder();
|
||||
for (String collation : collationMap.keySet()) {
|
||||
builder.put(collationMap.get(collation), collation);
|
||||
}
|
||||
collationCodeMap = builder.build();
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.meta;
|
||||
|
||||
import com.pingcap.tikv.exception.TiClientInternalException;
|
||||
|
||||
// Actually we are not using either real btree or hash index
|
||||
// TiDB has its own way for indexing as key value pair.
|
||||
public enum IndexType {
|
||||
IndexTypeInvalid(0),
|
||||
IndexTypeBtree(1),
|
||||
IndexTypeHash(2);
|
||||
|
||||
private final int type;
|
||||
|
||||
IndexType(int type) {
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
public static IndexType fromValue(int type) {
|
||||
for (IndexType e : IndexType.values()) {
|
||||
if (e.type == type) {
|
||||
return e;
|
||||
}
|
||||
}
|
||||
throw new TiClientInternalException("Invalid index type code: " + type);
|
||||
}
|
||||
|
||||
public int getTypeCode() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
switch (this.type) {
|
||||
case 1:
|
||||
return "BTREE";
|
||||
case 2:
|
||||
return "HASH";
|
||||
}
|
||||
return "Invalid";
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,46 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.meta;
|
||||
|
||||
import com.pingcap.tikv.exception.TiClientInternalException;
|
||||
|
||||
public enum SchemaState {
|
||||
StateNone(0),
|
||||
StateDeleteOnly(1),
|
||||
StateWriteOnly(2),
|
||||
StateWriteReorganization(3),
|
||||
StateDeleteReorganization(4),
|
||||
StatePublic(5);
|
||||
|
||||
private final int state;
|
||||
|
||||
SchemaState(int state) {
|
||||
this.state = state;
|
||||
}
|
||||
|
||||
public static SchemaState fromValue(int b) {
|
||||
for (SchemaState e : SchemaState.values()) {
|
||||
if (e.state == b) {
|
||||
return e;
|
||||
}
|
||||
}
|
||||
throw new TiClientInternalException("Invalid SchemaState code: " + b);
|
||||
}
|
||||
|
||||
public int getStateCode() {
|
||||
return state;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,346 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.meta;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.protobuf.ByteString;
|
||||
import com.pingcap.tidb.tipb.ColumnInfo;
|
||||
import com.pingcap.tikv.codec.CodecDataOutput;
|
||||
import com.pingcap.tikv.types.DataType;
|
||||
import com.pingcap.tikv.types.DataType.EncodeType;
|
||||
import com.pingcap.tikv.types.DataTypeFactory;
|
||||
import com.pingcap.tikv.types.IntegerType;
|
||||
import java.io.Serializable;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
 * Column metadata of a TiDB table, deserialized from TiDB's JSON schema dump.
 * Immutable once constructed; also converts itself to the tipb {@code ColumnInfo}
 * protobuf for coprocessor requests.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class TiColumnInfo implements Serializable {
  private final long id;
  // Lower-cased column name (see constructors).
  private final String name;
  // Position of the column within its table.
  private final int offset;
  private final DataType type;
  private final SchemaState schemaState;
  private final String comment;
  private final boolean isPrimaryKey;
  private final String defaultValue;
  private final String originDefaultValue;

  /** Returns the implicit "_tidb_rowid" handle column (id -1) placed at the given offset. */
  public static TiColumnInfo getRowIdColumn(int offset) {
    return new TiColumnInfo(-1, "_tidb_rowid", offset, IntegerType.ROW_ID_TYPE, true);
  }

  // Bit in the type flag marking the column as (part of) the primary key.
  @VisibleForTesting private static final int PK_MASK = 0x2;

  /**
   * Jackson entry point: builds a column from TiDB's JSON schema representation.
   * The type holder is converted to a concrete DataType via DataTypeFactory.
   */
  @JsonCreator
  public TiColumnInfo(
      @JsonProperty("id") long id,
      @JsonProperty("name") CIStr name,
      @JsonProperty("offset") int offset,
      @JsonProperty("type") InternalTypeHolder type,
      @JsonProperty("state") int schemaState,
      @JsonProperty("origin_default") String originalDefaultValue,
      @JsonProperty("default") String defaultValue,
      @JsonProperty("comment") String comment) {
    this.id = id;
    this.name = requireNonNull(name, "column name is null").getL();
    this.offset = offset;
    this.type = DataTypeFactory.of(requireNonNull(type, "type is null"));
    this.schemaState = SchemaState.fromValue(schemaState);
    this.comment = comment;
    this.defaultValue = defaultValue;
    this.originDefaultValue = originalDefaultValue;
    // I don't think pk flag should be set on type
    // Refactor against original tidb code
    this.isPrimaryKey = (type.getFlag() & PK_MASK) > 0;
  }

  /** Programmatic constructor taking an already-resolved DataType. */
  public TiColumnInfo(
      long id,
      String name,
      int offset,
      DataType type,
      SchemaState schemaState,
      String originalDefaultValue,
      String defaultValue,
      String comment) {
    this.id = id;
    this.name = requireNonNull(name, "column name is null").toLowerCase();
    this.offset = offset;
    this.type = requireNonNull(type, "data type is null");
    this.schemaState = schemaState;
    this.comment = comment;
    this.defaultValue = defaultValue;
    this.originDefaultValue = originalDefaultValue;
    this.isPrimaryKey = (type.getFlag() & PK_MASK) > 0;
  }

  /**
   * Returns a copy of this column with the primary-key bit cleared from its type flag
   * (and therefore isPrimaryKey == false on the copy).
   */
  public TiColumnInfo copyWithoutPrimaryKey() {
    InternalTypeHolder typeHolder = type.toTypeHolder();
    typeHolder.setFlag(type.getFlag() & (~TiColumnInfo.PK_MASK));
    DataType newType = DataTypeFactory.of(typeHolder);
    return new TiColumnInfo(
        this.id,
        this.name,
        this.offset,
        newType,
        this.schemaState,
        this.originDefaultValue,
        this.defaultValue,
        this.comment);
  }

  /**
   * Test-only convenience constructor; state is forced to StatePublic and
   * default values are fixed placeholders.
   */
  @VisibleForTesting
  public TiColumnInfo(long id, String name, int offset, DataType type, boolean isPrimaryKey) {
    this.id = id;
    this.name = requireNonNull(name, "column name is null").toLowerCase();
    this.offset = offset;
    this.type = requireNonNull(type, "data type is null");
    this.schemaState = SchemaState.StatePublic;
    this.comment = "";
    this.isPrimaryKey = isPrimaryKey;
    this.originDefaultValue = "1";
    this.defaultValue = "";
  }

  public long getId() {
    return this.id;
  }

  public String getName() {
    return this.name;
  }

  /** Case-insensitive name comparison. */
  public boolean matchName(String name) {
    return this.name.equalsIgnoreCase(name);
  }

  public int getOffset() {
    return this.offset;
  }

  public DataType getType() {
    return type;
  }

  public SchemaState getSchemaState() {
    return schemaState;
  }

  public String getComment() {
    return comment;
  }

  public boolean isPrimaryKey() {
    return isPrimaryKey;
  }

  public String getDefaultValue() {
    return defaultValue;
  }

  public String getOriginDefaultValue() {
    return originDefaultValue;
  }

  /** Encodes the origin default value with this column's type codec, as sent to TiKV. */
  public ByteString getOriginDefaultValueAsByteString() {
    CodecDataOutput cdo = new CodecDataOutput();
    type.encode(cdo, EncodeType.VALUE, type.getOriginDefaultValue(originDefaultValue));
    return cdo.toByteString();
  }

  /**
   * Mutable holder mirroring TiDB's JSON "type" object; bridges Jackson
   * deserialization and DataTypeFactory.
   */
  @JsonIgnoreProperties(ignoreUnknown = true)
  public static class InternalTypeHolder {
    private int tp;
    private int flag;
    private long flen;
    private int decimal;
    private String charset;
    private String collate;
    private String defaultValue;
    private String originDefaultValue;
    private List<String> elems;

    public void setTp(int tp) {
      this.tp = tp;
    }

    public void setFlag(int flag) {
      this.flag = flag;
    }

    public void setFlen(long flen) {
      this.flen = flen;
    }

    public void setDecimal(int decimal) {
      this.decimal = decimal;
    }

    public void setCharset(String charset) {
      this.charset = charset;
    }

    public void setCollate(String collate) {
      this.collate = collate;
    }

    public void setDefaultValue(String defaultValue) {
      this.defaultValue = defaultValue;
    }

    public void setOriginDefaultValue(String originDefaultValue) {
      this.originDefaultValue = originDefaultValue;
    }

    public void setElems(List<String> elems) {
      this.elems = elems;
    }

    /** Factory callback turning a holder into a concrete DataType. */
    interface Builder<E extends DataType> {
      E build(InternalTypeHolder holder);
    }

    @JsonCreator
    public InternalTypeHolder(
        @JsonProperty("Tp") int tp,
        @JsonProperty("Flag") int flag,
        @JsonProperty("Flen") long flen,
        @JsonProperty("Decimal") int decimal,
        @JsonProperty("Charset") String charset,
        @JsonProperty("origin_default") String originalDefaultValue,
        @JsonProperty("default") String defaultValue,
        @JsonProperty("Collate") String collate,
        @JsonProperty("Elems") List<String> elems) {
      this.tp = tp;
      this.flag = flag;
      this.flen = flen;
      this.decimal = decimal;
      this.charset = charset;
      this.collate = collate;
      this.defaultValue = defaultValue;
      this.originDefaultValue = originalDefaultValue;
      this.elems = elems;
    }

    /** Builds a holder from a tipb ColumnInfo protobuf message. */
    public InternalTypeHolder(ColumnInfo c) {
      this.tp = c.getTp();
      this.flag = c.getFlag();
      this.flen = c.getColumnLen();
      this.decimal = c.getDecimal();
      this.charset = "";
      this.collate = Collation.translate(c.getCollation());
      this.elems = c.getElemsList();
      this.defaultValue = c.getDefaultVal().toStringUtf8();
      // TODO: we may need to write a function that derives the origin default
      // value from the string representation.
      this.originDefaultValue = "";
    }

    public int getTp() {
      return tp;
    }

    public int getFlag() {
      return flag;
    }

    public long getFlen() {
      return flen;
    }

    public int getDecimal() {
      return decimal;
    }

    public String getCharset() {
      return charset;
    }

    public String getCollate() {
      return collate;
    }

    public List<String> getElems() {
      return elems;
    }

    public String getDefaultValue() {
      return defaultValue;
    }

    public String getOriginDefaultValue() {
      return originDefaultValue;
    }
  }

  TiIndexColumn toFakeIndexColumn() {
    // we don't use original length of column since for a clustered index column
    // it always full index instead of prefix index
    return new TiIndexColumn(CIStr.newCIStr(getName()), getOffset(), DataType.UNSPECIFIED_LEN);
  }

  TiIndexColumn toIndexColumn() {
    return new TiIndexColumn(CIStr.newCIStr(getName()), getOffset(), getType().getLength());
  }

  /** Converts this column to its tipb protobuf form for coprocessor requests. */
  public ColumnInfo toProto(TiTableInfo table) {
    return toProtoBuilder(table).build();
  }

  ColumnInfo.Builder toProtoBuilder(TiTableInfo table) {
    return ColumnInfo.newBuilder()
        .setColumnId(id)
        .setTp(type.getTypeCode())
        .setCollation(type.getCollationCode())
        .setColumnLen((int) type.getLength())
        .setDecimal(type.getDecimal())
        .setFlag(type.getFlag())
        .setDefaultVal(getOriginDefaultValueAsByteString())
        .setPkHandle(table.isPkHandle() && isPrimaryKey())
        .addAllElems(type.getElems());
  }

  // NOTE(review): 'offset' and 'comment' are deliberately excluded from
  // equals/hashCode; Objects.equals boxes the primitive ids — 'id == col.id'
  // would avoid the allocation.
  @Override
  public boolean equals(Object other) {
    if (other == this) {
      return true;
    }

    if (!(other instanceof TiColumnInfo)) {
      return false;
    }

    TiColumnInfo col = (TiColumnInfo) other;
    return Objects.equals(id, col.id)
        && Objects.equals(name, col.name)
        && Objects.equals(type, col.type)
        && Objects.equals(schemaState, col.schemaState)
        && isPrimaryKey == col.isPrimaryKey
        && Objects.equals(defaultValue, col.defaultValue)
        && Objects.equals(originDefaultValue, col.originDefaultValue);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        id, name, type, schemaState, isPrimaryKey, defaultValue, originDefaultValue);
  }
}
|
||||
|
|
@ -0,0 +1,822 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.meta;
|
||||
|
||||
import static com.google.common.base.Preconditions.checkArgument;
|
||||
import static com.pingcap.tikv.predicates.PredicateUtils.mergeCNFExpressions;
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.base.Joiner;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.pingcap.tidb.tipb.*;
|
||||
import com.pingcap.tikv.codec.KeyUtils;
|
||||
import com.pingcap.tikv.exception.DAGRequestException;
|
||||
import com.pingcap.tikv.exception.TiClientInternalException;
|
||||
import com.pingcap.tikv.expression.ByItem;
|
||||
import com.pingcap.tikv.expression.ColumnRef;
|
||||
import com.pingcap.tikv.expression.Expression;
|
||||
import com.pingcap.tikv.expression.visitor.ExpressionTypeCoercer;
|
||||
import com.pingcap.tikv.expression.visitor.MetaResolver;
|
||||
import com.pingcap.tikv.expression.visitor.ProtoConverter;
|
||||
import com.pingcap.tikv.key.RowKey;
|
||||
import com.pingcap.tikv.kvproto.Coprocessor;
|
||||
import com.pingcap.tikv.types.DataType;
|
||||
import com.pingcap.tikv.util.KeyRangeUtils;
|
||||
import com.pingcap.tikv.util.Pair;
|
||||
import java.io.*;
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* Type TiDAGRequest.
|
||||
*
|
||||
* <p>Used for constructing a new DAG request to TiKV
|
||||
*/
|
||||
public class TiDAGRequest implements Serializable {
|
||||
public static class Builder {
|
||||
private List<String> requiredCols = new ArrayList<>();
|
||||
private List<Expression> filters = new ArrayList<>();
|
||||
private List<ByItem> orderBys = new ArrayList<>();
|
||||
private List<Coprocessor.KeyRange> ranges = new ArrayList<>();
|
||||
private TiTableInfo tableInfo;
|
||||
private int limit;
|
||||
private long startTs;
|
||||
|
||||
public static Builder newBuilder() {
|
||||
return new Builder();
|
||||
}
|
||||
|
||||
public Builder setFullTableScan(TiTableInfo tableInfo) {
|
||||
requireNonNull(tableInfo);
|
||||
setTableInfo(tableInfo);
|
||||
RowKey start = RowKey.createMin(tableInfo.getId());
|
||||
RowKey end = RowKey.createBeyondMax(tableInfo.getId());
|
||||
ranges.add(KeyRangeUtils.makeCoprocRange(start.toByteString(), end.toByteString()));
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setLimit(int limit) {
|
||||
this.limit = limit;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setTableInfo(TiTableInfo tableInfo) {
|
||||
this.tableInfo = tableInfo;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder addRequiredCols(String... cols) {
|
||||
this.requiredCols.addAll(Arrays.asList(cols));
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder addRequiredCols(List<String> cols) {
|
||||
this.requiredCols.addAll(cols);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder addFilter(Expression filter) {
|
||||
this.filters.add(filter);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder addOrderBy(ByItem item) {
|
||||
this.orderBys.add(item);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setStartTs(long ts) {
|
||||
this.startTs = ts;
|
||||
return this;
|
||||
}
|
||||
|
||||
public TiDAGRequest build(PushDownType pushDownType) {
|
||||
TiDAGRequest req = new TiDAGRequest(pushDownType);
|
||||
req.setTableInfo(tableInfo);
|
||||
req.addRanges(ranges);
|
||||
filters.forEach(req::addFilter);
|
||||
if (!orderBys.isEmpty()) {
|
||||
orderBys.forEach(req::addOrderByItem);
|
||||
}
|
||||
if (limit != 0) {
|
||||
req.setLimit(limit);
|
||||
}
|
||||
requiredCols.forEach(c -> req.addRequiredColumn(ColumnRef.create(c)));
|
||||
req.setStartTs(startTs);
|
||||
|
||||
req.resolve();
|
||||
return req;
|
||||
}
|
||||
}
|
||||
|
||||
/** Creates a request with the given push-down strategy (streaming or normal). */
public TiDAGRequest(PushDownType pushDownType) {
  this.pushDownType = pushDownType;
}

/**
 * Creates a request with the given push-down strategy and time-zone offset.
 * The offset is forwarded verbatim to the DAGRequest protobuf; presumably
 * seconds from UTC — TODO confirm against TiKV's expectation.
 */
public TiDAGRequest(PushDownType pushDownType, int timeZoneOffset) {
  this(pushDownType);
  this.timeZoneOffset = timeZoneOffset;
}
|
||||
|
||||
/**
 * Truncation handling flags OR-ed into the request's flag word:
 * either ignore truncation entirely or downgrade it to a warning.
 */
public enum TruncateMode {
  IgnoreTruncation(0x1),
  TruncationAsWarning(0x2);

  // Single flag bit this mode contributes.
  private final long bit;

  TruncateMode(long bit) {
    this.bit = bit;
  }

  /** Returns {@code flags} with this mode's bit switched on. */
  public long mask(long flags) {
    return bit | flags;
  }
}
|
||||
|
||||
/**
 * Whether we use streaming to push down the request: STREAMING uses the
 * streaming coprocessor RPC, NORMAL the unary one.
 */
public enum PushDownType {
  STREAMING,
  NORMAL
}
|
||||
|
||||
/**
 * Predefined executor priority map. Executors in a DAG chain must appear in
 * non-decreasing priority order; scans (priority 0) come first.
 */
private static final Map<ExecType, Integer> EXEC_TYPE_PRIORITY_MAP =
    ImmutableMap.<ExecType, Integer>builder()
        .put(ExecType.TypeTableScan, 0)
        .put(ExecType.TypeIndexScan, 0)
        .put(ExecType.TypeSelection, 1)
        .put(ExecType.TypeAggregation, 2)
        .put(ExecType.TypeTopN, 3)
        .put(ExecType.TypeLimit, 4)
        .build();

private TiTableInfo tableInfo;
// Non-null only for index scans.
private TiIndexInfo indexInfo;
// Output columns, in projection order.
private final List<ColumnRef> fields = new ArrayList<>();
private final List<Expression> filters = new ArrayList<>();
private final List<ByItem> groupByItems = new ArrayList<>();
private final List<ByItem> orderByItems = new ArrayList<>();
private List<Expression> pushdownFilters = null;
// System like Spark has different type promotion rules
// we need a cast to target when given
private final List<Pair<Expression, DataType>> aggregates = new ArrayList<>();
private final List<Coprocessor.KeyRange> keyRanges = new ArrayList<>();
// If index scanning of this request is not possible in some scenario, we downgrade it to a table
// scan and use
// downGradeRanges instead of index scan ranges stored in keyRanges along with downgradeFilters to
// perform a
// table scan.
private List<Expression> downgradeFilters = new ArrayList<>();

// 0 means "no limit" throughout this class.
private int limit;
private int timeZoneOffset;
// Bitmask of TruncateMode flags.
private long flags;
// Transaction start timestamp; buildScan rejects 0.
private long startTs;
private Expression having;
private boolean distinct;
// Whether the implicit handle (_tidb_rowid) column must be retrieved.
private boolean handleNeeded;
private boolean isDoubleRead;
private final PushDownType pushDownType;
// Expression -> inferred type; identity-keyed, populated by resolve().
private IdentityHashMap<Expression, DataType> typeMap;
private double estimatedCount = -1;

// Synthetic column (id -1) appended when the handle must be returned.
private static ColumnInfo handleColumn =
    ColumnInfo.newBuilder()
        .setColumnId(-1)
        .setPkHandle(true)
        // We haven't changed the field name in protobuf file, but
        // we need to set this to true in order to retrieve the handle,
        // so the name 'setPkHandle' may sounds strange.
        .build();
|
||||
|
||||
private List<Expression> getAllExpressions() {
|
||||
ImmutableList.Builder<Expression> builder = ImmutableList.builder();
|
||||
builder.addAll(getFields());
|
||||
builder.addAll(getFilters());
|
||||
builder.addAll(getAggregates());
|
||||
getGroupByItems().forEach(item -> builder.add(item.getExpr()));
|
||||
getOrderByItems().forEach(item -> builder.add(item.getExpr()));
|
||||
if (having != null) {
|
||||
builder.add(having);
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
/**
 * Returns the inferred type of the given expression.
 *
 * @param expression an expression previously covered by {@link #resolve()}
 * @return the inferred DataType, or null if the expression is unknown to this request
 * @throws NullPointerException if {@link #resolve()} has not been called yet
 */
public DataType getExpressionType(Expression expression) {
  requireNonNull(typeMap, "request is not resolved");
  return typeMap.get(expression);
}

/**
 * Binds column references against the table metadata and infers a type for
 * every expression in this request. Must run before
 * {@link #getExpressionType(Expression)} or {@link #buildScan(boolean)}.
 */
public void resolve() {
  MetaResolver resolver = new MetaResolver(tableInfo);
  ExpressionTypeCoercer inferrer = new ExpressionTypeCoercer();
  resolver.resolve(getAllExpressions());
  inferrer.infer(getAllExpressions());
  typeMap = inferrer.getTypeMap();
}
|
||||
|
||||
/**
 * Unify indexScan and tableScan building logic since they are very much alike. DAGRequest for
 * IndexScan should also contain filters and aggregation, so we can reuse this part of logic.
 *
 * <p>DAGRequest is made up of a chain of executors with strict orders: TableScan/IndexScan >
 * Selection > Aggregation > TopN/Limit a DAGRequest must contain one and only one TableScan or
 * IndexScan.
 *
 * @param isIndexScan whether the dagRequest to build is an IndexScan
 * @return final DAGRequest built
 * @throws IllegalArgumentException if startTs has not been set (is 0)
 * @throws TiClientInternalException if an index scan is requested without index info
 * @throws DAGRequestException if the resulting executor chain is invalid
 */
public DAGRequest buildScan(boolean isIndexScan) {
  checkArgument(startTs != 0, "timestamp is 0");
  DAGRequest.Builder dagRequestBuilder = DAGRequest.newBuilder();
  Executor.Builder executorBuilder = Executor.newBuilder();
  IndexScan.Builder indexScanBuilder = IndexScan.newBuilder();
  TableScan.Builder tblScanBuilder = TableScan.newBuilder();
  // find a column's offset in fields
  Map<ColumnRef, Integer> colOffsetInFieldMap = new HashMap<>();
  // find a column's position in index
  Map<TiColumnInfo, Integer> colPosInIndexMap = new HashMap<>();

  if (isIndexScan) {
    // IndexScan
    if (indexInfo == null) {
      throw new TiClientInternalException("Index is empty for index scan");
    }
    List<TiColumnInfo> columnInfoList = tableInfo.getColumns();
    boolean hasPk = false;
    // We extract index column info
    List<Integer> indexColOffsets =
        indexInfo
            .getIndexColumns()
            .stream()
            .map(TiIndexColumn::getOffset)
            .collect(Collectors.toList());

    int idxPos = 0;
    // for index scan builder, columns are added by its order in index
    for (Integer idx : indexColOffsets) {
      TiColumnInfo tiColumnInfo = columnInfoList.get(idx);
      ColumnInfo columnInfo = tiColumnInfo.toProto(tableInfo);
      colPosInIndexMap.put(tiColumnInfo, idxPos++);

      ColumnInfo.Builder colBuilder = ColumnInfo.newBuilder(columnInfo);
      // Column id -1 is the implicit row-id handle; flag it for TiKV.
      if (columnInfo.getColumnId() == -1) {
        hasPk = true;
        colBuilder.setPkHandle(true);
      }
      indexScanBuilder.addColumns(colBuilder);
    }

    if (isDoubleRead()) {
      // double read case
      if (!hasPk) {
        indexScanBuilder.addColumns(handleColumn);
      }

      int colCount = indexScanBuilder.getColumnsCount();
      // double read case: need to retrieve handle, which sits in the last column
      dagRequestBuilder.addOutputOffsets(colCount != 0 ? colCount - 1 : 0);
    } else {
      int colCount = indexScanBuilder.getColumnsCount();
      boolean pkIsNeeded = false;
      // =================== IMPORTANT ======================
      // offset for dagRequest should be in accordance with fields
      for (ColumnRef col : getFields()) {
        Integer pos = colPosInIndexMap.get(col.getColumnInfo());
        if (pos != null) {
          TiColumnInfo columnInfo = columnInfoList.get(indexColOffsets.get(pos));
          if (col.getColumnInfo().equals(columnInfo)) {
            dagRequestBuilder.addOutputOffsets(pos);
            colOffsetInFieldMap.put(col, pos);
          }
        }
        // if a column of field is not contained in index selected,
        // logically it must be the pk column and
        // the pkIsHandle must be true. Extra check here.
        else if (col.getColumnInfo().isPrimaryKey() && tableInfo.isPkHandle()) {
          pkIsNeeded = true;
          // offset should be processed for each primary key encountered
          dagRequestBuilder.addOutputOffsets(colCount);
          // for index scan, column offset must be in the order of index->handle
          colOffsetInFieldMap.put(col, indexColOffsets.size());
        } else {
          throw new DAGRequestException(
              "columns other than primary key and index key exist in fields while index single read: "
                  + col.getName());
        }
      }
      // pk is not included in index but still needed
      if (pkIsNeeded) {
        indexScanBuilder.addColumns(handleColumn);
      }
    }
    executorBuilder.setTp(ExecType.TypeIndexScan);

    indexScanBuilder.setTableId(tableInfo.getId()).setIndexId(indexInfo.getId());
    dagRequestBuilder.addExecutors(executorBuilder.setIdxScan(indexScanBuilder).build());
  } else {
    // TableScan
    executorBuilder.setTp(ExecType.TypeTableScan);
    tblScanBuilder.setTableId(tableInfo.getId());
    // Step1. Add columns to first executor
    for (int i = 0; i < getFields().size(); i++) {
      ColumnRef col = getFields().get(i);
      tblScanBuilder.addColumns(col.getColumnInfo().toProto(tableInfo));
      colOffsetInFieldMap.put(col, i);
    }
    // Currently, according to TiKV's implementation, if handle
    // is needed, we should add an extra column with an ID of -1
    // to the TableScan executor
    if (isHandleNeeded()) {
      tblScanBuilder.addColumns(handleColumn);
    }
    dagRequestBuilder.addExecutors(executorBuilder.setTblScan(tblScanBuilder));

    // column offset should be in accordance with fields
    for (int i = 0; i < getFields().size(); i++) {
      dagRequestBuilder.addOutputOffsets(i);
    }

    // if handle is needed, we should append one output offset
    if (isHandleNeeded()) {
      dagRequestBuilder.addOutputOffsets(tableInfo.getColumns().size());
    }
  }

  // NOTE(review): this condition mixes the 'isIndexScan' parameter with the
  // isIndexScan() method; it reads as "table scan, or single-read index scan",
  // but the asymmetry looks suspicious — confirm it is intentional.
  if (!isIndexScan || (isIndexScan() && !isDoubleRead())) {
    // clear executorBuilder
    executorBuilder.clear();

    // Step2. Add others
    // DO NOT EDIT EXPRESSION CONSTRUCTION ORDER
    // Or make sure the construction order is below:
    // TableScan/IndexScan > Selection > Aggregation > TopN/Limit
    Expression whereExpr = mergeCNFExpressions(getFilters());
    if (whereExpr != null) {
      executorBuilder.setTp(ExecType.TypeSelection);
      dagRequestBuilder.addExecutors(
          executorBuilder.setSelection(
              Selection.newBuilder()
                  .addConditions(ProtoConverter.toProto(whereExpr, colOffsetInFieldMap))));
      executorBuilder.clear();
    }

    if (!getGroupByItems().isEmpty() || !getAggregates().isEmpty()) {
      Aggregation.Builder aggregationBuilder = Aggregation.newBuilder();
      getGroupByItems()
          .forEach(
              tiByItem ->
                  aggregationBuilder.addGroupBy(
                      ProtoConverter.toProto(tiByItem.getExpr(), colOffsetInFieldMap)));
      getAggregates()
          .forEach(
              tiExpr ->
                  aggregationBuilder.addAggFunc(
                      ProtoConverter.toProto(tiExpr, colOffsetInFieldMap)));
      executorBuilder.setTp(ExecType.TypeAggregation);
      dagRequestBuilder.addExecutors(executorBuilder.setAggregation(aggregationBuilder));
      executorBuilder.clear();
    }

    // TopN subsumes Limit when an order-by is present; otherwise emit a bare Limit.
    if (!getOrderByItems().isEmpty()) {
      TopN.Builder topNBuilder = TopN.newBuilder();
      getOrderByItems()
          .forEach(
              tiByItem ->
                  topNBuilder.addOrderBy(
                      com.pingcap.tidb.tipb.ByItem.newBuilder()
                          .setExpr(
                              ProtoConverter.toProto(tiByItem.getExpr(), colOffsetInFieldMap))
                          .setDesc(tiByItem.isDesc())));
      executorBuilder.setTp(ExecType.TypeTopN);
      topNBuilder.setLimit(getLimit());
      dagRequestBuilder.addExecutors(executorBuilder.setTopN(topNBuilder));
      executorBuilder.clear();
    } else if (getLimit() != 0) {
      Limit.Builder limitBuilder = Limit.newBuilder();
      limitBuilder.setLimit(getLimit());
      executorBuilder.setTp(ExecType.TypeLimit);
      dagRequestBuilder.addExecutors(executorBuilder.setLimit(limitBuilder));
      executorBuilder.clear();
    }
  }

  DAGRequest request =
      dagRequestBuilder
          .setTimeZoneOffset(timeZoneOffset)
          .setFlags(flags)
          .setStartTs(startTs)
          .build();

  validateRequest(request);

  return request;
}
|
||||
|
||||
/**
|
||||
* Check if a DAG request is valid.
|
||||
*
|
||||
* <p>Note: When constructing a DAG request, a executor with an ExecType of higher priority should
|
||||
* always be placed before those lower ones.
|
||||
*
|
||||
* @param dagRequest Request DAG.
|
||||
*/
|
||||
private void validateRequest(DAGRequest dagRequest) {
|
||||
requireNonNull(dagRequest);
|
||||
// A DAG request must has at least one executor.
|
||||
if (dagRequest.getExecutorsCount() < 1) {
|
||||
throw new DAGRequestException("Invalid executors count:" + dagRequest.getExecutorsCount());
|
||||
}
|
||||
|
||||
ExecType formerType = dagRequest.getExecutors(0).getTp();
|
||||
if (formerType != ExecType.TypeTableScan && formerType != ExecType.TypeIndexScan) {
|
||||
throw new DAGRequestException(
|
||||
"Invalid first executor type:"
|
||||
+ formerType
|
||||
+ ", must one of TypeTableScan or TypeIndexScan");
|
||||
}
|
||||
|
||||
for (int i = 1; i < dagRequest.getExecutorsCount(); i++) {
|
||||
ExecType currentType = dagRequest.getExecutors(i).getTp();
|
||||
if (EXEC_TYPE_PRIORITY_MAP.get(currentType) < EXEC_TYPE_PRIORITY_MAP.get(formerType)) {
|
||||
throw new DAGRequestException("Invalid executor priority.");
|
||||
}
|
||||
formerType = currentType;
|
||||
}
|
||||
}
|
||||
|
||||
/** Sets the table this request scans; must not be null. */
public TiDAGRequest setTableInfo(TiTableInfo tableInfo) {
  this.tableInfo = requireNonNull(tableInfo, "tableInfo is null");
  return this;
}

public TiTableInfo getTableInfo() {
  return this.tableInfo;
}

/** Sets the index used for index scans; must not be null. */
public TiDAGRequest setIndexInfo(TiIndexInfo indexInfo) {
  this.indexInfo = requireNonNull(indexInfo, "indexInfo is null");
  return this;
}

public TiIndexInfo getIndexInfo() {
  return indexInfo;
}

/** Drops the index so the request falls back to a table scan. */
public void clearIndexInfo() {
  indexInfo = null;
}

public int getLimit() {
  return limit;
}

/**
 * add limit clause to select query.
 *
 * @param limit maximum number of rows to return; 0 means no limit
 * @return this TiDAGRequest
 */
public TiDAGRequest setLimit(int limit) {
  this.limit = limit;
  return this;
}

/**
 * set timezone offset
 *
 * @param timeZoneOffset timezone offset forwarded to the DAGRequest protobuf
 *     (presumably seconds from UTC — TODO confirm)
 * @return a TiDAGRequest
 */
public TiDAGRequest setTimeZoneOffset(int timeZoneOffset) {
  this.timeZoneOffset = timeZoneOffset;
  return this;
}

int getTimeZoneOffset() {
  return timeZoneOffset;
}

/**
 * set truncate mode
 *
 * @param mode truncate mode; its flag bit is OR-ed into this request's flags
 * @return a TiDAGRequest
 */
public TiDAGRequest setTruncateMode(TiDAGRequest.TruncateMode mode) {
  flags = requireNonNull(mode, "mode is null").mask(flags);
  return this;
}

@VisibleForTesting
public long getFlags() {
  return flags;
}

/**
 * set start timestamp for the transaction
 *
 * @param startTs timestamp; must be non-zero before {@code buildScan} is called
 * @return a TiDAGRequest
 */
public TiDAGRequest setStartTs(long startTs) {
  this.startTs = startTs;
  return this;
}

long getStartTs() {
  return startTs;
}
|
||||
|
||||
/**
|
||||
* set having clause to select query
|
||||
*
|
||||
* @param having is a expression represents Having
|
||||
* @return a TiDAGRequest
|
||||
*/
|
||||
public TiDAGRequest setHaving(Expression having) {
|
||||
this.having = requireNonNull(having, "having is null");
|
||||
return this;
|
||||
}
|
||||
|
||||
public TiDAGRequest setDistinct(boolean distinct) {
|
||||
this.distinct = distinct;
|
||||
return this;
|
||||
}
|
||||
|
||||
public boolean isDistinct() {
|
||||
return distinct;
|
||||
}
|
||||
|
||||
public TiDAGRequest addAggregate(Expression expr, DataType targetType) {
|
||||
requireNonNull(expr, "aggregation expr is null");
|
||||
aggregates.add(Pair.create(expr, targetType));
|
||||
return this;
|
||||
}
|
||||
|
||||
public List<Expression> getAggregates() {
|
||||
return aggregates.stream().map(p -> p.first).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
public List<Pair<Expression, DataType>> getAggregatePairs() {
|
||||
return aggregates;
|
||||
}
|
||||
|
||||
/**
|
||||
* add a order by clause to select query.
|
||||
*
|
||||
* @param byItem is a TiByItem.
|
||||
* @return a SelectBuilder
|
||||
*/
|
||||
public TiDAGRequest addOrderByItem(ByItem byItem) {
|
||||
orderByItems.add(requireNonNull(byItem, "byItem is null"));
|
||||
return this;
|
||||
}
|
||||
|
||||
List<ByItem> getOrderByItems() {
|
||||
return orderByItems;
|
||||
}
|
||||
|
||||
/**
|
||||
* add a group by clause to select query
|
||||
*
|
||||
* @param byItem is a TiByItem
|
||||
* @return a SelectBuilder
|
||||
*/
|
||||
public TiDAGRequest addGroupByItem(ByItem byItem) {
|
||||
groupByItems.add(requireNonNull(byItem, "byItem is null"));
|
||||
return this;
|
||||
}
|
||||
|
||||
public List<ByItem> getGroupByItems() {
|
||||
return groupByItems;
|
||||
}
|
||||
|
||||
/**
|
||||
* Field is not support in TiDB yet, for here we simply allow TiColumnRef instead of TiExpr like
|
||||
* in SelectRequest proto
|
||||
*
|
||||
* <p>
|
||||
*
|
||||
* <p>This interface allows duplicate columns and it's user's responsibility to do dedup since we
|
||||
* need to ensure exact order and items preserved during decoding
|
||||
*
|
||||
* @param column is column referred during selectReq
|
||||
*/
|
||||
public TiDAGRequest addRequiredColumn(ColumnRef column) {
|
||||
fields.add(requireNonNull(column, "columnRef is null"));
|
||||
return this;
|
||||
}
|
||||
|
||||
public List<ColumnRef> getFields() {
|
||||
return fields;
|
||||
}
|
||||
|
||||
/**
|
||||
* set key range of scan
|
||||
*
|
||||
* @param ranges key range of scan
|
||||
*/
|
||||
public TiDAGRequest addRanges(List<Coprocessor.KeyRange> ranges) {
|
||||
keyRanges.addAll(requireNonNull(ranges, "KeyRange is null"));
|
||||
return this;
|
||||
}
|
||||
|
||||
public void resetFilters(List<Expression> filters) {
|
||||
this.filters.clear();
|
||||
this.filters.addAll(filters);
|
||||
}
|
||||
|
||||
public List<Coprocessor.KeyRange> getRanges() {
|
||||
return keyRanges;
|
||||
}
|
||||
|
||||
public TiDAGRequest addFilter(Expression filter) {
|
||||
this.filters.add(requireNonNull(filter, "filters expr is null"));
|
||||
return this;
|
||||
}
|
||||
|
||||
public List<Expression> getDowngradeFilters() {
|
||||
return downgradeFilters;
|
||||
}
|
||||
|
||||
public TiDAGRequest addDowngradeFilter(Expression filter) {
|
||||
this.downgradeFilters.add(requireNonNull(filter, "downgrade filter is null"));
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the DAG request has any aggregate expression.
|
||||
*
|
||||
* @return the boolean
|
||||
*/
|
||||
public boolean hasAggregate() {
|
||||
return !getAggregates().isEmpty();
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the DAG request has any group by expression.
|
||||
*
|
||||
* @return the boolean
|
||||
*/
|
||||
public boolean hasGroupBy() {
|
||||
return !getGroupByItems().isEmpty();
|
||||
}
|
||||
|
||||
public List<Expression> getFilters() {
|
||||
return filters;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether handle is needed.
|
||||
*
|
||||
* @return the boolean
|
||||
*/
|
||||
public boolean isHandleNeeded() {
|
||||
return handleNeeded;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets handle needed.
|
||||
*
|
||||
* @param handleNeeded the handle needed
|
||||
*/
|
||||
public void setHandleNeeded(boolean handleNeeded) {
|
||||
this.handleNeeded = handleNeeded;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether needs double read
|
||||
*
|
||||
* @return boolean
|
||||
*/
|
||||
public boolean isDoubleRead() {
|
||||
return isDoubleRead;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets isDoubleRead
|
||||
*
|
||||
* @param isDoubleRead if is double read
|
||||
*/
|
||||
public void setIsDoubleRead(boolean isDoubleRead) {
|
||||
this.isDoubleRead = isDoubleRead;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether this request is of indexScanType
|
||||
*
|
||||
* @return true iff indexInfo is provided, false otherwise
|
||||
*/
|
||||
public boolean isIndexScan() {
|
||||
return indexInfo != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether we use streaming processing to retrieve data
|
||||
*
|
||||
* @return push down type.
|
||||
*/
|
||||
public PushDownType getPushDownType() {
|
||||
return pushDownType;
|
||||
}
|
||||
|
||||
/** Set the estimated row count will be fetched from this request. */
|
||||
public void setEstimatedCount(double estimatedCount) {
|
||||
this.estimatedCount = estimatedCount;
|
||||
}
|
||||
|
||||
/** Get the estimated row count will be fetched from this request. */
|
||||
public double getEstimatedCount() {
|
||||
return estimatedCount;
|
||||
}
|
||||
|
||||
// Renders a human-readable summary of the request: table/index names, columns,
// filters, key ranges, aggregates, grouping, ordering and limit.
@Override
public String toString() {
  StringBuilder sb = new StringBuilder();
  if (tableInfo != null) {
    sb.append(String.format("[table: %s] ", tableInfo.getName()));
  }

  if (indexInfo != null) {
    sb.append(String.format("[Index: %s] ", indexInfo.getName()));
  }

  if (!getFields().isEmpty()) {
    sb.append(", Columns: ");
    Joiner.on(", ").skipNulls().appendTo(sb, getFields());
  }

  if (!getDowngradeFilters().isEmpty()) {
    // should be called after all parameters are set
    // NOTE(review): this lazily computes and CACHES pushdownFilters as a side effect of
    // toString(); once computed it is never invalidated, so filters added afterwards are
    // not reflected — confirm this ordering assumption holds for all callers.
    if (pushdownFilters == null) {
      // Pushdown filters = downgrade filters minus the residual filters.
      pushdownFilters = new ArrayList<>(getDowngradeFilters());
      pushdownFilters.removeAll(new HashSet<>(getFilters()));
    }
    if (!pushdownFilters.isEmpty()) {
      sb.append(", Pushdown Filter: ");
      Joiner.on(", ").skipNulls().appendTo(sb, pushdownFilters);
    }
  }

  if (!getFilters().isEmpty()) {
    sb.append(", Residual Filter: ");
    Joiner.on(", ").skipNulls().appendTo(sb, getFilters());
  }

  // Key ranges might be also useful
  if (!getRanges().isEmpty()) {
    sb.append(", KeyRange: ");
    getRanges().forEach(x -> sb.append(KeyUtils.formatBytes(x)));
  }

  if (!getAggregates().isEmpty()) {
    sb.append(", Aggregates: ");
    Joiner.on(", ").skipNulls().appendTo(sb, getAggregates());
  }

  if (!getGroupByItems().isEmpty()) {
    sb.append(", Group By: ");
    Joiner.on(", ").skipNulls().appendTo(sb, getGroupByItems());
  }

  if (!getOrderByItems().isEmpty()) {
    sb.append(", Order By: ");
    Joiner.on(", ").skipNulls().appendTo(sb, getOrderByItems());
  }

  if (getLimit() != 0) {
    sb.append(", Limit: ");
    sb.append("[").append(limit).append("]");
  }
  return sb.toString();
}
|
||||
|
||||
public TiDAGRequest copy() {
|
||||
try {
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
ObjectOutputStream oos = new ObjectOutputStream(baos);
|
||||
oos.writeObject(this);
|
||||
ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
|
||||
ObjectInputStream ois = new ObjectInputStream(bais);
|
||||
return ((TiDAGRequest) ois.readObject());
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,109 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.meta;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import java.util.List;
|
||||
|
||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
||||
public class TiDBInfo {
|
||||
private final long id;
|
||||
private final String name;
|
||||
private final String charset;
|
||||
private final String collate;
|
||||
private final List<TiTableInfo> tables;
|
||||
private final SchemaState schemaState;
|
||||
|
||||
@JsonCreator
|
||||
public TiDBInfo(
|
||||
@JsonProperty("id") long id,
|
||||
@JsonProperty("db_name") CIStr name,
|
||||
@JsonProperty("charset") String charset,
|
||||
@JsonProperty("collate") String collate,
|
||||
@JsonProperty("-") List<TiTableInfo> tables,
|
||||
@JsonProperty("state") int schemaState) {
|
||||
this.id = id;
|
||||
this.name = name.getL();
|
||||
this.charset = charset;
|
||||
this.collate = collate;
|
||||
this.tables = tables;
|
||||
this.schemaState = SchemaState.fromValue(schemaState);
|
||||
}
|
||||
|
||||
private TiDBInfo(
|
||||
long id,
|
||||
String name,
|
||||
String charset,
|
||||
String collate,
|
||||
List<TiTableInfo> tables,
|
||||
SchemaState schemaState) {
|
||||
this.id = id;
|
||||
this.name = name;
|
||||
this.charset = charset;
|
||||
this.collate = collate;
|
||||
this.tables = tables;
|
||||
this.schemaState = schemaState;
|
||||
}
|
||||
|
||||
public TiDBInfo rename(String newName) {
|
||||
return new TiDBInfo(id, newName, charset, collate, tables, schemaState);
|
||||
}
|
||||
|
||||
public long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public String getCharset() {
|
||||
return charset;
|
||||
}
|
||||
|
||||
public String getCollate() {
|
||||
return collate;
|
||||
}
|
||||
|
||||
public List<TiTableInfo> getTables() {
|
||||
return tables;
|
||||
}
|
||||
|
||||
SchemaState getSchemaState() {
|
||||
return schemaState;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (other == this) {
|
||||
return true;
|
||||
}
|
||||
if (!(other instanceof TiDBInfo)) {
|
||||
return false;
|
||||
}
|
||||
TiDBInfo otherDB = (TiDBInfo) other;
|
||||
return otherDB.getId() == getId() && otherDB.getName().equals(getName());
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
final int prime = 31;
|
||||
int result = prime + Long.hashCode(getId());
|
||||
return result * prime + getName().hashCode();
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,65 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.meta;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.pingcap.tikv.types.DataType;
|
||||
import java.io.Serializable;
|
||||
|
||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
||||
public class TiIndexColumn implements Serializable {
|
||||
private String name;
|
||||
private int offset;
|
||||
private long length;
|
||||
|
||||
@JsonCreator
|
||||
public TiIndexColumn(
|
||||
@JsonProperty("name") CIStr name,
|
||||
@JsonProperty("offset") int offset,
|
||||
@JsonProperty("length") long length) {
|
||||
this.name = name.getL();
|
||||
this.offset = offset;
|
||||
this.length = length;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public int getOffset() {
|
||||
return offset;
|
||||
}
|
||||
|
||||
public long getLength() {
|
||||
return length;
|
||||
}
|
||||
|
||||
public boolean isPrefixIndex() {
|
||||
return length != DataType.UNSPECIFIED_LEN;
|
||||
}
|
||||
|
||||
public boolean matchName(String otherName) {
|
||||
return name.equalsIgnoreCase(otherName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format(
|
||||
"%s {name: %s, offset: %d, length: %d}", getClass().getSimpleName(), name, offset, length);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,167 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.meta;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.base.Joiner;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.pingcap.tidb.tipb.ColumnInfo;
|
||||
import com.pingcap.tidb.tipb.IndexInfo;
|
||||
import java.io.Serializable;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
 * Index metadata for one table index, deserialized from TiDB's schema JSON.
 *
 * <p>Names are stored lower-cased (via {@code CIStr.getL()}). An instance may also represent a
 * "fake" index synthesized from an integer primary-key column — see
 * {@link #generateFakePrimaryKeyIndex(TiTableInfo)}.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class TiIndexInfo implements Serializable {
  private final long id;
  private final String name;
  private final String tableName;
  private final List<TiIndexColumn> indexColumns;
  private final boolean isUnique;
  private final boolean isPrimary;
  private final SchemaState schemaState;
  private final String comment;
  private final IndexType indexType;
  private final boolean isFakePrimaryKey;

  /**
   * Jackson creator.
   *
   * @param isFakePrimaryKey fake property, present only to satisfy Jackson's creator binding;
   *     true only for indices built by {@link #generateFakePrimaryKeyIndex(TiTableInfo)}
   */
  @JsonCreator
  @VisibleForTesting
  public TiIndexInfo(
      @JsonProperty("id") long id,
      @JsonProperty("idx_name") CIStr name,
      @JsonProperty("tbl_name") CIStr tableName,
      @JsonProperty("idx_cols") List<TiIndexColumn> indexColumns,
      @JsonProperty("is_unique") boolean isUnique,
      @JsonProperty("is_primary") boolean isPrimary,
      @JsonProperty("state") int schemaState,
      @JsonProperty("comment") String comment,
      @JsonProperty("index_type") int indexType,
      // This is a fake property and added JsonProperty only to
      // to bypass Jackson frameworks's check
      @JsonProperty("___isFakePrimaryKey") boolean isFakePrimaryKey) {
    this.id = id;
    this.name = requireNonNull(name, "index name is null").getL();
    this.tableName = requireNonNull(tableName, "table name is null").getL();
    this.indexColumns = ImmutableList.copyOf(requireNonNull(indexColumns, "indexColumns is null"));
    this.isUnique = isUnique;
    this.isPrimary = isPrimary;
    this.schemaState = SchemaState.fromValue(schemaState);
    this.comment = comment;
    this.indexType = IndexType.fromValue(indexType);
    this.isFakePrimaryKey = isFakePrimaryKey;
  }

  /**
   * Builds a synthetic unique "primary key" index (id -1) over the table's integer primary-key
   * column, or returns null when the table has no such column.
   */
  public static TiIndexInfo generateFakePrimaryKeyIndex(TiTableInfo table) {
    TiColumnInfo pkColumn = table.getPrimaryKeyColumn();
    if (pkColumn != null) {
      return new TiIndexInfo(
          -1,
          CIStr.newCIStr("fake_pk_" + table.getId()),
          CIStr.newCIStr(table.getName()),
          ImmutableList.of(pkColumn.toFakeIndexColumn()),
          true,
          true,
          SchemaState.StatePublic.getStateCode(),
          "Fake Column",
          IndexType.IndexTypeHash.getTypeCode(),
          true);
    }
    return null;
  }

  public long getId() {
    return id;
  }

  public String getName() {
    return name;
  }

  public String getTableName() {
    return tableName;
  }

  public List<TiIndexColumn> getIndexColumns() {
    return indexColumns;
  }

  public boolean isUnique() {
    return isUnique;
  }

  public boolean isPrimary() {
    return isPrimary;
  }

  public SchemaState getSchemaState() {
    return schemaState;
  }

  public String getComment() {
    return comment;
  }

  public IndexType getIndexType() {
    return indexType;
  }

  /**
   * Converts this index into its protobuf representation for a coprocessor request.
   *
   * <p>Columns referenced by the index are resolved through their offsets into the given table.
   * When the table's primary key doubles as the row handle, every primary-key column is appended
   * with {@code pkHandle=true} so the server can decode the handle.
   */
  public IndexInfo toProto(TiTableInfo tableInfo) {
    IndexInfo.Builder builder =
        IndexInfo.newBuilder().setTableId(tableInfo.getId()).setIndexId(id).setUnique(isUnique);

    List<TiColumnInfo> columns = tableInfo.getColumns();

    for (TiIndexColumn indexColumn : getIndexColumns()) {
      int offset = indexColumn.getOffset();
      TiColumnInfo column = columns.get(offset);
      builder.addColumns(column.toProto(tableInfo));
    }

    if (tableInfo.isPkHandle()) {
      for (TiColumnInfo column : columns) {
        if (!column.isPrimaryKey()) {
          continue;
        }
        ColumnInfo pbColumn = column.toProtoBuilder(tableInfo).setPkHandle(true).build();
        builder.addColumns(pbColumn);
      }
    }
    return builder.build();
  }

  /** True only for indices synthesized by {@link #generateFakePrimaryKeyIndex(TiTableInfo)}. */
  public boolean isFakePrimaryKey() {
    return isFakePrimaryKey;
  }

  // Renders as "name[col1,col2,...]".
  @Override
  public String toString() {
    return String.format(
        "%s[%s]",
        name,
        Joiner.on(",")
            .skipNulls()
            .join(
                indexColumns
                    .stream()
                    .map(column -> column.getName())
                    .collect(Collectors.toList())));
  }
}
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
package com.pingcap.tikv.meta;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.annotations.VisibleForTesting;
import java.io.Serializable;

/**
 * One partition definition of a partitioned table, as carried in TiDB's schema JSON.
 *
 * <p>Uses the FasterXML Jackson annotations: the rest of the meta package (e.g. {@code TiDBInfo},
 * {@code TiTableInfo}) deserializes with a FasterXML ObjectMapper, which ignores the
 * {@code org.codehaus.jackson} annotations this class previously used — the creator binding would
 * silently not apply.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class TiPartitionDef implements Serializable {
  private final long id;
  private final CIStr name;
  private final String[] lessThan;
  private final String comment;

  @JsonCreator
  @VisibleForTesting
  public TiPartitionDef(
      @JsonProperty("id") long id,
      @JsonProperty("name") CIStr name,
      @JsonProperty("less_than") String[] lessThan,
      @JsonProperty("comment") String comment) {
    this.id = id;
    this.name = name;
    this.lessThan = lessThan;
    this.comment = comment;
  }

  /** Returns the partition id. */
  public long getId() {
    return id;
  }

  /** Returns the partition name. */
  public CIStr getName() {
    return name;
  }

  /** Returns the range upper bounds ("less than" values) of this partition. */
  public String[] getLessThan() {
    return lessThan;
  }

  /** Returns the partition comment. */
  public String getComment() {
    return comment;
  }
}
|
||||
|
|
@ -0,0 +1,36 @@
|
|||
package com.pingcap.tikv.meta;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.annotations.VisibleForTesting;
import java.io.Serializable;

/**
 * Partitioning metadata of a table, as carried in TiDB's schema JSON.
 *
 * <p>Two fixes over the original: (1) FasterXML Jackson annotations instead of
 * {@code org.codehaus.jackson}, so the creator binding is honored by the ObjectMapper used by the
 * rest of the meta package; (2) implements {@link Serializable} — this object is held by the
 * {@code Serializable} {@code TiTableInfo}, which is serialized by {@code TiDAGRequest.copy()},
 * and a non-serializable field there would throw {@code NotSerializableException}.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class TiPartitionInfo implements Serializable {
  /** Supported partitioning strategies. */
  public enum PartitionType {
    RangePartition,
    HashPartition,
    ListPartition,
  }

  private final PartitionType type;
  private final String expr;
  private final CIStr[] columns;
  private final boolean enable;
  private final TiPartitionDef[] defs;

  @JsonCreator
  @VisibleForTesting
  public TiPartitionInfo(
      @JsonProperty("type") PartitionType type,
      @JsonProperty("expr") String expr,
      @JsonProperty("columns") CIStr[] columns,
      @JsonProperty("enable") boolean enable,
      @JsonProperty("definitions") TiPartitionDef[] defs) {
    this.type = type;
    this.expr = expr;
    this.columns = columns;
    this.enable = enable;
    this.defs = defs;
  }

  /** Returns the partitioning strategy. */
  public PartitionType getType() {
    return type;
  }

  /** Returns the partitioning expression, if any. */
  public String getExpr() {
    return expr;
  }

  /** Returns the partitioning columns, if column-based. */
  public CIStr[] getColumns() {
    return columns;
  }

  /** Returns whether partitioning is enabled for the table. */
  public boolean isEnable() {
    return enable;
  }

  /** Returns the individual partition definitions. */
  public TiPartitionDef[] getDefs() {
    return defs;
  }
}
|
||||
|
|
@ -0,0 +1,199 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.meta;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.pingcap.tidb.tipb.TableInfo;
|
||||
import com.pingcap.tikv.exception.TiClientInternalException;
|
||||
import com.pingcap.tikv.meta.TiColumnInfo.InternalTypeHolder;
|
||||
import com.pingcap.tikv.types.DataType;
|
||||
import com.pingcap.tikv.types.DataTypeFactory;
|
||||
import java.io.Serializable;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
 * Table metadata deserialized from TiDB's schema JSON.
 *
 * <p>The table name is stored lower-cased (via {@code CIStr.getL()}); columns are snapshotted into
 * an immutable list. Serializable because it travels inside {@code TiDAGRequest.copy()}.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class TiTableInfo implements Serializable {
  private final long id;
  private final String name;
  private final String charset;
  private final String collate;
  private final List<TiColumnInfo> columns;
  private final List<TiIndexInfo> indices;
  private final boolean pkIsHandle;
  private final String comment;
  private final long autoIncId;
  private final long maxColumnId;
  private final long maxIndexId;
  private final long oldSchemaId;
  private final TiPartitionInfo partitionInfo;

  /** Jackson creator. A null index list is normalized to an empty immutable list. */
  @JsonCreator
  public TiTableInfo(
      @JsonProperty("id") long id,
      @JsonProperty("name") CIStr name,
      @JsonProperty("charset") String charset,
      @JsonProperty("collate") String collate,
      @JsonProperty("pk_is_handle") boolean pkIsHandle,
      @JsonProperty("cols") List<TiColumnInfo> columns,
      @JsonProperty("index_info") List<TiIndexInfo> indices,
      @JsonProperty("comment") String comment,
      @JsonProperty("auto_inc_id") long autoIncId,
      @JsonProperty("max_col_id") long maxColumnId,
      @JsonProperty("max_idx_id") long maxIndexId,
      @JsonProperty("old_schema_id") long oldSchemaId,
      @JsonProperty("partition") TiPartitionInfo partitionInfo) {
    this.id = id;
    this.name = name.getL();
    this.charset = charset;
    this.collate = collate;
    this.columns = ImmutableList.copyOf(requireNonNull(columns, "columns is null"));
    this.pkIsHandle = pkIsHandle;
    this.indices = indices != null ? ImmutableList.copyOf(indices) : ImmutableList.of();
    this.comment = comment;
    this.autoIncId = autoIncId;
    this.maxColumnId = maxColumnId;
    this.maxIndexId = maxIndexId;
    this.oldSchemaId = oldSchemaId;
    this.partitionInfo = partitionInfo;
  }

  public long getId() {
    return id;
  }

  public String getName() {
    return name;
  }

  public String getCharset() {
    return charset;
  }

  public String getCollate() {
    return collate;
  }

  public List<TiColumnInfo> getColumns() {
    return columns;
  }

  /**
   * Returns the column at the given offset.
   *
   * @throws TiClientInternalException when the offset is out of the column list's bounds
   */
  public TiColumnInfo getColumn(int offset) {
    if (offset < 0 || offset >= columns.size()) {
      throw new TiClientInternalException(String.format("Column offset %d out of bound", offset));
    }
    return columns.get(offset);
  }

  /** Whether the primary key doubles as the row handle. */
  public boolean isPkHandle() {
    return pkIsHandle;
  }

  public List<TiIndexInfo> getIndices() {
    return indices;
  }

  public String getComment() {
    return comment;
  }

  public long getAutoIncId() {
    return autoIncId;
  }

  public long getMaxColumnId() {
    return maxColumnId;
  }

  public long getMaxIndexId() {
    return maxIndexId;
  }

  public long getOldSchemaId() {
    return oldSchemaId;
  }

  /** Converts this table (id plus all columns) to its protobuf representation. */
  public TableInfo toProto() {
    return TableInfo.newBuilder()
        .setTableId(getId())
        .addAllColumns(
            getColumns().stream().map(col -> col.toProto(this)).collect(Collectors.toList()))
        .build();
  }

  // Only Integer Column will be a PK column
  // and there exists only one PK column
  public TiColumnInfo getPrimaryKeyColumn() {
    if (isPkHandle()) {
      for (TiColumnInfo col : getColumns()) {
        if (col.isPrimaryKey()) {
          return col;
        }
      }
    }
    return null;
  }

  /**
   * Returns a copy of this table with an explicit row-id column.
   *
   * <p>When the primary key is not already the row handle, every column is rebuilt with its
   * PriKeyFlag stripped, an implicit row-id column is appended at the end, and the copy is marked
   * pkIsHandle=true. Otherwise this instance is returned unchanged.
   */
  public TiTableInfo copyTableWithRowId() {
    if (!isPkHandle()) {
      ImmutableList.Builder<TiColumnInfo> newColumns = ImmutableList.builder();
      for (TiColumnInfo col : getColumns()) {
        DataType type = col.getType();
        InternalTypeHolder typeHolder = type.toTypeHolder();
        // Strip the primary-key bit from the column's type flags.
        typeHolder.setFlag(type.getFlag() & (~DataType.PriKeyFlag));
        DataType newType = DataTypeFactory.of(typeHolder);
        TiColumnInfo newCol =
            new TiColumnInfo(
                col.getId(),
                col.getName(),
                col.getOffset(),
                newType,
                col.getSchemaState(),
                col.getOriginDefaultValue(),
                col.getDefaultValue(),
                col.getComment());
        newColumns.add(newCol.copyWithoutPrimaryKey());
      }
      // Implicit row-id column is placed after all existing columns.
      newColumns.add(TiColumnInfo.getRowIdColumn(getColumns().size()));
      return new TiTableInfo(
          getId(),
          CIStr.newCIStr(getName()),
          getCharset(),
          getCollate(),
          true,
          newColumns.build(),
          getIndices(),
          getComment(),
          getAutoIncId(),
          getMaxColumnId(),
          getMaxIndexId(),
          getOldSchemaId(),
          partitionInfo);
    } else {
      return this;
    }
  }

  @Override
  public String toString() {
    return toProto().toString();
  }
}
|
||||
|
|
@ -0,0 +1,43 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.meta;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
/** TiTimestamp is the timestamp returned by timestamp oracle inside placement driver */
public class TiTimestamp implements Serializable {
  // The physical (millisecond) part occupies the high bits; the logical
  // counter lives in the low PHYSICAL_SHIFT_BITS bits.
  private static final int PHYSICAL_SHIFT_BITS = 18;

  private final long physical;
  private final long logical;

  public TiTimestamp(long p, long l) {
    physical = p;
    logical = l;
  }

  /** Composes the 64-bit version: physical part shifted left by 18 bits, plus the logical part. */
  public long getVersion() {
    long shifted = physical << PHYSICAL_SHIFT_BITS;
    return shifted + logical;
  }

  /** Returns the physical component of the timestamp. */
  public long getPhysical() {
    return physical;
  }

  /** Returns the logical component of the timestamp. */
  public long getLogical() {
    return logical;
  }
}
|
||||
|
|
@ -0,0 +1,40 @@
|
|||
/*
|
||||
*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.operation;
|
||||
|
||||
import com.pingcap.tikv.util.BackOffer;
|
||||
|
||||
/**
 * Strategy for reacting to errors around a TiKV call: one hook for errors carried inside a
 * completed response, one for exceptions raised before a response could be produced. Both return
 * whether the caller should retry, typically after consulting the supplied {@link BackOffer}.
 *
 * @param <RespT> response type produced by the call
 */
public interface ErrorHandler<RespT> {
  /**
   * Handle the error received in the response after a calling process completes.
   *
   * @param backOffer Back offer used for retry
   * @param resp the response to handle
   * @return whether the caller should retry
   */
  boolean handleResponseError(BackOffer backOffer, RespT resp);

  /**
   * Handle the error received during a calling process before it could return a normal response.
   *
   * @param backOffer Back offer used for retry
   * @param e Exception received during a calling process
   * @return whether the caller should retry
   */
  boolean handleRequestError(BackOffer backOffer, Exception e);
}
|
||||
|
|
@ -0,0 +1,255 @@
|
|||
/*
|
||||
*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.operation;
|
||||
|
||||
import com.google.protobuf.ByteString;
|
||||
import com.pingcap.tikv.codec.KeyUtils;
|
||||
import com.pingcap.tikv.event.CacheInvalidateEvent;
|
||||
import com.pingcap.tikv.exception.GrpcException;
|
||||
import com.pingcap.tikv.kvproto.Errorpb;
|
||||
import com.pingcap.tikv.region.RegionErrorReceiver;
|
||||
import com.pingcap.tikv.region.RegionManager;
|
||||
import com.pingcap.tikv.region.TiRegion;
|
||||
import com.pingcap.tikv.util.BackOffFunction;
|
||||
import com.pingcap.tikv.util.BackOffer;
|
||||
import io.grpc.Status;
|
||||
import io.grpc.StatusRuntimeException;
|
||||
import java.util.function.Function;
|
||||
import org.apache.log4j.Logger;
|
||||
|
||||
// TODO: consider refactor to Builder mode
|
||||
/**
 * Error handler for TiKV key-value RPC calls.
 *
 * <p>Inspects the region error (if any) carried in a response, updates or invalidates the locally
 * cached region/store information, drives the supplied {@link BackOffer}, and reports through the
 * boolean return values whether the caller should retry the request.
 *
 * <p>The response-error dispatch mirrors TiDB's store/tikv/region_request.go (onRegionError).
 */
public class KVErrorHandler<RespT> implements ErrorHandler<RespT> {
  private static final Logger logger = Logger.getLogger(KVErrorHandler.class);
  private static final int NO_LEADER_STORE_ID =
      0; // if there's currently no leader of a store, store id is set to 0

  // Extracts the region error out of a response; may be null when no extractor was supplied.
  private final Function<RespT, Errorpb.Error> getRegionError;
  // Callback used to push cache-invalidation events back to a driver (see notify* methods below);
  // null when no session/callback is available.
  private final Function<CacheInvalidateEvent, Void> cacheInvalidateCallBack;
  private final RegionManager regionManager;
  // Receiver informed of leader changes and store mismatches so it can switch store address.
  private final RegionErrorReceiver recv;
  // The region the original request was issued against.
  private final TiRegion ctxRegion;

  /**
   * @param regionManager manager of the cached region/store information
   * @param recv receiver notified on leader change / store mismatch
   * @param ctxRegion region targeted by the request whose errors are handled
   * @param getRegionError function extracting the region error from a response, or null
   */
  public KVErrorHandler(
      RegionManager regionManager,
      RegionErrorReceiver recv,
      TiRegion ctxRegion,
      Function<RespT, Errorpb.Error> getRegionError) {
    this.ctxRegion = ctxRegion;
    this.recv = recv;
    this.regionManager = regionManager;
    this.getRegionError = getRegionError;
    // Resolve the invalidation callback only when both the manager and its session are present.
    this.cacheInvalidateCallBack =
        regionManager != null && regionManager.getSession() != null
            ? regionManager.getSession().getCacheInvalidateCallback()
            : null;
  }

  /** Returns the region error attached to {@code resp}, or null when absent or no extractor. */
  private Errorpb.Error getRegionError(RespT resp) {
    if (getRegionError != null) {
      return getRegionError.apply(resp);
    }
    return null;
  }

  /** Drops both the region and its leader's store from the cache, then notifies listeners. */
  private void invalidateRegionStoreCache(TiRegion ctxRegion) {
    regionManager.invalidateRegion(ctxRegion.getId());
    regionManager.invalidateStore(ctxRegion.getLeader().getStoreId());
    notifyRegionStoreCacheInvalidate(
        ctxRegion.getId(),
        ctxRegion.getLeader().getStoreId(),
        CacheInvalidateEvent.CacheType.REGION_STORE);
  }

  /** Used for notifying Spark driver to invalidate cache from Spark workers. */
  private void notifyRegionStoreCacheInvalidate(
      long regionId, long storeId, CacheInvalidateEvent.CacheType type) {
    if (cacheInvalidateCallBack != null) {
      cacheInvalidateCallBack.apply(new CacheInvalidateEvent(regionId, storeId, true, true, type));
      logger.info(
          "Accumulating cache invalidation info to driver:regionId="
              + regionId
              + ",storeId="
              + storeId
              + ",type="
              + type.name());
    } else {
      logger.warn(
          "Failed to send notification back to driver since CacheInvalidateCallBack is null in executor node.");
    }
  }

  /** Notifies listeners that only the region entry (not a specific store) is invalidated. */
  private void notifyRegionCacheInvalidate(long regionId) {
    if (cacheInvalidateCallBack != null) {
      cacheInvalidateCallBack.apply(
          new CacheInvalidateEvent(
              regionId, 0, true, false, CacheInvalidateEvent.CacheType.REGION_STORE));
      logger.info(
          "Accumulating cache invalidation info to driver:regionId="
              + regionId
              + ",type="
              + CacheInvalidateEvent.CacheType.REGION_STORE.name());
    } else {
      logger.warn(
          "Failed to send notification back to driver since CacheInvalidateCallBack is null in executor node.");
    }
  }

  /** Notifies listeners that only the store entry (not the region) is invalidated. */
  private void notifyStoreCacheInvalidate(long storeId) {
    if (cacheInvalidateCallBack != null) {
      cacheInvalidateCallBack.apply(
          new CacheInvalidateEvent(
              0, storeId, false, true, CacheInvalidateEvent.CacheType.REGION_STORE));
    } else {
      logger.warn(
          "Failed to send notification back to driver since CacheInvalidateCallBack is null in executor node.");
    }
  }

  // Referenced from TiDB
  // store/tikv/region_request.go - onRegionError
  /**
   * Handles a completed response: dispatches on the specific region error, refreshes caches and
   * backs off as appropriate, and decides whether the request should be retried.
   *
   * @param backOffer back offer driven before a retry is requested
   * @param resp the response to inspect; null is treated as an unknown request failure
   * @return true when the caller should retry the request
   */
  @Override
  public boolean handleResponseError(BackOffer backOffer, RespT resp) {
    if (resp == null) {
      // NOTE(review): "region region" below looks like a duplicated word in the message.
      String msg =
          String.format("Request Failed with unknown reason for region region [%s]", ctxRegion);
      logger.warn(msg);
      return handleRequestError(backOffer, new GrpcException(msg));
    }

    // Region error handling logic
    Errorpb.Error error = getRegionError(resp);
    if (error != null) {
      if (error.hasNotLeader()) {
        // this error is reported from raftstore:
        // peer of current request is not leader, the following might be its causes:
        // 1. cache is outdated, region has changed its leader, can be solved by re-fetching from PD
        // 2. leader of current region is missing, need to wait and then fetch region info from PD
        long newStoreId = error.getNotLeader().getLeader().getStoreId();
        boolean retry = true;

        // update Leader here
        logger.warn(
            String.format(
                "NotLeader Error with region id %d and store id %d, new store id %d",
                ctxRegion.getId(), ctxRegion.getLeader().getStoreId(), newStoreId));

        BackOffFunction.BackOffFuncType backOffFuncType;
        // if there's current no leader, we do not trigger update pd cache logic
        // since issuing store = NO_LEADER_STORE_ID requests to pd will definitely fail.
        if (newStoreId != NO_LEADER_STORE_ID) {
          if (!this.regionManager.updateLeader(ctxRegion.getId(), newStoreId)
              || !recv.onNotLeader(this.regionManager.getStoreById(newStoreId))) {
            // If update leader fails, we need to fetch new region info from pd,
            // and re-split key range for new region. Setting retry to false will
            // stop retry and enter handleCopResponse logic, which would use RegionMiss
            // backOff strategy to wait, fetch new region and re-split key range.
            // onNotLeader is only needed when updateLeader succeeds, thus switch
            // to a new store address.
            retry = false;
          }
          notifyRegionStoreCacheInvalidate(
              ctxRegion.getId(), newStoreId, CacheInvalidateEvent.CacheType.LEADER);

          backOffFuncType = BackOffFunction.BackOffFuncType.BoUpdateLeader;
        } else {
          logger.info(
              String.format(
                  "Received zero store id, from region %d try next time", ctxRegion.getId()));
          backOffFuncType = BackOffFunction.BackOffFuncType.BoRegionMiss;
        }

        backOffer.doBackOff(backOffFuncType, new GrpcException(error.toString()));

        return retry;
      } else if (error.hasStoreNotMatch()) {
        // this error is reported from raftstore:
        // store_id requested at the moment is inconsistent with that expected
        // Solution:re-fetch from PD
        long storeId = ctxRegion.getLeader().getStoreId();
        logger.warn(
            String.format(
                "Store Not Match happened with region id %d, store id %d",
                ctxRegion.getId(), storeId));

        this.regionManager.invalidateStore(storeId);
        recv.onStoreNotMatch(this.regionManager.getStoreById(storeId));
        notifyStoreCacheInvalidate(storeId);
        return true;
      } else if (error.hasStaleEpoch()) {
        // this error is reported from raftstore:
        // region has outdated version,please try later.
        logger.warn(String.format("Stale Epoch encountered for region [%s]", ctxRegion));
        this.regionManager.onRegionStale(ctxRegion.getId());
        notifyRegionCacheInvalidate(ctxRegion.getId());
        return false;
      } else if (error.hasServerIsBusy()) {
        // this error is reported from kv:
        // will occur when write pressure is high. Please try later.
        logger.warn(
            String.format(
                "Server is busy for region [%s], reason: %s",
                ctxRegion, error.getServerIsBusy().getReason()));
        backOffer.doBackOff(
            BackOffFunction.BackOffFuncType.BoServerBusy,
            new StatusRuntimeException(
                Status.fromCode(Status.Code.UNAVAILABLE).withDescription(error.toString())));
        return true;
      } else if (error.hasStaleCommand()) {
        // this error is reported from raftstore:
        // command outdated, please try later
        logger.warn(String.format("Stale command for region [%s]", ctxRegion));
        return true;
      } else if (error.hasRaftEntryTooLarge()) {
        // Too-large raft entries are not retriable: surface immediately as UNAVAILABLE.
        logger.warn(String.format("Raft too large for region [%s]", ctxRegion));
        throw new StatusRuntimeException(
            Status.fromCode(Status.Code.UNAVAILABLE).withDescription(error.toString()));
      } else if (error.hasKeyNotInRegion()) {
        // this error is reported from raftstore:
        // key requested is not in current region
        // should not happen here.
        ByteString invalidKey = error.getKeyNotInRegion().getKey();
        logger.error(
            String.format(
                "Key not in region [%s] for key [%s], this error should not happen here.",
                ctxRegion, KeyUtils.formatBytes(invalidKey)));
        throw new StatusRuntimeException(Status.UNKNOWN.withDescription(error.toString()));
      }

      logger.warn(String.format("Unknown error for region [%s]", ctxRegion));
      // For other errors, we only drop cache here.
      // Upper level may split this task.
      invalidateRegionStoreCache(ctxRegion);
    }

    return false;
  }

  /**
   * Handles a transport-level failure (no usable response): marks the request failed on the
   * region manager, notifies listeners, backs off and asks the caller to retry.
   *
   * @param backOffer back offer driven before the retry
   * @param e the exception raised while sending the request
   * @return always true — the caller should retry on another peer
   */
  @Override
  public boolean handleRequestError(BackOffer backOffer, Exception e) {
    regionManager.onRequestFail(ctxRegion.getId(), ctxRegion.getLeader().getStoreId());
    notifyRegionStoreCacheInvalidate(
        ctxRegion.getId(),
        ctxRegion.getLeader().getStoreId(),
        CacheInvalidateEvent.CacheType.REQ_FAILED);

    backOffer.doBackOff(
        BackOffFunction.BackOffFuncType.BoTiKVRPC,
        new GrpcException(
            "send tikv request error: " + e.getMessage() + ", try next peer later", e));
    return true;
  }
}
|
||||
|
|
@ -0,0 +1,56 @@
|
|||
/*
|
||||
*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.operation;
|
||||
|
||||
import com.pingcap.tikv.PDClient;
|
||||
import com.pingcap.tikv.exception.GrpcException;
|
||||
import com.pingcap.tikv.kvproto.Pdpb;
|
||||
import com.pingcap.tikv.util.BackOffFunction;
|
||||
import com.pingcap.tikv.util.BackOffer;
|
||||
import java.util.function.Function;
|
||||
|
||||
public class PDErrorHandler<RespT> implements ErrorHandler<RespT> {
|
||||
private final Function<RespT, Pdpb.Error> getError;
|
||||
private final PDClient client;
|
||||
|
||||
public PDErrorHandler(Function<RespT, Pdpb.Error> errorExtractor, PDClient client) {
|
||||
this.getError = errorExtractor;
|
||||
this.client = client;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean handleResponseError(BackOffer backOffer, RespT resp) {
|
||||
if (resp == null) {
|
||||
return false;
|
||||
}
|
||||
Pdpb.Error error = getError.apply(resp);
|
||||
if (error != null) {
|
||||
client.updateLeader();
|
||||
backOffer.doBackOff(
|
||||
BackOffFunction.BackOffFuncType.BoPDRPC, new GrpcException(error.toString()));
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean handleRequestError(BackOffer backOffer, Exception e) {
|
||||
backOffer.doBackOff(BackOffFunction.BackOffFuncType.BoPDRPC, e);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,123 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.operation;
|
||||
|
||||
import com.pingcap.tikv.expression.ByItem;
|
||||
import com.pingcap.tikv.expression.Expression;
|
||||
import com.pingcap.tikv.meta.TiDAGRequest;
|
||||
import com.pingcap.tikv.operation.transformer.Cast;
|
||||
import com.pingcap.tikv.operation.transformer.NoOp;
|
||||
import com.pingcap.tikv.operation.transformer.RowTransformer;
|
||||
import com.pingcap.tikv.types.DataType;
|
||||
import com.pingcap.tikv.types.IntegerType;
|
||||
import com.pingcap.tikv.util.Pair;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* SchemaInfer extract row's type after query is executed. It is pretty rough version. Optimization
|
||||
* is on the way. The problem we have right now is that TiDB promote Sum to Decimal which is not
|
||||
* compatible with column's type. The solution we come up with right now is use record column's type
|
||||
* ad finalFieldType and build another list recording TiExpr's type as fieldType for row reading.
|
||||
* Once we finish row reading, we first check each element in fieldType and finalFieldType share the
|
||||
* same type or not. If yes, no need for casting. If no, casting is needed here.
|
||||
*/
|
||||
public class SchemaInfer {
|
||||
private List<DataType> types;
|
||||
private RowTransformer rt;
|
||||
|
||||
public static SchemaInfer create(TiDAGRequest dagRequest) {
|
||||
return new SchemaInfer(dagRequest);
|
||||
}
|
||||
|
||||
protected SchemaInfer(TiDAGRequest dagRequest) {
|
||||
types = new ArrayList<>();
|
||||
extractFieldTypes(dagRequest);
|
||||
extractHandleType(dagRequest);
|
||||
buildTransform(dagRequest);
|
||||
}
|
||||
|
||||
private void extractHandleType(TiDAGRequest dagRequest) {
|
||||
if (dagRequest.isHandleNeeded()) {
|
||||
// DataType of handle is long
|
||||
types.add(IntegerType.INT);
|
||||
}
|
||||
}
|
||||
|
||||
private void buildTransform(TiDAGRequest dagRequest) {
|
||||
RowTransformer.Builder rowTrans = RowTransformer.newBuilder();
|
||||
// Update:
|
||||
// Switching to DAG mode will eliminate first blob
|
||||
// TODO:check correctness of ↑
|
||||
// 1. if group by is empty, first column should be "single group"
|
||||
// which is a string
|
||||
// 2. if multiple group by items present, it is wrapped inside
|
||||
// a byte array. we make a multiple decoding
|
||||
// 3. for no aggregation case, make only projected columns
|
||||
|
||||
// append aggregates if present
|
||||
if (dagRequest.hasAggregate()) {
|
||||
for (Pair<Expression, DataType> pair : dagRequest.getAggregatePairs()) {
|
||||
rowTrans.addProjection(new Cast(pair.second));
|
||||
}
|
||||
if (dagRequest.hasGroupBy()) {
|
||||
for (ByItem byItem : dagRequest.getGroupByItems()) {
|
||||
rowTrans.addProjection(new NoOp(dagRequest.getExpressionType(byItem.getExpr())));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for (Expression field : dagRequest.getFields()) {
|
||||
rowTrans.addProjection(new NoOp(dagRequest.getExpressionType(field)));
|
||||
}
|
||||
}
|
||||
rowTrans.addSourceFieldTypes(types);
|
||||
rt = rowTrans.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* TODO: order by extract field types from tiSelectRequest for reading data to row.
|
||||
*
|
||||
* @param dagRequest is SelectRequest
|
||||
*/
|
||||
private void extractFieldTypes(TiDAGRequest dagRequest) {
|
||||
if (dagRequest.hasAggregate()) {
|
||||
dagRequest.getAggregates().forEach(expr -> types.add(dagRequest.getExpressionType(expr)));
|
||||
// In DAG mode, if there is any group by statement in a request, all the columns specified
|
||||
// in group by expression will be returned, so when we decode a result row, we need to pay
|
||||
// extra attention to decoding.
|
||||
if (dagRequest.hasGroupBy()) {
|
||||
for (ByItem item : dagRequest.getGroupByItems()) {
|
||||
types.add(dagRequest.getExpressionType(item.getExpr()));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Extract all column type information from TiExpr
|
||||
dagRequest.getFields().forEach(expr -> types.add(expr.getType()));
|
||||
}
|
||||
}
|
||||
|
||||
public DataType getType(int index) {
|
||||
return types.get(index);
|
||||
}
|
||||
|
||||
public List<DataType> getTypes() {
|
||||
return types;
|
||||
}
|
||||
|
||||
public RowTransformer getRowTransformer() {
|
||||
return this.rt;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,105 @@
|
|||
/*
|
||||
*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.operation.iterator;
|
||||
|
||||
import com.google.protobuf.ByteString;
|
||||
import com.pingcap.tidb.tipb.Chunk;
|
||||
import com.pingcap.tikv.exception.TiClientInternalException;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Cursor over the rows packed inside a list of coprocessor {@link Chunk}s.
 *
 * <p>A chunk carries a flat byte buffer of row data plus per-row metadata (length and handle).
 * Subclasses produced by the static factories yield either each row's raw bytes or each row's
 * handle, using read-then-advance semantics.
 */
public abstract class ChunkIterator<T> implements Iterator<T> {

  private final List<Chunk> chunks;
  // Index of the chunk currently being read.
  protected int chunkIndex;
  // Index of the current row's metadata within the current chunk.
  protected int metaIndex;
  // Byte offset of the current row inside the current chunk's rows data.
  protected int bufOffset;
  // Set once the cursor has moved past the last row of the last non-empty chunk.
  protected boolean eof;

  /** Iterator yielding each row's raw bytes, sliced out of the chunk's rows data buffer. */
  public static ChunkIterator<ByteString> getRawBytesChunkIterator(List<Chunk> chunks) {
    return new ChunkIterator<ByteString>(chunks) {
      @Override
      public ByteString next() {
        Chunk c = chunks.get(chunkIndex);
        // Computed in long to detect offsets that would overflow the int-based substring API.
        long endOffset = c.getRowsMeta(metaIndex).getLength() + bufOffset;
        if (endOffset > Integer.MAX_VALUE) {
          throw new TiClientInternalException("Offset exceeded MAX_INT.");
        }

        ByteString result = c.getRowsData().substring(bufOffset, (int) endOffset);
        advance();
        return result;
      }
    };
  }

  /** Iterator yielding each row's handle taken from the row metadata. */
  public static ChunkIterator<Long> getHandleChunkIterator(List<Chunk> chunks) {
    return new ChunkIterator<Long>(chunks) {
      @Override
      public Long next() {
        Chunk c = chunks.get(chunkIndex);
        long result = c.getRowsMeta(metaIndex).getHandle();
        advance();
        return result;
      }
    };
  }

  protected ChunkIterator(List<Chunk> chunks) {
    // Read and then advance semantics
    this.chunks = chunks;
    this.chunkIndex = 0;
    this.metaIndex = 0;
    this.bufOffset = 0;
    // Empty input — no chunks, or a first chunk without row metadata or row data — is
    // immediate EOF.
    if (chunks.size() == 0
        || chunks.get(0).getRowsMetaCount() == 0
        || chunks.get(0).getRowsData().size() == 0) {
      eof = true;
    }
  }

  @Override
  public boolean hasNext() {
    return !eof;
  }

  /** Advances chunkIndex to the next chunk that has row metadata; false when none remain. */
  private boolean seekNextNonEmptyChunk() {
    // loop until the end of chunk list or first non empty chunk
    do {
      chunkIndex += 1;
    } while (chunkIndex < chunks.size() && chunks.get(chunkIndex).getRowsMetaCount() == 0);
    // return if remaining things left
    return chunkIndex < chunks.size();
  }

  /**
   * Moves the cursor past the row just read: bumps the byte offset by the row's length, and on
   * exhausting the current chunk either resets the cursor at the next non-empty chunk or flags
   * EOF.
   */
  protected void advance() {
    if (eof) {
      return;
    }
    Chunk c = chunks.get(chunkIndex);
    bufOffset += c.getRowsMeta(metaIndex++).getLength();
    if (metaIndex >= c.getRowsMetaCount()) {
      if (seekNextNonEmptyChunk()) {
        metaIndex = 0;
        bufOffset = 0;
      } else {
        eof = true;
      }
    }
  }
}
|
||||
|
|
@ -0,0 +1,46 @@
|
|||
/*
|
||||
* Copyright 2018 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.operation.iterator;
|
||||
|
||||
import com.google.protobuf.ByteString;
|
||||
import com.pingcap.tikv.TiSession;
|
||||
import com.pingcap.tikv.kvproto.Metapb;
|
||||
import com.pingcap.tikv.region.RegionStoreClient;
|
||||
import com.pingcap.tikv.region.TiRegion;
|
||||
import com.pingcap.tikv.util.BackOffer;
|
||||
import com.pingcap.tikv.util.ConcreteBackOffer;
|
||||
import com.pingcap.tikv.util.Pair;
|
||||
|
||||
public class ConcreteScanIterator extends ScanIterator {
|
||||
private final long version;
|
||||
|
||||
public ConcreteScanIterator(ByteString startKey, TiSession session, long version) {
|
||||
// Passing endKey as ByteString.EMPTY means that endKey is +INF by default,
|
||||
super(startKey, ByteString.EMPTY, Integer.MAX_VALUE, session);
|
||||
this.version = version;
|
||||
}
|
||||
|
||||
TiRegion loadCurrentRegionToCache() throws Exception {
|
||||
Pair<TiRegion, Metapb.Store> pair = regionCache.getRegionStorePairByKey(startKey);
|
||||
TiRegion region = pair.first;
|
||||
Metapb.Store store = pair.second;
|
||||
try (RegionStoreClient client = RegionStoreClient.create(region, store, session)) {
|
||||
BackOffer backOffer = ConcreteBackOffer.newScannerNextMaxBackOff();
|
||||
currentCache = client.scan(backOffer, startKey, version);
|
||||
return region;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,111 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.operation.iterator;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.pingcap.tidb.tipb.Chunk;
|
||||
import com.pingcap.tidb.tipb.DAGRequest;
|
||||
import com.pingcap.tikv.TiSession;
|
||||
import com.pingcap.tikv.codec.CodecDataInput;
|
||||
import com.pingcap.tikv.meta.TiDAGRequest;
|
||||
import com.pingcap.tikv.operation.SchemaInfer;
|
||||
import com.pingcap.tikv.row.Row;
|
||||
import com.pingcap.tikv.row.RowReader;
|
||||
import com.pingcap.tikv.row.RowReaderFactory;
|
||||
import com.pingcap.tikv.types.DataType;
|
||||
import com.pingcap.tikv.types.IntegerType;
|
||||
import com.pingcap.tikv.util.RangeSplitter.RegionTask;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.NoSuchElementException;
|
||||
|
||||
/**
 * Base iterator over coprocessor results for a DAG request.
 *
 * <p>Holds the shared decoding cursor (current chunk list, chunk index, data input and row
 * reader) used by subclasses; the static factories build {@link DAGIterator}-backed iterators
 * that yield either decoded rows or raw handles.
 */
public abstract class CoprocessIterator<T> implements Iterator<T> {
  protected final TiSession session;
  protected final List<RegionTask> regionTasks;
  protected DAGRequest dagRequest;
  // Schema of the handle column: a single integer-typed value.
  protected static final DataType[] handleTypes = new DataType[] {IntegerType.INT};
  //  protected final ExecutorCompletionService<Iterator<SelectResponse>> completionService;
  protected RowReader rowReader;
  protected CodecDataInput dataInput;
  protected boolean eof = false;
  // Index of the next region task to collect results from.
  protected int taskIndex;
  // Index of the chunk currently being decoded within chunkList.
  protected int chunkIndex;
  protected List<Chunk> chunkList;
  protected SchemaInfer schemaInfer;

  CoprocessIterator(
      DAGRequest req, List<RegionTask> regionTasks, TiSession session, SchemaInfer infer) {
    this.dagRequest = req;
    this.session = session;
    this.regionTasks = regionTasks;
    this.schemaInfer = infer;
  }

  /** Submits every region task for asynchronous execution. */
  abstract void submitTasks();

  /**
   * Builds an iterator producing fully decoded {@link Row}s for the given DAG request.
   *
   * @param req the request to execute
   * @param regionTasks region tasks to run
   * @param session session used for RPC execution
   */
  public static CoprocessIterator<Row> getRowIterator(
      TiDAGRequest req, List<RegionTask> regionTasks, TiSession session) {
    return new DAGIterator<Row>(
        req.buildScan(req.isIndexScan() && !req.isDoubleRead()),
        regionTasks,
        session,
        SchemaInfer.create(req),
        req.getPushDownType()) {
      @Override
      public Row next() {
        if (hasNext()) {
          return rowReader.readRow(schemaInfer.getTypes().toArray(new DataType[0]));
        } else {
          throw new NoSuchElementException();
        }
      }
    };
  }

  /**
   * Builds an iterator producing row handles for the given DAG request (index scan path).
   *
   * @param req the request to execute
   * @param regionTasks region tasks to run
   * @param session session used for RPC execution
   */
  public static CoprocessIterator<Long> getHandleIterator(
      TiDAGRequest req, List<RegionTask> regionTasks, TiSession session) {
    return new DAGIterator<Long>(
        req.buildScan(true), regionTasks, session, SchemaInfer.create(req), req.getPushDownType()) {
      @Override
      public Long next() {
        if (hasNext()) {
          return rowReader.readRow(handleTypes).getLong(0);
        } else {
          throw new NoSuchElementException();
        }
      }
    };
  }

  /** Advances to the next chunk of the current chunk list; false when no chunk remains. */
  boolean tryAdvanceChunkIndex() {
    if (chunkList == null || chunkIndex >= chunkList.size() - 1) {
      return false;
    }

    chunkIndex++;
    return true;
  }

  /** Re-creates the data input and row reader over the current chunk's row data. */
  void createDataInputReader() {
    requireNonNull(chunkList, "Chunk list should not be null.");
    if (0 > chunkIndex || chunkIndex >= chunkList.size()) {
      throw new IllegalArgumentException();
    }
    dataInput = new CodecDataInput(chunkList.get(chunkIndex).getRowsData());
    rowReader = RowReaderFactory.createRowReader(dataInput);
  }
}
|
||||
|
|
@ -0,0 +1,220 @@
|
|||
package com.pingcap.tikv.operation.iterator;
|
||||
|
||||
import static com.pingcap.tikv.meta.TiDAGRequest.PushDownType.STREAMING;
|
||||
|
||||
import com.pingcap.tidb.tipb.Chunk;
|
||||
import com.pingcap.tidb.tipb.DAGRequest;
|
||||
import com.pingcap.tidb.tipb.SelectResponse;
|
||||
import com.pingcap.tikv.TiSession;
|
||||
import com.pingcap.tikv.exception.RegionTaskException;
|
||||
import com.pingcap.tikv.exception.TiClientInternalException;
|
||||
import com.pingcap.tikv.kvproto.Coprocessor;
|
||||
import com.pingcap.tikv.kvproto.Metapb;
|
||||
import com.pingcap.tikv.meta.TiDAGRequest.PushDownType;
|
||||
import com.pingcap.tikv.operation.SchemaInfer;
|
||||
import com.pingcap.tikv.region.RegionStoreClient;
|
||||
import com.pingcap.tikv.region.TiRegion;
|
||||
import com.pingcap.tikv.util.BackOffer;
|
||||
import com.pingcap.tikv.util.ConcreteBackOffer;
|
||||
import com.pingcap.tikv.util.RangeSplitter;
|
||||
import java.util.ArrayDeque;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Queue;
|
||||
import java.util.concurrent.ExecutorCompletionService;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public abstract class DAGIterator<T> extends CoprocessIterator<T> {
|
||||
private ExecutorCompletionService<Iterator<SelectResponse>> streamingService;
|
||||
private ExecutorCompletionService<SelectResponse> dagService;
|
||||
private SelectResponse response;
|
||||
private static final Logger logger = LoggerFactory.getLogger(DAGIterator.class.getName());
|
||||
|
||||
private Iterator<SelectResponse> responseIterator;
|
||||
|
||||
private final PushDownType pushDownType;
|
||||
|
||||
/**
 * @param req compiled DAG request sent to each region
 * @param regionTasks region tasks this iterator will execute
 * @param infer schema used by subclasses to decode result rows
 * @param pushDownType NORMAL (one aggregated response per task) or STREAMING (a stream of
 *     responses per task)
 */
DAGIterator(
    DAGRequest req,
    List<RangeSplitter.RegionTask> regionTasks,
    TiSession session,
    SchemaInfer infer,
    PushDownType pushDownType) {
  super(req, regionTasks, session, infer);
  this.pushDownType = pushDownType;
  // One completion service per mode; only the service matching the mode is initialized.
  switch (pushDownType) {
    case NORMAL:
      dagService = new ExecutorCompletionService<>(session.getThreadPoolForTableScan());
      break;
    case STREAMING:
      streamingService = new ExecutorCompletionService<>(session.getThreadPoolForTableScan());
      break;
  }
  // NOTE(review): submitTasks() is overridable and runs inside the constructor, so tasks may
  // start before subclass construction completes — confirm subclasses carry no extra state.
  submitTasks();
}
|
||||
|
||||
@Override
|
||||
void submitTasks() {
|
||||
for (RangeSplitter.RegionTask task : regionTasks) {
|
||||
switch (pushDownType) {
|
||||
case STREAMING:
|
||||
streamingService.submit(() -> processByStreaming(task));
|
||||
break;
|
||||
case NORMAL:
|
||||
dagService.submit(() -> process(task));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Lazily pulls data until a readable row is available: first the next chunk of the current
 * response, then (for streaming) the next response of the current region, then the next region's
 * results. Returns false once every source is exhausted.
 */
@Override
public boolean hasNext() {
  if (eof) {
    return false;
  }

  // Loop until the current data input has readable bytes; the short-circuit ordering matters —
  // dataInput is only touched after a chunk list has been installed.
  while (chunkList == null || chunkIndex >= chunkList.size() || dataInput.available() <= 0) {
    // First we check if our chunk list has remaining chunk
    if (tryAdvanceChunkIndex()) {
      createDataInputReader();
    }
    // If not, check next region/response
    else if (pushDownType == STREAMING) {
      if (!advanceNextResponse() && !readNextRegionChunks()) {
        return false;
      }
    } else if (!readNextRegionChunks()) {
      return false;
    }
  }

  return true;
}
|
||||
|
||||
private boolean hasMoreResponse() {
|
||||
switch (pushDownType) {
|
||||
case STREAMING:
|
||||
return responseIterator != null && responseIterator.hasNext();
|
||||
case NORMAL:
|
||||
return response != null;
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException("Invalid push down type:" + pushDownType);
|
||||
}
|
||||
|
||||
private boolean advanceNextResponse() {
|
||||
if (!hasMoreResponse()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
switch (pushDownType) {
|
||||
case STREAMING:
|
||||
chunkList = responseIterator.next().getChunksList();
|
||||
break;
|
||||
case NORMAL:
|
||||
chunkList = response.getChunksList();
|
||||
break;
|
||||
}
|
||||
|
||||
if (chunkList == null || chunkList.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
chunkIndex = 0;
|
||||
createDataInputReader();
|
||||
return true;
|
||||
}
|
||||
|
||||
private boolean readNextRegionChunks() {
|
||||
if (eof || regionTasks == null || taskIndex >= regionTasks.size()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
switch (pushDownType) {
|
||||
case STREAMING:
|
||||
responseIterator = streamingService.take().get();
|
||||
break;
|
||||
case NORMAL:
|
||||
response = dagService.take().get();
|
||||
break;
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
throw new TiClientInternalException("Error reading region:", e);
|
||||
}
|
||||
|
||||
taskIndex++;
|
||||
return advanceNextResponse();
|
||||
}
|
||||
|
||||
private SelectResponse process(RangeSplitter.RegionTask regionTask) {
|
||||
Queue<RangeSplitter.RegionTask> remainTasks = new ArrayDeque<>();
|
||||
Queue<SelectResponse> responseQueue = new ArrayDeque<>();
|
||||
remainTasks.add(regionTask);
|
||||
BackOffer backOffer = ConcreteBackOffer.newCopNextMaxBackOff();
|
||||
// In case of one region task spilt into several others, we ues a queue to properly handle all
|
||||
// the remaining tasks.
|
||||
while (!remainTasks.isEmpty()) {
|
||||
RangeSplitter.RegionTask task = remainTasks.poll();
|
||||
if (task == null) continue;
|
||||
List<Coprocessor.KeyRange> ranges = task.getRanges();
|
||||
TiRegion region = task.getRegion();
|
||||
Metapb.Store store = task.getStore();
|
||||
|
||||
try {
|
||||
RegionStoreClient client = RegionStoreClient.create(region, store, session);
|
||||
Collection<RangeSplitter.RegionTask> tasks =
|
||||
client.coprocess(backOffer, dagRequest, ranges, responseQueue);
|
||||
if (tasks != null) {
|
||||
remainTasks.addAll(tasks);
|
||||
}
|
||||
} catch (Throwable e) {
|
||||
// Handle region task failed
|
||||
logger.error(
|
||||
"Process region tasks failed, remain "
|
||||
+ remainTasks.size()
|
||||
+ " tasks not executed due to",
|
||||
e);
|
||||
// Rethrow to upper levels
|
||||
eof = true;
|
||||
throw new RegionTaskException("Handle region task failed:", e);
|
||||
}
|
||||
}
|
||||
|
||||
// Add all chunks to the final result
|
||||
List<Chunk> resultChunk = new ArrayList<>();
|
||||
while (!responseQueue.isEmpty()) {
|
||||
SelectResponse response = responseQueue.poll();
|
||||
if (response != null) {
|
||||
resultChunk.addAll(response.getChunksList());
|
||||
}
|
||||
}
|
||||
|
||||
return SelectResponse.newBuilder().addAllChunks(resultChunk).build();
|
||||
}
|
||||
|
||||
private Iterator<SelectResponse> processByStreaming(RangeSplitter.RegionTask regionTask) {
|
||||
List<Coprocessor.KeyRange> ranges = regionTask.getRanges();
|
||||
TiRegion region = regionTask.getRegion();
|
||||
Metapb.Store store = regionTask.getStore();
|
||||
|
||||
RegionStoreClient client;
|
||||
try {
|
||||
client = RegionStoreClient.create(region, store, session);
|
||||
Iterator<SelectResponse> responseIterator = client.coprocessStreaming(dagRequest, ranges);
|
||||
if (responseIterator == null) {
|
||||
eof = true;
|
||||
return null;
|
||||
}
|
||||
return responseIterator;
|
||||
} catch (Exception e) {
|
||||
// TODO: Fix stale error handling in streaming
|
||||
// see:https://github.com/pingcap/tikv-client-lib-java/pull/149
|
||||
throw new TiClientInternalException("Error Closing Store client.", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,105 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.operation.iterator;
|
||||
|
||||
import com.pingcap.tikv.Snapshot;
|
||||
import com.pingcap.tikv.TiConfiguration;
|
||||
import com.pingcap.tikv.TiSession;
|
||||
import com.pingcap.tikv.exception.TiClientInternalException;
|
||||
import com.pingcap.tikv.meta.TiDAGRequest;
|
||||
import com.pingcap.tikv.row.Row;
|
||||
import com.pingcap.tikv.util.RangeSplitter;
|
||||
import com.pingcap.tikv.util.RangeSplitter.RegionTask;
|
||||
import gnu.trove.list.array.TLongArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.concurrent.ExecutorCompletionService;
|
||||
|
||||
public class IndexScanIterator implements Iterator<Row> {
|
||||
private final Iterator<Long> handleIterator;
|
||||
private final TiDAGRequest dagReq;
|
||||
private final Snapshot snapshot;
|
||||
private Iterator<Row> rowIterator;
|
||||
private final ExecutorCompletionService<Iterator<Row>> completionService;
|
||||
|
||||
private int batchCount = 0;
|
||||
private final int batchSize;
|
||||
|
||||
public IndexScanIterator(Snapshot snapshot, TiDAGRequest req, Iterator<Long> handleIterator) {
|
||||
TiSession session = snapshot.getSession();
|
||||
TiConfiguration conf = session.getConf();
|
||||
this.dagReq = req;
|
||||
this.handleIterator = handleIterator;
|
||||
this.snapshot = snapshot;
|
||||
this.batchSize = conf.getIndexScanBatchSize();
|
||||
this.completionService = new ExecutorCompletionService<>(session.getThreadPoolForIndexScan());
|
||||
}
|
||||
|
||||
private TLongArrayList feedBatch() {
|
||||
TLongArrayList handles = new TLongArrayList(512);
|
||||
while (handleIterator.hasNext()) {
|
||||
handles.add(handleIterator.next());
|
||||
if (batchSize <= handles.size()) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
return handles;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
try {
|
||||
if (rowIterator == null) {
|
||||
TiSession session = snapshot.getSession();
|
||||
while (handleIterator.hasNext()) {
|
||||
TLongArrayList handles = feedBatch();
|
||||
batchCount++;
|
||||
completionService.submit(
|
||||
() -> {
|
||||
List<RegionTask> tasks =
|
||||
RangeSplitter.newSplitter(session.getRegionManager())
|
||||
.splitAndSortHandlesByRegion(dagReq.getTableInfo().getId(), handles);
|
||||
return CoprocessIterator.getRowIterator(dagReq, tasks, session);
|
||||
});
|
||||
}
|
||||
while (batchCount > 0) {
|
||||
rowIterator = completionService.take().get();
|
||||
batchCount--;
|
||||
|
||||
if (rowIterator.hasNext()) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (rowIterator == null) {
|
||||
return false;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
throw new TiClientInternalException("Error reading rows from handle", e);
|
||||
}
|
||||
return rowIterator.hasNext();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Row next() {
|
||||
if (hasNext()) {
|
||||
return rowIterator.next();
|
||||
} else {
|
||||
throw new NoSuchElementException();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,64 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.operation.iterator;
|
||||
|
||||
import com.google.protobuf.ByteString;
|
||||
import com.pingcap.tikv.TiSession;
|
||||
import com.pingcap.tikv.key.Key;
|
||||
import com.pingcap.tikv.kvproto.Metapb;
|
||||
import com.pingcap.tikv.region.RegionStoreClient;
|
||||
import com.pingcap.tikv.region.TiRegion;
|
||||
import com.pingcap.tikv.util.BackOffer;
|
||||
import com.pingcap.tikv.util.ConcreteBackOffer;
|
||||
import com.pingcap.tikv.util.Pair;
|
||||
|
||||
public class RawScanIterator extends ScanIterator {
|
||||
|
||||
public RawScanIterator(ByteString startKey, ByteString endKey, int limit, TiSession session) {
|
||||
super(startKey, endKey, limit, session);
|
||||
}
|
||||
|
||||
TiRegion loadCurrentRegionToCache() throws Exception {
|
||||
Pair<TiRegion, Metapb.Store> pair = regionCache.getRegionStorePairByRawKey(startKey);
|
||||
TiRegion region = pair.first;
|
||||
Metapb.Store store = pair.second;
|
||||
try (RegionStoreClient client = RegionStoreClient.create(region, store, session)) {
|
||||
BackOffer backOffer = ConcreteBackOffer.newScannerNextMaxBackOff();
|
||||
if (limit <= 0) {
|
||||
currentCache = null;
|
||||
} else {
|
||||
currentCache = client.rawScan(backOffer, startKey, limit);
|
||||
}
|
||||
return region;
|
||||
}
|
||||
}
|
||||
|
||||
private boolean notEndOfScan() {
|
||||
return limit > 0
|
||||
&& !(lastBatch
|
||||
&& (index >= currentCache.size()
|
||||
|| Key.toRawKey(currentCache.get(index).getKey()).compareTo(endKey) >= 0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
if (isCacheDrained() && cacheLoadFails()) {
|
||||
endOfScan = true;
|
||||
return false;
|
||||
}
|
||||
return notEndOfScan();
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,137 @@
|
|||
/*
|
||||
* Copyright 2018 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.operation.iterator;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.google.protobuf.ByteString;
|
||||
import com.pingcap.tikv.TiSession;
|
||||
import com.pingcap.tikv.exception.TiClientInternalException;
|
||||
import com.pingcap.tikv.key.Key;
|
||||
import com.pingcap.tikv.kvproto.Kvrpcpb;
|
||||
import com.pingcap.tikv.region.RegionManager;
|
||||
import com.pingcap.tikv.region.TiRegion;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
public abstract class ScanIterator implements Iterator<Kvrpcpb.KvPair> {
|
||||
protected final TiSession session;
|
||||
protected final RegionManager regionCache;
|
||||
|
||||
protected List<Kvrpcpb.KvPair> currentCache;
|
||||
protected ByteString startKey;
|
||||
protected int index = -1;
|
||||
protected int limit;
|
||||
protected boolean endOfScan = false;
|
||||
|
||||
protected Key endKey;
|
||||
protected boolean hasEndKey;
|
||||
protected boolean lastBatch = false;
|
||||
|
||||
ScanIterator(ByteString startKey, ByteString endKey, int limit, TiSession session) {
|
||||
this.startKey = requireNonNull(startKey, "start key is null");
|
||||
if (startKey.isEmpty()) {
|
||||
throw new IllegalArgumentException("start key cannot be empty");
|
||||
}
|
||||
this.endKey = Key.toRawKey(requireNonNull(endKey, "end key is null"));
|
||||
this.hasEndKey = !endKey.equals(ByteString.EMPTY);
|
||||
this.limit = limit;
|
||||
this.session = session;
|
||||
this.regionCache = session.getRegionManager();
|
||||
}
|
||||
|
||||
abstract TiRegion loadCurrentRegionToCache() throws Exception;
|
||||
|
||||
// return true if current cache is not loaded or empty
|
||||
boolean cacheLoadFails() {
|
||||
if (endOfScan || lastBatch) {
|
||||
return true;
|
||||
}
|
||||
if (startKey.isEmpty()) {
|
||||
return true;
|
||||
}
|
||||
try {
|
||||
TiRegion region = loadCurrentRegionToCache();
|
||||
ByteString curRegionEndKey = region.getEndKey();
|
||||
// currentCache is null means no keys found, whereas currentCache is empty means no values
|
||||
// found
|
||||
// the difference lies in whether to continue scanning, because chances are that the same key
|
||||
// is
|
||||
// split in another region because of pending entries, region split, e.t.c.
|
||||
// See https://github.com/pingcap/tispark/issues/393 for details
|
||||
if (currentCache == null) {
|
||||
return true;
|
||||
}
|
||||
index = 0;
|
||||
Key lastKey = Key.EMPTY;
|
||||
// Session should be single-threaded itself
|
||||
// so that we don't worry about conf change in the middle
|
||||
// of a transaction. Otherwise below code might lose data
|
||||
if (currentCache.size() < session.getConf().getScanBatchSize()) {
|
||||
startKey = curRegionEndKey;
|
||||
} else {
|
||||
// Start new scan from exact next key in current region
|
||||
lastKey = Key.toRawKey(currentCache.get(currentCache.size() - 1).getKey());
|
||||
startKey = lastKey.next().toByteString();
|
||||
}
|
||||
// notify last batch if lastKey is greater than or equal to endKey
|
||||
if (hasEndKey && lastKey.compareTo(endKey) >= 0) {
|
||||
lastBatch = true;
|
||||
startKey = null;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
throw new TiClientInternalException("Error scanning data from region.", e);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
boolean isCacheDrained() {
|
||||
return currentCache == null || limit <= 0 || index >= currentCache.size() || index == -1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
if (isCacheDrained() && cacheLoadFails()) {
|
||||
endOfScan = true;
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private Kvrpcpb.KvPair getCurrent() {
|
||||
if (isCacheDrained()) {
|
||||
return null;
|
||||
}
|
||||
if (index < currentCache.size()) {
|
||||
--limit;
|
||||
return currentCache.get(index++);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Kvrpcpb.KvPair next() {
|
||||
Kvrpcpb.KvPair kv = getCurrent();
|
||||
if (kv == null) {
|
||||
// cache drained
|
||||
if (cacheLoadFails()) {
|
||||
return null;
|
||||
}
|
||||
return getCurrent();
|
||||
}
|
||||
return kv;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,97 @@
|
|||
/*
|
||||
*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.operation.transformer;
|
||||
|
||||
import com.pingcap.tikv.row.Row;
|
||||
import com.pingcap.tikv.types.*;
|
||||
import java.math.BigDecimal;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
public class Cast extends NoOp {
|
||||
public Cast(DataType type) {
|
||||
super(type);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void set(Object value, Row row, int pos) {
|
||||
Object casted;
|
||||
if (value == null) {
|
||||
row.set(pos, targetDataType, null);
|
||||
return;
|
||||
}
|
||||
if (targetDataType instanceof IntegerType) {
|
||||
casted = castToLong(value);
|
||||
} else if (targetDataType instanceof StringType) {
|
||||
casted = castToString(value);
|
||||
} else if (targetDataType instanceof BytesType) {
|
||||
casted = castToBinary(value);
|
||||
} else if (targetDataType instanceof DecimalType) {
|
||||
casted = castToDecimal(value);
|
||||
} else if (targetDataType instanceof RealType) {
|
||||
casted = castToDouble(value);
|
||||
} else {
|
||||
casted = value;
|
||||
}
|
||||
row.set(pos, targetDataType, casted);
|
||||
}
|
||||
|
||||
private Double castToDouble(Object obj) {
|
||||
if (obj instanceof Number) {
|
||||
Number num = (Number) obj;
|
||||
return num.doubleValue();
|
||||
}
|
||||
throw new UnsupportedOperationException("can not cast un-number to double ");
|
||||
}
|
||||
|
||||
private BigDecimal castToDecimal(Object obj) {
|
||||
if (obj instanceof Number) {
|
||||
Number num = (Number) obj;
|
||||
return new BigDecimal(num.doubleValue());
|
||||
}
|
||||
throw new UnsupportedOperationException(
|
||||
"Cannot cast to BigDecimal: " + (obj == null ? "null" : obj.getClass().getSimpleName()));
|
||||
}
|
||||
|
||||
private Long castToLong(Object obj) {
|
||||
if (obj instanceof Number) {
|
||||
Number num = (Number) obj;
|
||||
return num.longValue();
|
||||
}
|
||||
throw new UnsupportedOperationException("can not cast un-number to long ");
|
||||
}
|
||||
|
||||
private String castToString(Object obj) {
|
||||
String result;
|
||||
if (obj instanceof byte[]) {
|
||||
result = new String((byte[]) obj, StandardCharsets.UTF_8);
|
||||
} else if (obj instanceof char[]) {
|
||||
result = new String((char[]) obj);
|
||||
} else {
|
||||
result = String.valueOf(obj);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private byte[] castToBinary(Object obj) {
|
||||
if (obj instanceof byte[]) {
|
||||
return (byte[]) obj;
|
||||
} else {
|
||||
return obj.toString().getBytes();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,60 @@
|
|||
/*
|
||||
*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.operation.transformer;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.pingcap.tikv.codec.CodecDataInput;
|
||||
import com.pingcap.tikv.row.Row;
|
||||
import com.pingcap.tikv.types.DataType;
|
||||
import java.util.List;
|
||||
|
||||
public class MultiKeyDecoder implements Projection {
|
||||
public MultiKeyDecoder(List<DataType> dataTypes) {
|
||||
this.resultTypes = requireNonNull(dataTypes).toArray(new DataType[0]);
|
||||
}
|
||||
|
||||
private DataType[] resultTypes;
|
||||
|
||||
@Override
|
||||
public void set(Object value, Row row, int pos) {
|
||||
byte[] rowData = (byte[]) value;
|
||||
CodecDataInput cdi = new CodecDataInput(rowData);
|
||||
|
||||
for (int i = 0; i < resultTypes.length; i++) {
|
||||
DataType type = resultTypes[i];
|
||||
if (type.isNextNull(cdi)) {
|
||||
cdi.readUnsignedByte();
|
||||
row.setNull(i + pos);
|
||||
} else {
|
||||
row.set(i + pos, type, type.decode(cdi));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return resultTypes.length;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<DataType> getTypes() {
|
||||
return ImmutableList.copyOf(resultTypes);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,47 @@
|
|||
/*
|
||||
*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.operation.transformer;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.pingcap.tikv.row.Row;
|
||||
import com.pingcap.tikv.types.DataType;
|
||||
import java.util.List;
|
||||
|
||||
/** Noop is a base type projection, it basically do nothing but copy. */
|
||||
public class NoOp implements Projection {
|
||||
protected DataType targetDataType;
|
||||
|
||||
public NoOp(DataType dataType) {
|
||||
this.targetDataType = dataType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void set(Object value, Row row, int pos) {
|
||||
row.set(pos, targetDataType, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<DataType> getTypes() {
|
||||
return ImmutableList.of(targetDataType);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
/*
|
||||
*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.operation.transformer;
|
||||
|
||||
import com.pingcap.tikv.row.Row;
|
||||
import com.pingcap.tikv.types.DataType;
|
||||
import java.util.List;
|
||||
|
||||
public interface Projection {
|
||||
void set(Object value, Row row, int pos);
|
||||
|
||||
int size();
|
||||
|
||||
List<DataType> getTypes();
|
||||
}
|
||||
|
|
@ -0,0 +1,131 @@
|
|||
/*
|
||||
*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.operation.transformer;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.pingcap.tikv.row.ObjectRowImpl;
|
||||
import com.pingcap.tikv.row.Row;
|
||||
import com.pingcap.tikv.types.DataType;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* RowTransformer is used along with SchemaInfer and row and provide some operation. If you do not
|
||||
* know your target FieldType, then you do not need use this interface. The reason we provide this
|
||||
* interface is that sometime we need use it. Suppose we have a table t1 and have two column c1 and
|
||||
* s1 select sum(c1) from t1 will return SingleGroup literally and sum(c1). SingleGroup should be
|
||||
* skipped. Hence, skip operation is needed here. Another usage is that sum(c1)'s type is decimal no
|
||||
* matter what real column type is. We need cast it to target type which is column's type. Hence,
|
||||
* cast operation is needed. RowTransformer is executed after row is already read from
|
||||
* CodecDataInput.
|
||||
*/
|
||||
public class RowTransformer {
|
||||
public static Builder newBuilder() {
|
||||
return new Builder();
|
||||
}
|
||||
|
||||
/** A Builder can build a RowTransformer. */
|
||||
public static class Builder {
|
||||
private final List<Projection> projections = new ArrayList<>();
|
||||
private final List<DataType> sourceTypes = new ArrayList<>();
|
||||
|
||||
public RowTransformer build() {
|
||||
return new RowTransformer(sourceTypes, projections);
|
||||
}
|
||||
|
||||
public Builder addProjection(Projection projection) {
|
||||
this.projections.add(projection);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder addProjections(Projection... projections) {
|
||||
this.projections.addAll(Arrays.asList(projections));
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder addSourceFieldType(DataType fieldType) {
|
||||
this.sourceTypes.add(fieldType);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder addSourceFieldTypes(DataType... fieldTypes) {
|
||||
this.sourceTypes.addAll(Arrays.asList(fieldTypes));
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder addSourceFieldTypes(List<DataType> fieldTypes) {
|
||||
this.sourceTypes.addAll(fieldTypes);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
private final List<Projection> projections;
|
||||
|
||||
private final List<DataType> sourceFieldTypes;
|
||||
|
||||
private RowTransformer(List<DataType> sourceTypes, List<Projection> projections) {
|
||||
this.sourceFieldTypes = ImmutableList.copyOf(requireNonNull(sourceTypes));
|
||||
this.projections = ImmutableList.copyOf(requireNonNull(projections));
|
||||
}
|
||||
|
||||
/**
|
||||
* Transforms input row to a output row according projections operator passed on creation of this
|
||||
* RowTransformer.
|
||||
*
|
||||
* @param inRow input row that need to be transformed.
|
||||
* @return a row that is already transformed.
|
||||
*/
|
||||
public Row transform(Row inRow) {
|
||||
// After transform the length of row is probably not same as the input row.
|
||||
// we need calculate the new length.
|
||||
Row outRow = ObjectRowImpl.create(newRowLength());
|
||||
|
||||
int offset = 0;
|
||||
for (int i = 0; i < inRow.fieldCount(); i++) {
|
||||
Object inVal = inRow.get(i, sourceFieldTypes.get(i));
|
||||
Projection p = getProjection(i);
|
||||
p.set(inVal, outRow, offset);
|
||||
offset += p.size();
|
||||
}
|
||||
return outRow;
|
||||
}
|
||||
|
||||
private Projection getProjection(int index) {
|
||||
return projections.get(index);
|
||||
}
|
||||
|
||||
/**
|
||||
* Collect output row's length.
|
||||
*
|
||||
* @return a int which is the new length of output row.
|
||||
*/
|
||||
private int newRowLength() {
|
||||
return this.projections.stream().reduce(0, (sum, p) -> sum += p.size(), (s1, s2) -> s1 + s2);
|
||||
}
|
||||
|
||||
public List<DataType> getTypes() {
|
||||
return projections
|
||||
.stream()
|
||||
.flatMap(proj -> proj.getTypes().stream())
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,40 @@
|
|||
/*
|
||||
*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.operation.transformer;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.pingcap.tikv.row.Row;
|
||||
import com.pingcap.tikv.types.DataType;
|
||||
import java.util.List;
|
||||
|
||||
public class Skip implements Projection {
|
||||
public static final Skip SKIP_OP = new Skip();
|
||||
|
||||
@Override
|
||||
public void set(Object value, Row row, int pos) {}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<DataType> getTypes() {
|
||||
return ImmutableList.of();
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.policy;
|
||||
|
||||
import com.pingcap.tikv.operation.ErrorHandler;
|
||||
import com.pingcap.tikv.util.BackOffer;
|
||||
|
||||
public class RetryMaxMs<T> extends RetryPolicy<T> {
|
||||
private RetryMaxMs(ErrorHandler<T> handler, BackOffer backOffer) {
|
||||
super(handler);
|
||||
this.backOffer = backOffer;
|
||||
}
|
||||
|
||||
public static class Builder<T> implements RetryPolicy.Builder<T> {
|
||||
private BackOffer backOffer;
|
||||
|
||||
public Builder(BackOffer backOffer) {
|
||||
this.backOffer = backOffer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public RetryPolicy<T> create(ErrorHandler<T> handler) {
|
||||
return new RetryMaxMs<>(handler, backOffer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,78 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.pingcap.tikv.policy;
|
||||
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import com.pingcap.tikv.exception.GrpcException;
|
||||
import com.pingcap.tikv.operation.ErrorHandler;
|
||||
import com.pingcap.tikv.util.BackOffer;
|
||||
import com.pingcap.tikv.util.ConcreteBackOffer;
|
||||
import io.grpc.Status;
|
||||
import java.util.concurrent.Callable;
|
||||
|
||||
public abstract class RetryPolicy<RespT> {
|
||||
BackOffer backOffer = ConcreteBackOffer.newCopNextMaxBackOff();
|
||||
|
||||
// handles PD and TiKV's error.
|
||||
private ErrorHandler<RespT> handler;
|
||||
|
||||
private ImmutableSet<Status.Code> unrecoverableStatus =
|
||||
ImmutableSet.of(
|
||||
Status.Code.ALREADY_EXISTS, Status.Code.PERMISSION_DENIED,
|
||||
Status.Code.INVALID_ARGUMENT, Status.Code.NOT_FOUND,
|
||||
Status.Code.UNIMPLEMENTED, Status.Code.OUT_OF_RANGE,
|
||||
Status.Code.UNAUTHENTICATED, Status.Code.CANCELLED);
|
||||
|
||||
RetryPolicy(ErrorHandler<RespT> handler) {
|
||||
this.handler = handler;
|
||||
}
|
||||
|
||||
private void rethrowNotRecoverableException(Exception e) {
|
||||
Status status = Status.fromThrowable(e);
|
||||
if (unrecoverableStatus.contains(status.getCode())) {
|
||||
throw new GrpcException(e);
|
||||
}
|
||||
}
|
||||
|
||||
public RespT callWithRetry(Callable<RespT> proc, String methodName) {
|
||||
while (true) {
|
||||
RespT result = null;
|
||||
try {
|
||||
result = proc.call();
|
||||
} catch (Exception e) {
|
||||
rethrowNotRecoverableException(e);
|
||||
// Handle request call error
|
||||
boolean retry = handler.handleRequestError(backOffer, e);
|
||||
if (retry) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Handle response error
|
||||
if (handler != null) {
|
||||
boolean retry = handler.handleResponseError(backOffer, result);
|
||||
if (retry) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
public interface Builder<T> {
|
||||
RetryPolicy<T> create(ErrorHandler<T> handler);
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue