mirror of https://github.com/tikv/client-java.git
commit 43df304e3c

pom.xml
@@ -34,93 +34,15 @@
    <dependencies>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-api</artifactId>
            <version>2.8.1</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-core</artifactId>
            <version>2.8.1</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_${scala.binary.version}</artifactId>
            <version>${spark.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-catalyst_${scala.binary.version}</artifactId>
            <version>${spark.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_${scala.binary.version}</artifactId>
            <version>${spark.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-hive_${scala.binary.version}</artifactId>
            <version>${spark.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-hive-thriftserver_${scala.binary.version}</artifactId>
            <version>${spark.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-unsafe_${scala.binary.version}</artifactId>
            <version>${spark.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>${slf4j.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
            <version>${slf4j.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>jul-to-slf4j</artifactId>
            <version>${slf4j.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>jcl-over-slf4j</artifactId>
            <version>${slf4j.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
            <version>3.5</version>
        </dependency>
        <dependency>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
            <version>${log4j.version}</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/net.sf.trove4j/trove4j -->
        <dependency>
            <groupId>net.sf.trove4j</groupId>
            <artifactId>trove4j</artifactId>
            <version>${trove4j.version}</version>
        </dependency>
        <dependency>
            <groupId>com.sangupta</groupId>
            <artifactId>murmur</artifactId>
            <version>1.0.0</version>
        </dependency>
        <!-- grpc dependencies -->
        <dependency>
            <groupId>io.grpc</groupId>
@@ -137,37 +59,6 @@
            <artifactId>grpc-stub</artifactId>
            <version>${grpc.version}</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-annotations</artifactId>
            <version>${jackson.version}</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
            <version>${jackson.databind.version}</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.module</groupId>
            <artifactId>jackson-module-scala_${scala.binary.version}</artifactId>
            <version>${jackson.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>com.google.guava</groupId>
                    <artifactId>guava</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>joda-time</groupId>
            <artifactId>joda-time</artifactId>
            <version>${joda-time.version}</version>
        </dependency>
        <dependency>
            <groupId>org.joda</groupId>
            <artifactId>joda-convert</artifactId>
            <version>${joda-convert.version}</version>
        </dependency>
        <dependency>
            <groupId>io.grpc</groupId>
            <artifactId>grpc-testing</artifactId>
@@ -220,7 +111,7 @@
                <outputDirectory>${proto.folder}</outputDirectory>
                <resources>
                    <resource>
                        <directory>${basedir}/kvproto/vendor/github.com/gogo/protobuf</directory>
                        <directory>${basedir}/kvproto/include</directory>
                        <includes>
                            <include>**/gogoproto/**</include>
                        </includes>
@@ -34,7 +34,6 @@ import org.tikv.codec.Codec.BytesCodec;
import org.tikv.codec.CodecDataOutput;
import org.tikv.exception.GrpcException;
import org.tikv.exception.TiClientInternalException;
import org.tikv.kvproto.Kvrpcpb.IsolationLevel;
import org.tikv.kvproto.Metapb.Store;
import org.tikv.kvproto.PDGrpc;
import org.tikv.kvproto.PDGrpc.PDBlockingStub;
@@ -62,7 +61,6 @@ public class PDClient extends AbstractGRPCClient<PDBlockingStub, PDStub>
  private TsoRequest tsoReq;
  private volatile LeaderWrapper leaderWrapper;
  private ScheduledExecutorService service;
  private IsolationLevel isolationLevel;
  private List<HostAndPort> pdAddrs;

  @Override
@@ -15,8 +15,6 @@
package org.tikv;

import static org.tikv.operation.iterator.CoprocessIterator.getHandleIterator;
import static org.tikv.operation.iterator.CoprocessIterator.getRowIterator;
import static org.tikv.util.KeyRangeUtils.makeRange;

import com.google.common.collect.Range;
@@ -28,18 +26,13 @@ import org.tikv.exception.TiClientInternalException;
import org.tikv.key.Key;
import org.tikv.kvproto.Kvrpcpb.KvPair;
import org.tikv.kvproto.Metapb.Store;
import org.tikv.meta.TiDAGRequest;
import org.tikv.meta.TiTimestamp;
import org.tikv.operation.iterator.ConcreteScanIterator;
import org.tikv.operation.iterator.IndexScanIterator;
import org.tikv.region.RegionStoreClient;
import org.tikv.region.TiRegion;
import org.tikv.row.Row;
import org.tikv.util.BackOffer;
import org.tikv.util.ConcreteBackOffer;
import org.tikv.util.Pair;
import org.tikv.util.RangeSplitter;
import org.tikv.util.RangeSplitter.RegionTask;

public class Snapshot {
  private final TiTimestamp timestamp;
@@ -77,59 +70,6 @@ public class Snapshot {
    return client.get(ConcreteBackOffer.newGetBackOff(), key, timestamp.getVersion());
  }

  /**
   * Issue a table read request
   *
   * @param dagRequest DAG request for coprocessor
   * @return an Iterator over all results of this select request.
   */
  public Iterator<Row> tableRead(TiDAGRequest dagRequest) {
    if (dagRequest.isIndexScan()) {
      Iterator<Long> iter =
          getHandleIterator(
              dagRequest,
              RangeSplitter.newSplitter(session.getRegionManager())
                  .splitRangeByRegion(dagRequest.getRanges()),
              session);
      return new IndexScanIterator(this, dagRequest, iter);
    } else {
      return getRowIterator(
          dagRequest,
          RangeSplitter.newSplitter(session.getRegionManager())
              .splitRangeByRegion(dagRequest.getRanges()),
          session);
    }
  }

  /**
   * Lower-level API performing a table scan, for environments such as Spark that have already
   * split the key ranges.
   *
   * @param dagRequest DAGRequest for coprocessor
   * @param task RegionTasks of the coprocessor request to send
   * @return Row iterator to iterate over resulting rows
   */
  public Iterator<Row> tableRead(TiDAGRequest dagRequest, List<RegionTask> task) {
    if (dagRequest.isDoubleRead()) {
      Iterator<Long> iter = getHandleIterator(dagRequest, task, session);
      return new IndexScanIterator(this, dagRequest, iter);
    } else {
      return getRowIterator(dagRequest, task, session);
    }
  }

  /**
   * Lower-level API performing a handle scan, for environments such as Spark that have already
   * split the key ranges.
   *
   * @param dagRequest DAGRequest for coprocessor
   * @param tasks RegionTasks of the coprocessor request to send
   * @return iterator over the resulting row handles
   */
  public Iterator<Long> indexHandleRead(TiDAGRequest dagRequest, List<RegionTask> tasks) {
    return getHandleIterator(dagRequest, tasks, session);
  }

  public Iterator<KvPair> scan(ByteString startKey) {
    return new ConcreteScanIterator(startKey, session, timestamp.getVersion());
  }
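
A minimal usage sketch of how these entry points chain together. The
TiConfiguration.createDefault factory and the DAG-request construction are
assumptions for illustration, not part of this commit:

import java.util.Iterator;
import org.tikv.Snapshot;
import org.tikv.TiConfiguration;
import org.tikv.TiSession;
import org.tikv.meta.TiDAGRequest;
import org.tikv.row.Row;

public class SnapshotReadSketch {
  public static void main(String[] args) throws Exception {
    TiConfiguration conf = TiConfiguration.createDefault("127.0.0.1:2379"); // assumed factory
    try (TiSession session = TiSession.create(conf)) {
      Snapshot snapshot = session.createSnapshot(); // reads at a fresh PD timestamp
      TiDAGRequest dagRequest = buildRequest(session); // hypothetical helper
      Iterator<Row> rows = snapshot.tableRead(dagRequest); // splits key ranges by region
      while (rows.hasNext()) {
        Row row = rows.next();
        // consume row ...
      }
    }
  }

  private static TiDAGRequest buildRequest(TiSession session) {
    throw new UnsupportedOperationException("construct a TiDAGRequest for your table");
  }
}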
@@ -16,31 +16,19 @@
package org.tikv;

import com.google.common.net.HostAndPort;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import org.tikv.catalog.Catalog;
import org.tikv.event.CacheInvalidateEvent;
import org.tikv.meta.TiTimestamp;
import org.tikv.region.RegionManager;
import org.tikv.util.ConcreteBackOffer;

public class TiSession implements AutoCloseable {
  private static final Map<String, ManagedChannel> connPool = new HashMap<>();
  private final TiConfiguration conf;
  private Function<CacheInvalidateEvent, Void> cacheInvalidateCallback;
  // The objects below are either heavy to create or open connections (e.g., to PD),
  // so they are initialized lazily.
  private volatile RegionManager regionManager;
  private volatile PDClient client;
  private volatile Catalog catalog;
  private volatile ExecutorService indexScanThreadPool;
  private volatile ExecutorService tableScanThreadPool;

  public TiSession(TiConfiguration conf) {
    this.conf = conf;
@@ -50,18 +38,6 @@ public class TiSession implements AutoCloseable {
    return conf;
  }

  public TiTimestamp getTimestamp() {
    return getPDClient().getTimestamp(ConcreteBackOffer.newTsoBackOff());
  }

  public Snapshot createSnapshot() {
    return new Snapshot(getTimestamp(), this);
  }

  public Snapshot createSnapshot(TiTimestamp ts) {
    return new Snapshot(ts, this);
  }

  public PDClient getPDClient() {
    PDClient res = client;
    if (res == null) {
@@ -75,25 +51,6 @@ public class TiSession implements AutoCloseable {
    return res;
  }

  public Catalog getCatalog() {
    Catalog res = catalog;
    if (res == null) {
      synchronized (this) {
        if (catalog == null) {
          catalog =
              new Catalog(
                  this::createSnapshot,
                  conf.getMetaReloadPeriod(),
                  conf.getMetaReloadPeriodUnit(),
                  conf.ifShowRowId(),
                  conf.getDBPrefix());
        }
        res = catalog;
      }
    }
    return res;
  }

  public synchronized RegionManager getRegionManager() {
    RegionManager res = regionManager;
    if (res == null) {
@@ -130,58 +87,10 @@ public class TiSession implements AutoCloseable {
    return channel;
  }

  public ExecutorService getThreadPoolForIndexScan() {
    ExecutorService res = indexScanThreadPool;
    if (res == null) {
      synchronized (this) {
        if (indexScanThreadPool == null) {
          indexScanThreadPool =
              Executors.newFixedThreadPool(
                  conf.getIndexScanConcurrency(),
                  new ThreadFactoryBuilder().setDaemon(true).build());
        }
        res = indexScanThreadPool;
      }
    }
    return res;
  }

  public ExecutorService getThreadPoolForTableScan() {
    ExecutorService res = tableScanThreadPool;
    if (res == null) {
      synchronized (this) {
        if (tableScanThreadPool == null) {
          tableScanThreadPool =
              Executors.newFixedThreadPool(
                  conf.getTableScanConcurrency(),
                  new ThreadFactoryBuilder().setDaemon(true).build());
        }
        res = tableScanThreadPool;
      }
    }
    return res;
  }

  public static TiSession create(TiConfiguration conf) {
    return new TiSession(conf);
  }

  public Function<CacheInvalidateEvent, Void> getCacheInvalidateCallback() {
    return cacheInvalidateCallback;
  }

  /**
   * Sets the callback function used to invalidate cached information.
   *
   * @param callBackFunc callback function
   */
  public void injectCallBackFunc(Function<CacheInvalidateEvent, Void> callBackFunc) {
    this.cacheInvalidateCallback = callBackFunc;
  }

  @Override
  public void close() throws Exception {
    getThreadPoolForTableScan().shutdownNow();
    getThreadPoolForIndexScan().shutdownNow();
  }

  public void close() throws Exception {}
}
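
getCatalog, getThreadPoolForIndexScan and getThreadPoolForTableScan above all
use the same double-checked locking idiom over a volatile field: one volatile
read on the fast path and a second check under the lock before constructing.
A generic sketch of the pattern (illustrative, not part of this commit):

import java.util.function.Supplier;

public class Lazy<T> {
  private final Supplier<T> factory;
  // volatile so a fully constructed value is published safely to other threads
  private volatile T value;

  public Lazy(Supplier<T> factory) {
    this.factory = factory;
  }

  public T get() {
    T res = value; // single volatile read on the fast path
    if (res == null) {
      synchronized (this) {
        if (value == null) {
          value = factory.get();
        }
        res = value;
      }
    }
    return res;
  }
}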
@@ -1,226 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.catalog;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import org.apache.log4j.Logger;
import org.tikv.Snapshot;
import org.tikv.meta.TiDBInfo;
import org.tikv.meta.TiTableInfo;

public class Catalog implements AutoCloseable {
  private Supplier<Snapshot> snapshotProvider;
  private ScheduledExecutorService service;
  private CatalogCache metaCache;
  private final boolean showRowId;
  private final String dbPrefix;
  private final Logger logger = Logger.getLogger(this.getClass());

  @Override
  public void close() throws Exception {
    if (service != null) {
      service.shutdown();
    }
  }

  private static class CatalogCache {

    private CatalogCache(CatalogTransaction transaction, String dbPrefix, boolean loadTables) {
      this.transaction = transaction;
      this.dbPrefix = dbPrefix;
      this.dbCache = loadDatabases(loadTables);
      this.tableCache = new ConcurrentHashMap<>();
      this.currentVersion = transaction.getLatestSchemaVersion();
    }

    private final Map<String, TiDBInfo> dbCache;
    private final ConcurrentHashMap<TiDBInfo, Map<String, TiTableInfo>> tableCache;
    private CatalogTransaction transaction;
    private long currentVersion;
    private final String dbPrefix;

    public CatalogTransaction getTransaction() {
      return transaction;
    }

    public long getVersion() {
      return currentVersion;
    }

    public TiDBInfo getDatabase(String name) {
      Objects.requireNonNull(name, "name is null");
      return dbCache.get(name.toLowerCase());
    }

    public List<TiDBInfo> listDatabases() {
      return ImmutableList.copyOf(dbCache.values());
    }

    public List<TiTableInfo> listTables(TiDBInfo db) {
      Map<String, TiTableInfo> tableMap = tableCache.get(db);
      if (tableMap == null) {
        tableMap = loadTables(db);
      }
      return ImmutableList.copyOf(tableMap.values());
    }

    public TiTableInfo getTable(TiDBInfo db, String tableName) {
      Map<String, TiTableInfo> tableMap = tableCache.get(db);
      if (tableMap == null) {
        tableMap = loadTables(db);
      }
      return tableMap.get(tableName.toLowerCase());
    }

    private Map<String, TiTableInfo> loadTables(TiDBInfo db) {
      List<TiTableInfo> tables = transaction.getTables(db.getId());
      ImmutableMap.Builder<String, TiTableInfo> builder = ImmutableMap.builder();
      for (TiTableInfo table : tables) {
        builder.put(table.getName().toLowerCase(), table);
      }
      Map<String, TiTableInfo> tableMap = builder.build();
      tableCache.put(db, tableMap);
      return tableMap;
    }

    private Map<String, TiDBInfo> loadDatabases(boolean loadTables) {
      HashMap<String, TiDBInfo> newDBCache = new HashMap<>();

      List<TiDBInfo> databases = transaction.getDatabases();
      databases.forEach(
          db -> {
            TiDBInfo newDBInfo = db.rename(dbPrefix + db.getName());
            newDBCache.put(newDBInfo.getName().toLowerCase(), newDBInfo);
            if (loadTables) {
              loadTables(newDBInfo);
            }
          });
      return newDBCache;
    }
  }

  public Catalog(
      Supplier<Snapshot> snapshotProvider,
      int refreshPeriod,
      TimeUnit periodUnit,
      boolean showRowId,
      String dbPrefix) {
    this.snapshotProvider = Objects.requireNonNull(snapshotProvider, "Snapshot Provider is null");
    this.showRowId = showRowId;
    this.dbPrefix = dbPrefix;
    metaCache = new CatalogCache(new CatalogTransaction(snapshotProvider.get()), dbPrefix, false);
    service =
        Executors.newSingleThreadScheduledExecutor(
            new ThreadFactoryBuilder().setDaemon(true).build());
    service.scheduleAtFixedRate(
        () -> {
          // Wrap in try-catch so a failed scheduled reload does not kill the timer thread.
          try {
            reloadCache();
          } catch (Exception e) {
            logger.warn("Reload Cache failed", e);
          }
        },
        refreshPeriod,
        refreshPeriod,
        periodUnit);
  }

  public void reloadCache(boolean loadTables) {
    Snapshot snapshot = snapshotProvider.get();
    CatalogTransaction newTrx = new CatalogTransaction(snapshot);
    long latestVersion = newTrx.getLatestSchemaVersion();
    if (latestVersion > metaCache.getVersion()) {
      metaCache = new CatalogCache(newTrx, dbPrefix, loadTables);
    }
  }

  @VisibleForTesting
  public void reloadCache() {
    reloadCache(false);
  }

  public List<TiDBInfo> listDatabases() {
    return metaCache.listDatabases();
  }

  public List<TiTableInfo> listTables(TiDBInfo database) {
    Objects.requireNonNull(database, "database is null");
    if (showRowId) {
      return metaCache
          .listTables(database)
          .stream()
          .map(TiTableInfo::copyTableWithRowId)
          .collect(Collectors.toList());
    } else {
      return metaCache.listTables(database);
    }
  }

  public TiDBInfo getDatabase(String dbName) {
    Objects.requireNonNull(dbName, "dbName is null");
    return metaCache.getDatabase(dbName);
  }

  public TiTableInfo getTable(String dbName, String tableName) {
    TiDBInfo database = getDatabase(dbName);
    if (database == null) {
      return null;
    }
    return getTable(database, tableName);
  }

  public TiTableInfo getTable(TiDBInfo database, String tableName) {
    Objects.requireNonNull(database, "database is null");
    Objects.requireNonNull(tableName, "tableName is null");
    TiTableInfo table = metaCache.getTable(database, tableName);
    if (showRowId) {
      return table.copyTableWithRowId();
    } else {
      return table;
    }
  }

  @VisibleForTesting
  public TiTableInfo getTable(TiDBInfo database, long tableId) {
    Objects.requireNonNull(database, "database is null");
    Collection<TiTableInfo> tables = listTables(database);
    for (TiTableInfo table : tables) {
      if (table.getId() == tableId) {
        if (showRowId) {
          return table.copyTableWithRowId();
        } else {
          return table;
        }
      }
    }
    return null;
  }
}
@@ -1,183 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.catalog;

import static com.google.common.base.Preconditions.checkArgument;

import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.protobuf.ByteString;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import org.apache.log4j.Logger;
import org.tikv.Snapshot;
import org.tikv.codec.Codec.BytesCodec;
import org.tikv.codec.Codec.IntegerCodec;
import org.tikv.codec.CodecDataInput;
import org.tikv.codec.CodecDataOutput;
import org.tikv.codec.KeyUtils;
import org.tikv.exception.TiClientInternalException;
import org.tikv.kvproto.Kvrpcpb;
import org.tikv.meta.TiDBInfo;
import org.tikv.meta.TiTableInfo;
import org.tikv.util.Pair;

public class CatalogTransaction {
  protected static final Logger logger = Logger.getLogger(Catalog.class);
  private final Snapshot snapshot;
  private final byte[] prefix;

  private static final byte[] META_PREFIX = new byte[] {'m'};

  private static final byte HASH_DATA_FLAG = 'h';
  private static final byte STR_DATA_FLAG = 's';

  private static ByteString KEY_DB = ByteString.copyFromUtf8("DBs");
  private static ByteString KEY_TABLE = ByteString.copyFromUtf8("Table");
  private static ByteString KEY_SCHEMA_VERSION = ByteString.copyFromUtf8("SchemaVersionKey");

  private static final String ENCODED_DB_PREFIX = "DB";

  public CatalogTransaction(Snapshot snapshot) {
    this.snapshot = snapshot;
    this.prefix = META_PREFIX;
  }

  private void encodeStringDataKey(CodecDataOutput cdo, byte[] key) {
    cdo.write(prefix);
    BytesCodec.writeBytes(cdo, key);
    IntegerCodec.writeULong(cdo, STR_DATA_FLAG);
  }

  private void encodeHashDataKey(CodecDataOutput cdo, byte[] key, byte[] field) {
    encodeHashDataKeyPrefix(cdo, key);
    BytesCodec.writeBytes(cdo, field);
  }

  private void encodeHashDataKeyPrefix(CodecDataOutput cdo, byte[] key) {
    cdo.write(prefix);
    BytesCodec.writeBytes(cdo, key);
    IntegerCodec.writeULong(cdo, HASH_DATA_FLAG);
  }

  private Pair<ByteString, ByteString> decodeHashDataKey(ByteString rawKey) {
    checkArgument(
        KeyUtils.hasPrefix(rawKey, ByteString.copyFrom(prefix)),
        "invalid encoded hash data key prefix: " + new String(prefix));
    CodecDataInput cdi = new CodecDataInput(rawKey.toByteArray());
    cdi.skipBytes(prefix.length);
    byte[] key = BytesCodec.readBytes(cdi);
    long typeFlag = IntegerCodec.readULong(cdi);
    if (typeFlag != HASH_DATA_FLAG) {
      throw new TiClientInternalException("Invalid hash data flag: " + typeFlag);
    }
    byte[] field = BytesCodec.readBytes(cdi);
    return Pair.create(ByteString.copyFrom(key), ByteString.copyFrom(field));
  }
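
These helpers build TiDB meta keys of the shape 'm' + BytesCodec(key) + flag,
followed by BytesCodec(field) for hash data. As a sketch, the hash key under
which a database's JSON lives could be assembled as below; the "DB:%d" field
format mirrors encodeDatabaseID later in this class, and the exact byte image
depends on BytesCodec's memcomparable padding:

import com.google.protobuf.ByteString;
import org.tikv.codec.Codec.BytesCodec;
import org.tikv.codec.Codec.IntegerCodec;
import org.tikv.codec.CodecDataOutput;

public class MetaKeyExample {
  public static ByteString dbHashKey(long dbId) {
    CodecDataOutput cdo = new CodecDataOutput();
    cdo.write(new byte[] {'m'}); // META_PREFIX
    BytesCodec.writeBytes(cdo, "DBs".getBytes()); // hash name (KEY_DB)
    IntegerCodec.writeULong(cdo, 'h'); // HASH_DATA_FLAG
    BytesCodec.writeBytes(cdo, String.format("DB:%d", dbId).getBytes()); // field
    return cdo.toByteString();
  }
}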
  private ByteString hashGet(ByteString key, ByteString field) {
    CodecDataOutput cdo = new CodecDataOutput();
    encodeHashDataKey(cdo, key.toByteArray(), field.toByteArray());
    return snapshot.get(cdo.toByteString());
  }

  private ByteString bytesGet(ByteString key) {
    CodecDataOutput cdo = new CodecDataOutput();
    encodeStringDataKey(cdo, key.toByteArray());
    return snapshot.get(cdo.toByteString());
  }

  private List<Pair<ByteString, ByteString>> hashGetFields(ByteString key) {
    CodecDataOutput cdo = new CodecDataOutput();
    encodeHashDataKeyPrefix(cdo, key.toByteArray());
    ByteString encodedKey = cdo.toByteString();

    Iterator<Kvrpcpb.KvPair> iterator = snapshot.scan(encodedKey);
    List<Pair<ByteString, ByteString>> fields = new ArrayList<>();
    while (iterator.hasNext()) {
      Kvrpcpb.KvPair kv = iterator.next();
      if (!KeyUtils.hasPrefix(kv.getKey(), encodedKey)) {
        break;
      }
      fields.add(Pair.create(decodeHashDataKey(kv.getKey()).second, kv.getValue()));
    }

    return fields;
  }

  private static ByteString encodeDatabaseID(long id) {
    return ByteString.copyFrom(String.format("%s:%d", ENCODED_DB_PREFIX, id).getBytes());
  }

  public long getLatestSchemaVersion() {
    ByteString versionBytes = bytesGet(KEY_SCHEMA_VERSION);
    CodecDataInput cdi = new CodecDataInput(versionBytes.toByteArray());
    return Long.parseLong(new String(cdi.toByteArray(), StandardCharsets.UTF_8));
  }

  public List<TiDBInfo> getDatabases() {
    List<Pair<ByteString, ByteString>> fields = hashGetFields(KEY_DB);
    ImmutableList.Builder<TiDBInfo> builder = ImmutableList.builder();
    for (Pair<ByteString, ByteString> pair : fields) {
      builder.add(parseFromJson(pair.second, TiDBInfo.class));
    }
    return builder.build();
  }

  public TiDBInfo getDatabase(long id) {
    ByteString dbKey = encodeDatabaseID(id);
    ByteString json = hashGet(KEY_DB, dbKey);
    if (json == null || json.isEmpty()) {
      return null;
    }
    return parseFromJson(json, TiDBInfo.class);
  }

  public List<TiTableInfo> getTables(long dbId) {
    ByteString dbKey = encodeDatabaseID(dbId);
    List<Pair<ByteString, ByteString>> fields = hashGetFields(dbKey);
    ImmutableList.Builder<TiTableInfo> builder = ImmutableList.builder();
    for (Pair<ByteString, ByteString> pair : fields) {
      if (KeyUtils.hasPrefix(pair.first, KEY_TABLE)) {
        builder.add(parseFromJson(pair.second, TiTableInfo.class));
      }
    }
    return builder.build();
  }

  public static <T> T parseFromJson(ByteString json, Class<T> cls) {
    Objects.requireNonNull(json, "json is null");
    Objects.requireNonNull(cls, "cls is null");

    logger.debug(String.format("Parse Json %s : %s", cls.getSimpleName(), json.toStringUtf8()));
    ObjectMapper mapper = new ObjectMapper();
    try {
      return mapper.readValue(json.toStringUtf8(), cls);
    } catch (JsonParseException | JsonMappingException e) {
      String errMsg =
          String.format(
              "Invalid JSON value for Type %s: %s\n", cls.getSimpleName(), json.toStringUtf8());
      throw new TiClientInternalException(errMsg, e);
    } catch (Exception e1) {
      throw new TiClientInternalException("Error parsing Json", e1);
    }
  }
}
@@ -17,11 +17,7 @@ package org.tikv.codec;
import static com.google.common.base.Preconditions.checkArgument;

import gnu.trove.list.array.TIntArrayList;
import java.math.BigDecimal;
import java.sql.Date;
import java.util.Arrays;
import org.joda.time.*;
import org.tikv.exception.InvalidCodecFormatException;

public class Codec {
@@ -319,338 +315,4 @@ public class Codec {
      return bytes;
    }
  }

  public static class RealCodec {

    private static final long signMask = 0x8000000000000000L;

    /**
     * Decode as double
     *
     * @param cdi source of data
     * @return decoded double value
     */
    public static double readDouble(CodecDataInput cdi) {
      long u = IntegerCodec.readULong(cdi);
      if (u < 0) {
        u &= Long.MAX_VALUE;
      } else {
        u = ~u;
      }
      return Double.longBitsToDouble(u);
    }

    private static long encodeDoubleToCmpLong(double val) {
      long u = Double.doubleToRawLongBits(val);
      if (val >= 0) {
        u |= signMask;
      } else {
        u = ~u;
      }
      return u;
    }

    public static void writeDoubleFully(CodecDataOutput cdo, double val) {
      cdo.writeByte(FLOATING_FLAG);
      writeDouble(cdo, val);
    }

    /**
     * Encoding a double value to byte buffer
     *
     * @param cdo For outputting data in bytes array
     * @param val The data to encode
     */
    public static void writeDouble(CodecDataOutput cdo, double val) {
      IntegerCodec.writeULong(cdo, encodeDoubleToCmpLong(val));
    }
  }
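
The sign-mask trick in encodeDoubleToCmpLong makes the unsigned order of the
encoded longs match the numeric order of the doubles, so TiKV can compare the
big-endian bytes directly. A small self-check; encode duplicates the private
helper above, and the asserts need the -ea flag:

public class DoubleOrderCheck {
  private static final long SIGN_MASK = 0x8000000000000000L;

  // Same logic as RealCodec.encodeDoubleToCmpLong above.
  static long encode(double val) {
    long u = Double.doubleToRawLongBits(val);
    return val >= 0 ? u | SIGN_MASK : ~u;
  }

  public static void main(String[] args) {
    double[] vals = {-1e9, -2.5, -0.0, 0.0, 2.5, 1e9};
    for (int i = 1; i < vals.length; i++) {
      // Unsigned comparison of encodings agrees with numeric order.
      assert Long.compareUnsigned(encode(vals[i - 1]), encode(vals[i])) <= 0;
    }
    System.out.println("encoded order matches numeric order");
  }
}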
  public static class DecimalCodec {

    /**
     * Read a decimal value from CodecDataInput
     *
     * @param cdi cdi is source data.
     */
    public static BigDecimal readDecimal(CodecDataInput cdi) {
      if (cdi.available() < 3) {
        throw new IllegalArgumentException("insufficient bytes to read value");
      }

      // 64 should be large enough to avoid unnecessary growth.
      TIntArrayList data = new TIntArrayList(64);
      int precision = cdi.readUnsignedByte();
      int frac = cdi.readUnsignedByte();
      int length = precision + frac;
      int curPos = cdi.size() - cdi.available();
      for (int i = 0; i < length; i++) {
        if (cdi.eof()) {
          break;
        }
        data.add(cdi.readUnsignedByte());
      }

      MyDecimal dec = new MyDecimal();
      int binSize = dec.fromBin(precision, frac, data.toArray());
      // Rewind so exactly binSize bytes of the decimal body are consumed.
      cdi.mark(curPos + binSize);
      cdi.reset();
      return dec.toDecimal();
    }
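
A round-trip sketch tying readDecimal to the writeDecimal methods that follow.
CodecDataOutput.toBytes() is assumed to expose the written bytes, in the same
spirit as the toByteString() calls elsewhere in this diff:

import java.math.BigDecimal;
import org.tikv.codec.Codec;
import org.tikv.codec.CodecDataInput;
import org.tikv.codec.CodecDataOutput;

public class DecimalRoundTrip {
  public static void main(String[] args) {
    CodecDataOutput cdo = new CodecDataOutput();
    Codec.DecimalCodec.writeDecimal(cdo, new BigDecimal("3.14"));
    BigDecimal decoded = Codec.DecimalCodec.readDecimal(new CodecDataInput(cdo.toBytes()));
    System.out.println(decoded); // expected: 3.14
  }
}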
    /**
     * Write a decimal value to CodecDataOutput
     *
     * @param cdo cdo is destination data.
     * @param dec is decimal value that will be written into cdo.
     */
    static void writeDecimal(CodecDataOutput cdo, MyDecimal dec) {
      int[] data = dec.toBin(dec.precision(), dec.frac());
      cdo.writeByte(dec.precision());
      cdo.writeByte(dec.frac());
      for (int aData : data) {
        cdo.writeByte(aData & 0xFF);
      }
    }

    public static void writeDecimalFully(CodecDataOutput cdo, BigDecimal val) {
      cdo.writeByte(DECIMAL_FLAG);
      writeDecimal(cdo, val);
    }

    /**
     * Encode a decimal value to the byte buffer
     *
     * @param cdo For outputting data in bytes array
     * @param val The data to encode
     */
    public static void writeDecimal(CodecDataOutput cdo, BigDecimal val) {
      MyDecimal dec = new MyDecimal();
      dec.fromString(val.toPlainString());
      writeDecimal(cdo, dec);
    }
  }

  public static class DateTimeCodec {

    /**
     * Encode a DateTime to a packed long, converting to a specific timezone
     *
     * @param dateTime dateTime to be encoded.
     * @param tz timezone used for converting to localDateTime
     * @return a packed long.
     */
    static long toPackedLong(DateTime dateTime, DateTimeZone tz) {
      LocalDateTime localDateTime = dateTime.withZone(tz).toLocalDateTime();
      return toPackedLong(
          localDateTime.getYear(),
          localDateTime.getMonthOfYear(),
          localDateTime.getDayOfMonth(),
          localDateTime.getHourOfDay(),
          localDateTime.getMinuteOfHour(),
          localDateTime.getSecondOfMinute(),
          localDateTime.getMillisOfSecond() * 1000);
    }

    /**
     * Encode date/time parts to a packed long.
     *
     * @return a packed long.
     */
    static long toPackedLong(
        int year, int month, int day, int hour, int minute, int second, int micro) {
      long ymd = (year * 13 + month) << 5 | day;
      long hms = hour << 12 | minute << 6 | second;
      return ((ymd << 17 | hms) << 24) | micro;
    }
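
The packed layout is microseconds in the low 24 bits, a 17-bit
hour/minute/second field above them, and day/month/year at the top, with
month folded into the year as year * 13 + month. A tiny demo of packing and
unpacking one timestamp with the same shifts (illustrative only):

public class PackedDateTimeDemo {
  public static void main(String[] args) {
    // Pack 2017-10-22 14:30:05.000000.
    long ymd = (2017 * 13 + 10) << 5 | 22; // year*13+month above a 5-bit day
    long hms = 14 << 12 | 30 << 6 | 5; // hour | minute | second
    long packed = ((ymd << 17) | hms) << 24; // micro == 0

    // Unpack, mirroring fromPackedLong below.
    long ymdhms = packed >> 24;
    long ymd2 = ymdhms >> 17;
    int day = (int) (ymd2 & ((1 << 5) - 1));
    long ym = ymd2 >> 5;
    System.out.printf("%d-%02d-%02d%n", ym / 13, ym % 13, day); // 2017-10-22
  }
}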
    /**
     * Read datetime from a packed long, which contains all parts of a datetime, namely year,
     * month, day, hour, minute, second and millisecond. The original representation does not
     * carry any timezone information: in Timestamp type it should be interpreted as UTC, while
     * in Date type it is interpreted as local timezone.
     *
     * @param packed long value that packs date / time parts
     * @param tz timezone to interpret datetime parts
     * @return decoded DateTime using provided timezone
     */
    static DateTime fromPackedLong(long packed, DateTimeZone tz) {
      // TODO: As for JDBC behavior, it can be configured to "round" or "toNull".
      // For now we didn't pass in a session, so we use the "toNull" behavior.
      if (packed == 0) {
        return null;
      }
      long ymdhms = packed >> 24;
      long ymd = ymdhms >> 17;
      int day = (int) (ymd & ((1 << 5) - 1));
      long ym = ymd >> 5;
      int month = (int) (ym % 13);
      int year = (int) (ym / 13);

      int hms = (int) (ymdhms & ((1 << 17) - 1));
      int second = hms & ((1 << 6) - 1);
      int minute = (hms >> 6) & ((1 << 6) - 1);
      int hour = hms >> 12;
      int microsec = (int) (packed % (1 << 24));

      try {
        return new DateTime(year, month, day, hour, minute, second, microsec / 1000, tz);
      } catch (IllegalInstantException e) {
        LocalDateTime localDateTime =
            new LocalDateTime(year, month, day, hour, minute, second, microsec / 1000);
        DateTime dt = localDateTime.toLocalDate().toDateTimeAtStartOfDay(tz);
        long millis = dt.getMillis() + localDateTime.toLocalTime().getMillisOfDay();
        return new DateTime(millis, tz);
      }
    }

    /**
     * Encode DateTime as a packed long, converting into the specified timezone. All timezone
     * conversion should be done beforehand.
     *
     * @param cdo encoding output
     * @param dateTime value to encode
     * @param tz timezone used for converting local time
     */
    public static void writeDateTimeFully(CodecDataOutput cdo, DateTime dateTime, DateTimeZone tz) {
      long val = DateTimeCodec.toPackedLong(dateTime, tz);
      IntegerCodec.writeULongFully(cdo, val, true);
    }

    /**
     * Encode DateTime as a packed long, converting into the specified timezone. All timezone
     * conversion should be done beforehand. The encoded value has no data type flag.
     *
     * @param cdo encoding output
     * @param dateTime value to encode
     * @param tz timezone used for converting local time
     */
    public static void writeDateTimeProto(CodecDataOutput cdo, DateTime dateTime, DateTimeZone tz) {
      long val = DateTimeCodec.toPackedLong(dateTime, tz);
      IntegerCodec.writeULong(cdo, val);
    }

    /**
     * Read datetime from a packed long encoded as an unsigned var-len integer, converting into
     * the specified timezone.
     *
     * @see DateTimeCodec#fromPackedLong(long, DateTimeZone)
     * @param cdi codec buffer input
     * @param tz timezone to interpret datetime parts
     * @return decoded DateTime using provided timezone
     */
    public static DateTime readFromUVarInt(CodecDataInput cdi, DateTimeZone tz) {
      return DateTimeCodec.fromPackedLong(IntegerCodec.readUVarLong(cdi), tz);
    }

    /**
     * Read datetime from a packed long encoded as an unsigned fixed-len integer.
     *
     * @see DateTimeCodec#fromPackedLong(long, DateTimeZone)
     * @param cdi codec buffer input
     * @param tz timezone to interpret datetime parts
     * @return decoded DateTime using provided timezone
     */
    public static DateTime readFromUInt(CodecDataInput cdi, DateTimeZone tz) {
      return DateTimeCodec.fromPackedLong(IntegerCodec.readULong(cdi), tz);
    }
  }

  public static class DateCodec {

    /**
     * Encode a UTC Date to a packed long, converting to a specific timezone
     *
     * @param date date to be encoded.
     * @param tz timezone used for converting to localDate
     * @return a packed long.
     */
    static long toPackedLong(Date date, DateTimeZone tz) {
      return toPackedLong(date.getTime(), tz);
    }

    static long toPackedLong(long utcMillsTs, DateTimeZone tz) {
      LocalDate date = new LocalDate(utcMillsTs, tz);
      return toPackedLong(date);
    }

    static long toPackedLong(LocalDate date) {
      return Codec.DateCodec.toPackedLong(
          date.getYear(), date.getMonthOfYear(), date.getDayOfMonth());
    }

    /**
     * Encode date parts to a packed long.
     *
     * @return a packed long.
     */
    static long toPackedLong(int year, int month, int day) {
      long ymd = (year * 13 + month) << 5 | day;
      return ymd << 41;
    }

    static LocalDate fromPackedLong(long packed) {
      // TODO: As for JDBC behavior, it can be configured to "round" or "toNull".
      // For now we didn't pass in a session, so we use the "toNull" behavior.
      if (packed == 0) {
        return null;
      }
      long ymd = packed >> 41;
      int day = (int) (ymd & ((1 << 5) - 1));
      long ym = ymd >> 5;
      int month = (int) (ym % 13);
      int year = (int) (ym / 13);

      return new LocalDate(year, month, day, null);
    }

    /**
     * Encode Date as a packed long, converting into the specified timezone. All timezone
     * conversion should be done beforehand.
     *
     * @param cdo encoding output
     * @param date value to encode
     * @param tz timezone used for converting local time
     */
    public static void writeDateFully(CodecDataOutput cdo, Date date, DateTimeZone tz) {
      long val = DateCodec.toPackedLong(date, tz);
      IntegerCodec.writeULongFully(cdo, val, true);
    }

    /**
     * Encode Date as a packed long, converting into the specified timezone. All timezone
     * conversion should be done beforehand. The encoded value has no data type flag.
     *
     * @param cdo encoding output
     * @param date value to encode
     * @param tz timezone used for converting local time
     */
    public static void writeDateProto(CodecDataOutput cdo, Date date, DateTimeZone tz) {
      long val = DateCodec.toPackedLong(date, tz);
      IntegerCodec.writeULong(cdo, val);
    }

    /**
     * Read date from a packed long encoded as an unsigned var-len integer.
     *
     * @see DateCodec#fromPackedLong(long)
     * @param cdi codec buffer input
     * @return decoded LocalDate
     */
    public static LocalDate readFromUVarInt(CodecDataInput cdi) {
      return DateCodec.fromPackedLong(IntegerCodec.readUVarLong(cdi));
    }

    /**
     * Read date from a packed long encoded as an unsigned fixed-len integer.
     *
     * @see DateCodec#fromPackedLong(long)
     * @param cdi codec buffer input
     * @return decoded LocalDate
     */
    public static LocalDate readFromUInt(CodecDataInput cdi) {
      return DateCodec.fromPackedLong(IntegerCodec.readULong(cdi));
    }
  }
}
@@ -1,101 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.event;

import java.io.Serializable;

public class CacheInvalidateEvent implements Serializable {
  public enum CacheType implements Serializable {
    REGION_STORE,
    REQ_FAILED,
    LEADER
  }

  private long regionId;
  private long storeId;
  private boolean invalidateRegion;
  private boolean invalidateStore;
  private CacheType cacheType;

  public CacheInvalidateEvent(
      long regionId, long storeId, boolean updateRegion, boolean updateStore, CacheType type) {
    this.regionId = regionId;
    this.storeId = storeId;
    this.cacheType = type;
    if (updateRegion) {
      invalidateRegion();
    }

    if (updateStore) {
      invalidateStore();
    }
  }

  public long getRegionId() {
    return regionId;
  }

  public long getStoreId() {
    return storeId;
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    } else if (obj instanceof CacheInvalidateEvent) {
      CacheInvalidateEvent event = (CacheInvalidateEvent) obj;
      return event.getRegionId() == getRegionId()
          && event.getStoreId() == getStoreId()
          && event.getCacheType() == getCacheType();
    }
    return false;
  }

  @Override
  public int hashCode() {
    int result = 1106;
    result += result * 31 + getStoreId();
    result += result * 31 + getRegionId();
    result += result * 31 + getCacheType().name().hashCode();
    return result;
  }

  public void invalidateRegion() {
    invalidateRegion = true;
  }

  public void invalidateStore() {
    invalidateStore = true;
  }

  public boolean shouldUpdateRegion() {
    return invalidateRegion;
  }

  public boolean shouldUpdateStore() {
    return invalidateStore;
  }

  public CacheType getCacheType() {
    return cacheType;
  }

  @Override
  public String toString() {
    return String.format("RegionId=%d,StoreId=%d,Type=%s", regionId, storeId, cacheType.name());
  }
}
@@ -1,26 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.exception;

public class CastingException extends RuntimeException {
  public CastingException(Exception e) {
    super(e);
  }

  public CastingException(String msg) {
    super(msg);
  }
}
@@ -1,22 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.exception;

public class DAGRequestException extends RuntimeException {
  public DAGRequestException(String msg) {
    super(msg);
  }
}
@@ -1,22 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.exception;

public class IgnoreUnsupportedTypeException extends RuntimeException {
  public IgnoreUnsupportedTypeException(String msg) {
    super(msg);
  }
}
@@ -1,22 +0,0 @@
/*
 * Copyright 2018 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.exception;

public class RegionTaskException extends RuntimeException {
  public RegionTaskException(String msg, Throwable throwable) {
    super(msg, throwable);
  }
}
@@ -1,37 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.exception;

import com.pingcap.tidb.tipb.Error;

public class SelectException extends RuntimeException {
  private final Error err;

  public SelectException(Error err, String msg) {
    super(msg);
    this.err = err;
  }

  // TODO: improve this
  public SelectException(String msg) {
    super(msg);
    this.err = null;
  }

  public Error getError() {
    return err;
  }
}
@@ -1,26 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.exception;

public class TiExpressionException extends RuntimeException {
  public TiExpressionException(String msg) {
    super(msg);
  }

  public TiExpressionException(String msg, Throwable t) {
    super(msg, t);
  }
}
@@ -1,26 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.exception;

public class TypeException extends RuntimeException {
  public TypeException(String msg) {
    super(msg);
  }

  public TypeException(String msg, Throwable t) {
    super(msg, t);
  }
}
@@ -1,22 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.exception;

public class UnsupportedTypeException extends RuntimeException {
  public UnsupportedTypeException(String msg) {
    super(msg);
  }
}
@@ -1,87 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.expression;

import static java.util.Objects.requireNonNull;

import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.Objects;

public class AggregateFunction implements Expression {
  public enum FunctionType {
    Sum,
    Count,
    Min,
    Max,
    First
  }

  private final FunctionType type;
  private final Expression argument;

  public static AggregateFunction newCall(FunctionType type, Expression argument) {
    return new AggregateFunction(type, argument);
  }

  private AggregateFunction(FunctionType type, Expression argument) {
    this.type = requireNonNull(type, "function type is null");
    this.argument = requireNonNull(argument, "function argument is null");
  }

  public FunctionType getType() {
    return type;
  }

  public Expression getArgument() {
    return argument;
  }

  @Override
  public List<Expression> getChildren() {
    return ImmutableList.of(argument);
  }

  @Override
  public <R, C> R accept(Visitor<R, C> visitor, C context) {
    return visitor.visit(this, context);
  }

  @Override
  public boolean equals(Object other) {
    if (this == other) {
      return true;
    }
    if (!(other instanceof AggregateFunction)) {
      return false;
    }

    AggregateFunction that = (AggregateFunction) other;
    return type == that.type && Objects.equals(argument, that.argument);
  }

  @Override
  public int hashCode() {
    return Objects.hash(type, argument);
  }

  @Override
  public String toString() {
    return String.format(
        "%s(%s)", getType(), Joiner.on(",").useForNull("NULL").join(getChildren()));
  }
}
@@ -1,120 +0,0 @@
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.expression;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
import static org.tikv.expression.ArithmeticBinaryExpression.Type.*;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
public class ArithmeticBinaryExpression implements Expression {
|
||||
public enum Type {
|
||||
PLUS,
|
||||
MINUS,
|
||||
MULTIPLY,
|
||||
DIVIDE,
|
||||
BIT_AND,
|
||||
BIT_OR,
|
||||
BIT_XOR
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression plus(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(PLUS, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression minus(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(MINUS, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression multiply(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(MULTIPLY, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression divide(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(DIVIDE, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression bitAnd(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(BIT_AND, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression bitOr(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(BIT_OR, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression bitXor(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(BIT_XOR, left, right);
|
||||
}
|
||||
|
||||
private final Expression left;
|
||||
private final Expression right;
|
||||
private final Type compType;
|
||||
|
||||
public ArithmeticBinaryExpression(Type type, Expression left, Expression right) {
|
||||
this.left = requireNonNull(left, "left expression is null");
|
||||
this.right = requireNonNull(right, "right expression is null");
|
||||
this.compType = requireNonNull(type, "type is null");
|
||||
}
|
||||
|
||||
public Expression getLeft() {
|
||||
return left;
|
||||
}
|
||||
|
||||
public Expression getRight() {
|
||||
return right;
|
||||
}
|
||||
|
||||
public Type getCompType() {
|
||||
return compType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Expression> getChildren() {
|
||||
return ImmutableList.of(left, right);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(Visitor<R, C> visitor, C context) {
|
||||
return visitor.visit(this, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (!(other instanceof ArithmeticBinaryExpression)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
ArithmeticBinaryExpression that = (ArithmeticBinaryExpression) other;
|
||||
return (compType == that.compType)
|
||||
&& Objects.equals(left, that.left)
|
||||
&& Objects.equals(right, that.right);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(compType, left, right);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("[%s %s %s]", getLeft(), getCompType(), getRight());
|
||||
}
|
||||
}
|
||||
|
|
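A minimal usage sketch of the static factories above. Everything it calls (multiply, plus, Constant.create, ColumnRef.create, and the toString formats) appears in this diff; only the wrapper class is illustrative.

import org.tikv.expression.ArithmeticBinaryExpression;
import org.tikv.expression.ColumnRef;
import org.tikv.expression.Constant;
import org.tikv.expression.Expression;

public class ArithmeticSketch {
  public static void main(String[] args) {
    // Builds the tree for (c1 + 1) * 2 using the static factories.
    Expression expr =
        ArithmeticBinaryExpression.multiply(
            ArithmeticBinaryExpression.plus(ColumnRef.create("c1"), Constant.create(1L)),
            Constant.create(2L));
    // toString() renders each node as "[left TYPE right]":
    System.out.println(expr); // [[[c1] PLUS 1] MULTIPLY 2]
  }
}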
@@ -1,53 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.expression;

import static java.util.Objects.requireNonNull;

import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;

public class Blacklist {
  private final Set<String> unsupported = new HashSet<>();

  Blacklist(String string) {
    if (string != null) {
      String[] some = string.split(",");
      for (String one : some) {
        String trimmedExprName = one.trim();
        if (!trimmedExprName.isEmpty()) {
          unsupported.add(trimmedExprName);
        }
      }
    }
  }

  boolean isUnsupported(String name) {
    return unsupported.contains(name);
  }

  boolean isUnsupported(Class<?> cls) {
    return isUnsupported(requireNonNull(cls).getSimpleName());
  }

  @Override
  public String toString() {
    return unsupported.stream().collect(Collectors.joining(","));
  }
}
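Blacklist's constructor and isUnsupported methods are package-private, so a minimal sketch has to go through ExpressionBlacklist, the public subclass that appears later in this diff.

import org.tikv.expression.Constant;
import org.tikv.expression.ExpressionBlacklist;
import org.tikv.expression.IsNull;

public class BlacklistSketch {
  public static void main(String[] args) {
    // Entries are split on ',', trimmed, and empty segments are skipped,
    // so stray spaces and trailing commas are harmless.
    ExpressionBlacklist blacklist = new ExpressionBlacklist(" IsNull , Not ,,");
    System.out.println(blacklist.isUnsupportedPushdownExpr(IsNull.class));    // true
    System.out.println(blacklist.isUnsupportedPushdownExpr(Constant.class)); // false
  }
}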
@@ -1,55 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.expression;

import static com.google.common.base.Preconditions.checkNotNull;

import java.io.Serializable;
import org.tikv.expression.visitor.ProtoConverter;

public class ByItem implements Serializable {
  private Expression expr;
  private boolean desc;

  public static ByItem create(Expression expr, boolean desc) {
    return new ByItem(expr, desc);
  }

  private ByItem(Expression expr, boolean desc) {
    checkNotNull(expr, "Expr cannot be null for ByItem");

    this.expr = expr;
    this.desc = desc;
  }

  public com.pingcap.tidb.tipb.ByItem toProto(Object context) {
    com.pingcap.tidb.tipb.ByItem.Builder builder = com.pingcap.tidb.tipb.ByItem.newBuilder();
    return builder.setExpr(ProtoConverter.toProto(expr, context)).setDesc(desc).build();
  }

  public Expression getExpr() {
    return expr;
  }

  public boolean isDesc() {
    return desc;
  }

  @Override
  public String toString() {
    return String.format("[%s %s]", expr.toString(), desc ? "DESC" : "ASC");
  }
}
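A small sketch of the factory and accessors above; toProto is omitted because it needs a ProtoConverter context.

import org.tikv.expression.ByItem;
import org.tikv.expression.ColumnRef;

public class ByItemSketch {
  public static void main(String[] args) {
    // desc = true marks a descending sort/group key.
    ByItem item = ByItem.create(ColumnRef.create("c1"), true);
    System.out.println(item);          // [[c1] DESC]
    System.out.println(item.isDesc()); // true
  }
}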
@@ -1,143 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.expression;

import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.Objects;
import org.tikv.exception.TiClientInternalException;
import org.tikv.exception.TiExpressionException;
import org.tikv.meta.TiColumnInfo;
import org.tikv.meta.TiTableInfo;
import org.tikv.types.DataType;

public class ColumnRef implements Expression {
  public static ColumnRef create(String name, TiTableInfo table) {
    for (TiColumnInfo columnInfo : table.getColumns()) {
      if (columnInfo.matchName(name)) {
        return new ColumnRef(columnInfo.getName(), columnInfo, table);
      }
    }
    throw new TiExpressionException(
        String.format("Column name %s not found in table %s", name, table));
  }

  public static ColumnRef create(String name) {
    return new ColumnRef(name);
  }

  private final String name;

  private TiColumnInfo columnInfo;
  private TiTableInfo tableInfo;

  public ColumnRef(String name) {
    this.name = name;
  }

  public ColumnRef(String name, TiColumnInfo columnInfo, TiTableInfo tableInfo) {
    this.name = name;
    this.columnInfo = columnInfo;
    this.tableInfo = tableInfo;
  }

  public String getName() {
    return name;
  }

  public void resolve(TiTableInfo table) {
    TiColumnInfo columnInfo = null;
    for (TiColumnInfo col : table.getColumns()) {
      if (col.matchName(name)) {
        columnInfo = col;
        break;
      }
    }
    if (columnInfo == null) {
      throw new TiExpressionException(
          String.format("No Matching column %s from table %s", name, table.getName()));
    }

    if (columnInfo.getId() == 0) {
      throw new TiExpressionException("Zero Id is not a referable column id");
    }

    this.tableInfo = table;
    this.columnInfo = columnInfo;
  }

  public TiColumnInfo getColumnInfo() {
    if (columnInfo == null) {
      throw new TiClientInternalException(String.format("ColumnRef [%s] is unbound", name));
    }
    return columnInfo;
  }

  public DataType getType() {
    return getColumnInfo().getType();
  }

  public TiTableInfo getTableInfo() {
    return tableInfo;
  }

  public boolean isResolved() {
    return tableInfo != null && columnInfo != null;
  }

  @Override
  public boolean equals(Object another) {
    if (this == another) {
      return true;
    }

    if (another instanceof ColumnRef) {
      ColumnRef that = (ColumnRef) another;
      if (isResolved() && that.isResolved()) {
        return Objects.equals(columnInfo, that.columnInfo)
            && Objects.equals(tableInfo, that.tableInfo);
      } else {
        return name.equalsIgnoreCase(that.name);
      }
    } else {
      return false;
    }
  }

  @Override
  public int hashCode() {
    if (isResolved()) {
      return Objects.hash(tableInfo, columnInfo);
    } else {
      return Objects.hashCode(name);
    }
  }

  @Override
  public String toString() {
    return String.format("[%s]", getName());
  }

  @Override
  public List<Expression> getChildren() {
    return ImmutableList.of();
  }

  @Override
  public <R, C> R accept(Visitor<R, C> visitor, C context) {
    return visitor.visit(this, context);
  }
}
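A sketch of the resolution states above: unresolved references compare case-insensitively by name, and type information is only available once a TiColumnInfo is bound via resolve(table) or create(name, table).

import org.tikv.expression.ColumnRef;

public class ColumnRefSketch {
  public static void main(String[] args) {
    ColumnRef a = ColumnRef.create("C1");
    ColumnRef b = ColumnRef.create("c1");
    System.out.println(a.equals(b));     // true: unresolved refs compare by name, case-insensitively
    System.out.println(a.isResolved());  // false: no TiTableInfo bound yet
    // a.getType() would throw TiClientInternalException here, because
    // getColumnInfo() requires the reference to be bound first.
  }
}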
@@ -1,199 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.expression;

import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
import static org.tikv.expression.ComparisonBinaryExpression.Type.*;

import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import org.tikv.exception.TiExpressionException;
import org.tikv.key.TypedKey;
import org.tikv.types.DataType;

public class ComparisonBinaryExpression implements Expression {
  public enum Type {
    EQUAL,
    NOT_EQUAL,
    LESS_THAN,
    LESS_EQUAL,
    GREATER_THAN,
    GREATER_EQUAL
  }

  public static ComparisonBinaryExpression equal(Expression left, Expression right) {
    return new ComparisonBinaryExpression(EQUAL, left, right);
  }

  public static ComparisonBinaryExpression notEqual(Expression left, Expression right) {
    return new ComparisonBinaryExpression(NOT_EQUAL, left, right);
  }

  public static ComparisonBinaryExpression lessThan(Expression left, Expression right) {
    return new ComparisonBinaryExpression(LESS_THAN, left, right);
  }

  public static ComparisonBinaryExpression lessEqual(Expression left, Expression right) {
    return new ComparisonBinaryExpression(LESS_EQUAL, left, right);
  }

  public static ComparisonBinaryExpression greaterThan(Expression left, Expression right) {
    return new ComparisonBinaryExpression(GREATER_THAN, left, right);
  }

  public static ComparisonBinaryExpression greaterEqual(Expression left, Expression right) {
    return new ComparisonBinaryExpression(GREATER_EQUAL, left, right);
  }

  public static class NormalizedPredicate {
    private final ComparisonBinaryExpression pred;
    private TypedKey key;

    NormalizedPredicate(ComparisonBinaryExpression pred) {
      checkArgument(pred.getLeft() instanceof ColumnRef);
      checkArgument(pred.getRight() instanceof Constant);
      this.pred = pred;
    }

    public ColumnRef getColumnRef() {
      return (ColumnRef) pred.getLeft();
    }

    public Constant getValue() {
      return (Constant) pred.getRight();
    }

    public Type getType() {
      return pred.getComparisonType();
    }

    public TypedKey getTypedLiteral() {
      return getTypedLiteral(DataType.UNSPECIFIED_LEN);
    }

    public TypedKey getTypedLiteral(int prefixLength) {
      if (key == null) {
        key = TypedKey.toTypedKey(getValue().getValue(), getColumnRef().getType(), prefixLength);
      }
      return key;
    }
  }

  private final Expression left;
  private final Expression right;
  private final Type compType;
  private transient Optional<NormalizedPredicate> normalizedPredicate;

  public ComparisonBinaryExpression(Type type, Expression left, Expression right) {
    this.left = requireNonNull(left, "left expression is null");
    this.right = requireNonNull(right, "right expression is null");
    this.compType = requireNonNull(type, "type is null");
  }

  @Override
  public List<Expression> getChildren() {
    return ImmutableList.of(left, right);
  }

  @Override
  public <R, C> R accept(Visitor<R, C> visitor, C context) {
    return visitor.visit(this, context);
  }

  public Expression getLeft() {
    return left;
  }

  public Expression getRight() {
    return right;
  }

  public Type getComparisonType() {
    return compType;
  }

  public NormalizedPredicate normalize() {
    if (normalizedPredicate != null) {
      // Cached result; orElse(null) safely handles an empty Optional,
      // whereas orElseGet(null) would throw a NullPointerException.
      return normalizedPredicate.orElse(null);
    }
    if (getLeft() instanceof Constant && getRight() instanceof ColumnRef) {
      // Swap the operands so the ColumnRef ends up on the left,
      // mirroring the comparison type accordingly.
      Constant left = (Constant) getLeft();
      ColumnRef right = (ColumnRef) getRight();
      Type newType;
      switch (getComparisonType()) {
        case EQUAL:
          newType = EQUAL;
          break;
        case LESS_EQUAL:
          newType = GREATER_EQUAL;
          break;
        case LESS_THAN:
          newType = GREATER_THAN;
          break;
        case GREATER_EQUAL:
          newType = LESS_EQUAL;
          break;
        case GREATER_THAN:
          newType = LESS_THAN;
          break;
        case NOT_EQUAL:
          newType = NOT_EQUAL;
          break;
        default:
          throw new TiExpressionException(
              String.format(
                  "PredicateNormalizer is not able to process type %s", getComparisonType()));
      }
      ComparisonBinaryExpression newExpression =
          new ComparisonBinaryExpression(newType, right, left);
      normalizedPredicate = Optional.of(new NormalizedPredicate(newExpression));
      return normalizedPredicate.get();
    } else if (getRight() instanceof Constant && getLeft() instanceof ColumnRef) {
      normalizedPredicate = Optional.of(new NormalizedPredicate(this));
      return normalizedPredicate.get();
    } else {
      return null;
    }
  }

  @Override
  public String toString() {
    return String.format("[%s %s %s]", getLeft(), getComparisonType(), getRight());
  }

  @Override
  public boolean equals(Object other) {
    if (this == other) {
      return true;
    }
    if (!(other instanceof ComparisonBinaryExpression)) {
      return false;
    }

    ComparisonBinaryExpression that = (ComparisonBinaryExpression) other;
    return (compType == that.compType)
        && Objects.equals(left, that.left)
        && Objects.equals(right, that.right);
  }

  @Override
  public int hashCode() {
    return Objects.hash(compType, left, right);
  }
}
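A sketch of normalize(): a constant-vs-column predicate is flipped so the ColumnRef sits on the left, with the comparison type mirrored. All APIs used here appear in this file.

import org.tikv.expression.ColumnRef;
import org.tikv.expression.ComparisonBinaryExpression;
import org.tikv.expression.ComparisonBinaryExpression.NormalizedPredicate;
import org.tikv.expression.Constant;

public class NormalizeSketch {
  public static void main(String[] args) {
    // 1 < c1 has the constant on the left, so normalize() flips it to c1 > 1.
    ComparisonBinaryExpression pred =
        ComparisonBinaryExpression.lessThan(Constant.create(1L), ColumnRef.create("c1"));
    NormalizedPredicate normalized = pred.normalize();
    System.out.println(normalized.getType());      // GREATER_THAN
    System.out.println(normalized.getColumnRef()); // [c1]
    System.out.println(normalized.getValue());     // 1
    // A predicate that is not column-vs-constant (e.g. c1 + 1 < 2) makes
    // normalize() return null.
  }
}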
@@ -1,127 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.expression;

import com.google.common.collect.ImmutableList;
import java.math.BigDecimal;
import java.sql.Date;
import java.sql.Timestamp;
import java.util.List;
import java.util.Objects;
import org.joda.time.DateTime;
import org.tikv.exception.TiExpressionException;
import org.tikv.types.*;

// Refactor needed.
// Refer to https://github.com/pingcap/tipb/blob/master/go-tipb/expression.pb.go
// TODO: This might need a refactor to accept a DataType?
public class Constant implements Expression {
  private final Object value;
  private DataType type;

  public static Constant create(Object value, DataType type) {
    return new Constant(value, type);
  }

  public static Constant create(Object value) {
    return new Constant(value, null);
  }

  public Constant(Object value, DataType type) {
    this.value = value;
    this.type = (type == null && value != null) ? getDefaultType(value) : type;
  }

  protected static boolean isIntegerType(Object value) {
    return value instanceof Long
        || value instanceof Integer
        || value instanceof Short
        || value instanceof Byte;
  }

  private static DataType getDefaultType(Object value) {
    if (value == null) {
      throw new TiExpressionException("NULL constant has no type");
    } else if (isIntegerType(value)) {
      return IntegerType.BIGINT;
    } else if (value instanceof String) {
      return StringType.VARCHAR;
    } else if (value instanceof Float) {
      return RealType.FLOAT;
    } else if (value instanceof Double) {
      return RealType.DOUBLE;
    } else if (value instanceof BigDecimal) {
      return DecimalType.DECIMAL;
    } else if (value instanceof DateTime) {
      return DateTimeType.DATETIME;
    } else if (value instanceof Date) {
      return DateType.DATE;
    } else if (value instanceof Timestamp) {
      return TimestampType.TIMESTAMP;
    } else if (value instanceof byte[]) {
      return BytesType.TEXT;
    } else {
      throw new TiExpressionException(
          "Constant type not supported: " + value.getClass().getSimpleName());
    }
  }

  public void setType(DataType type) {
    this.type = type;
  }

  public Object getValue() {
    return value;
  }

  public DataType getType() {
    return type;
  }

  @Override
  public String toString() {
    if (value == null) {
      return "null";
    }
    if (value instanceof String) {
      return String.format("\"%s\"", value);
    }
    return value.toString();
  }

  @Override
  public boolean equals(Object other) {
    if (other instanceof Constant) {
      return Objects.equals(value, ((Constant) other).value);
    }
    return false;
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(value);
  }

  @Override
  public List<Expression> getChildren() {
    return ImmutableList.of();
  }

  @Override
  public <R, C> R accept(Visitor<R, C> visitor, C context) {
    return visitor.visit(this, context);
  }
}
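A sketch of the default-type inference above. The type constants are the same static instances referenced by getDefaultType, so identity comparison holds.

import org.tikv.expression.Constant;
import org.tikv.types.IntegerType;
import org.tikv.types.RealType;
import org.tikv.types.StringType;

public class ConstantSketch {
  public static void main(String[] args) {
    // With no explicit DataType, getDefaultType derives one from the Java
    // class of the value.
    System.out.println(Constant.create(1L).getType() == IntegerType.BIGINT);    // true
    System.out.println(Constant.create("abc").getType() == StringType.VARCHAR); // true
    System.out.println(Constant.create(1.5).getType() == RealType.DOUBLE);      // true
    System.out.println(Constant.create("abc"));                                 // "abc" (strings are quoted)
    // Constant.create(null) keeps a null type, while an unsupported value
    // class such as new Object() throws TiExpressionException.
  }
}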
@@ -1,25 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.expression;

import java.io.Serializable;
import java.util.List;

public interface Expression extends Serializable {
  List<Expression> getChildren();

  <R, C> R accept(Visitor<R, C> visitor, C context);
}
@@ -1,27 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.expression;

public class ExpressionBlacklist extends Blacklist {

  public ExpressionBlacklist(String exprsString) {
    super(exprsString);
  }

  public boolean isUnsupportedPushdownExpr(Class<?> cls) {
    return isUnsupported(cls);
  }
}
@@ -1,67 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.expression;

import static java.util.Objects.requireNonNull;

import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.Objects;

public class IsNull implements Expression {
  private Expression expression;

  public IsNull(Expression expression) {
    this.expression = requireNonNull(expression, "expression is null");
  }

  public Expression getExpression() {
    return expression;
  }

  @Override
  public List<Expression> getChildren() {
    return ImmutableList.of(expression);
  }

  @Override
  public <R, C> R accept(Visitor<R, C> visitor, C context) {
    return visitor.visit(this, context);
  }

  @Override
  public String toString() {
    return String.format("IsNull(%s)", getExpression());
  }

  @Override
  public boolean equals(Object other) {
    if (this == other) {
      return true;
    }
    if (!(other instanceof IsNull)) {
      return false;
    }

    IsNull that = (IsNull) other;
    return Objects.equals(expression, that.expression);
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(expression);
  }
}
@@ -1,87 +0,0 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: expression.proto

package org.tikv.expression;

import static java.util.Objects.requireNonNull;

import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.Objects;

public class LogicalBinaryExpression implements Expression {
  public enum Type {
    AND,
    OR,
    XOR
  }

  public static LogicalBinaryExpression and(Expression left, Expression right) {
    return new LogicalBinaryExpression(Type.AND, left, right);
  }

  public static LogicalBinaryExpression or(Expression left, Expression right) {
    return new LogicalBinaryExpression(Type.OR, left, right);
  }

  public static LogicalBinaryExpression xor(Expression left, Expression right) {
    return new LogicalBinaryExpression(Type.XOR, left, right);
  }

  public LogicalBinaryExpression(Type type, Expression left, Expression right) {
    this.left = requireNonNull(left, "left expression is null");
    this.right = requireNonNull(right, "right expression is null");
    this.compType = requireNonNull(type, "type is null");
  }

  @Override
  public List<Expression> getChildren() {
    return ImmutableList.of(getLeft(), getRight());
  }

  @Override
  public <R, C> R accept(Visitor<R, C> visitor, C context) {
    return visitor.visit(this, context);
  }

  public Expression getLeft() {
    return left;
  }

  public Expression getRight() {
    return right;
  }

  public Type getCompType() {
    return compType;
  }

  private final Expression left;
  private final Expression right;
  private final Type compType;

  @Override
  public boolean equals(Object other) {
    if (this == other) {
      return true;
    }
    if (!(other instanceof LogicalBinaryExpression)) {
      return false;
    }

    LogicalBinaryExpression that = (LogicalBinaryExpression) other;
    return (compType == that.compType)
        && Objects.equals(left, that.left)
        && Objects.equals(right, that.right);
  }

  @Override
  public int hashCode() {
    return Objects.hash(compType, left, right);
  }

  @Override
  public String toString() {
    return String.format("[%s %s %s]", getLeft(), getCompType(), getRight());
  }
}
@@ -1,72 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.expression;

import static java.util.Objects.requireNonNull;

import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.Objects;

public class Not implements Expression {

  public static Not not(Expression expression) {
    return new Not(expression);
  }

  private Expression expression;

  public Not(Expression expression) {
    this.expression = requireNonNull(expression, "expression is null");
  }

  public Expression getExpression() {
    return expression;
  }

  @Override
  public List<Expression> getChildren() {
    return ImmutableList.of(expression);
  }

  @Override
  public <R, C> R accept(Visitor<R, C> visitor, C context) {
    return visitor.visit(this, context);
  }

  @Override
  public String toString() {
    return String.format("Not(%s)", getExpression());
  }

  @Override
  public boolean equals(Object other) {
    if (this == other) {
      return true;
    }
    if (!(other instanceof Not)) {
      return false;
    }

    Not that = (Not) other;
    return Objects.equals(expression, that.expression);
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(expression);
  }
}
@@ -1,145 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.expression;

import static java.util.Objects.requireNonNull;
import static org.tikv.expression.StringRegExpression.Type.*;

import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.Objects;
import org.tikv.key.TypedKey;
import org.tikv.types.DataType;
import org.tikv.types.IntegerType;

public class StringRegExpression implements Expression {
  public enum Type {
    STARTS_WITH,
    CONTAINS,
    ENDS_WITH,
    LIKE
  }

  public static StringRegExpression startsWith(Expression left, Expression right) {
    Expression reg =
        Constant.create(((Constant) right).getValue() + "%", ((Constant) right).getType());
    return new StringRegExpression(STARTS_WITH, left, right, reg);
  }

  public static StringRegExpression contains(Expression left, Expression right) {
    Expression reg =
        Constant.create("%" + ((Constant) right).getValue() + "%", ((Constant) right).getType());
    return new StringRegExpression(CONTAINS, left, right, reg);
  }

  public static StringRegExpression endsWith(Expression left, Expression right) {
    Expression reg =
        Constant.create("%" + ((Constant) right).getValue(), ((Constant) right).getType());
    return new StringRegExpression(ENDS_WITH, left, right, reg);
  }

  public static StringRegExpression like(Expression left, Expression right) {
    return new StringRegExpression(LIKE, left, right, right);
  }

  private transient TypedKey key;

  public ColumnRef getColumnRef() {
    return (ColumnRef) getLeft();
  }

  public Constant getValue() {
    return (Constant) getRight();
  }

  public TypedKey getTypedLiteral() {
    return getTypedLiteral(DataType.UNSPECIFIED_LEN);
  }

  public TypedKey getTypedLiteral(int prefixLength) {
    if (key == null) {
      key = TypedKey.toTypedKey(getValue().getValue(), getColumnRef().getType(), prefixLength);
    }
    return key;
  }

  private final Expression left;
  private final Expression right;
  private final Expression reg;
  private final Type regType;

  public StringRegExpression(Type type, Expression left, Expression right, Expression reg) {
    this.left = requireNonNull(left, "left expression is null");
    this.right = requireNonNull(right, "right expression is null");
    this.regType = requireNonNull(type, "type is null");
    this.reg = requireNonNull(reg, "reg string is null");
  }

  @Override
  public List<Expression> getChildren() {
    // For a LIKE statement, an extra ESCAPE parameter is required as the third parameter for
    // ScalarFunc. However, ESCAPE is not supported in Spark, so we simply set this value to zero.
    return ImmutableList.of(left, reg, Constant.create(0, IntegerType.BIGINT));
  }

  @Override
  public <R, C> R accept(Visitor<R, C> visitor, C context) {
    return visitor.visit(this, context);
  }

  public Expression getLeft() {
    return left;
  }

  public Expression getRight() {
    return right;
  }

  public Type getRegType() {
    return regType;
  }

  public Expression getReg() {
    return reg;
  }

  @Override
  public String toString() {
    return String.format("[%s %s %s reg: %s]", getLeft(), getRegType(), getRight(), getReg());
  }

  @Override
  public boolean equals(Object other) {
    if (this == other) {
      return true;
    }
    if (!(other instanceof StringRegExpression)) {
      return false;
    }

    StringRegExpression that = (StringRegExpression) other;
    return (regType == that.regType)
        && Objects.equals(left, that.left)
        && Objects.equals(right, that.right)
        && Objects.equals(reg, that.reg);
  }

  @Override
  public int hashCode() {
    return Objects.hash(regType, left, right, reg);
  }
}
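A sketch of the startsWith factory: the original right operand is kept while a derived LIKE pattern is stored as reg, and getChildren() appends the ESCAPE constant described in the comment above.

import org.tikv.expression.ColumnRef;
import org.tikv.expression.Constant;
import org.tikv.expression.StringRegExpression;

public class StringRegSketch {
  public static void main(String[] args) {
    // startsWith(c1, "abc") builds the LIKE pattern "abc%" as the reg constant.
    StringRegExpression expr =
        StringRegExpression.startsWith(ColumnRef.create("c1"), Constant.create("abc"));
    System.out.println(expr.getReg()); // "abc%"
    // getChildren() is (left, reg, escape): the trailing Constant 0 is the
    // ESCAPE argument expected by the coprocessor's LIKE ScalarFunc.
    System.out.println(expr.getChildren().size()); // 3
  }
}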
@@ -1,67 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.expression;

import static org.tikv.types.MySQLType.*;

import java.util.HashMap;
import java.util.Map;
import org.tikv.types.MySQLType;

public class TypeBlacklist extends Blacklist {
  private static final Map<MySQLType, String> typeToMySQLMap = initialTypeMap();

  private static HashMap<MySQLType, String> initialTypeMap() {
    HashMap<MySQLType, String> map = new HashMap<>();
    map.put(TypeDecimal, "decimal");
    map.put(TypeTiny, "tinyint");
    map.put(TypeShort, "smallint");
    map.put(TypeLong, "int");
    map.put(TypeFloat, "float");
    map.put(TypeDouble, "double");
    map.put(TypeNull, "null");
    map.put(TypeTimestamp, "timestamp");
    map.put(TypeLonglong, "bigint");
    map.put(TypeInt24, "mediumint");
    map.put(TypeDate, "date");
    map.put(TypeDuration, "time");
    map.put(TypeDatetime, "datetime");
    map.put(TypeYear, "year");
    map.put(TypeNewDate, "date");
    map.put(TypeVarchar, "varchar");
    map.put(TypeJSON, "json");
    map.put(TypeNewDecimal, "decimal");
    map.put(TypeEnum, "enum");
    map.put(TypeSet, "set");
    map.put(TypeTinyBlob, "tinytext");
    map.put(TypeMediumBlob, "mediumtext");
    map.put(TypeLongBlob, "longtext");
    map.put(TypeBlob, "text");
    map.put(TypeVarString, "varString");
    map.put(TypeString, "string");
    return map;
  }

  public TypeBlacklist(String typesString) {
    super(typesString);
  }

  public boolean isUnsupportedType(MySQLType sqlType) {
    return isUnsupported(typeToMySQLMap.getOrDefault(sqlType, ""));
  }
}
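A sketch of the MySQL-name lookup: blacklist entries use the lowercase names from the map above, not the enum constant names.

import org.tikv.expression.TypeBlacklist;
import org.tikv.types.MySQLType;

public class TypeBlacklistSketch {
  public static void main(String[] args) {
    // "tinyint" blocks TypeTiny, "json" blocks TypeJSON.
    TypeBlacklist blacklist = new TypeBlacklist("tinyint,json");
    System.out.println(blacklist.isUnsupportedType(MySQLType.TypeTiny)); // true
    System.out.println(blacklist.isUnsupportedType(MySQLType.TypeJSON)); // true
    System.out.println(blacklist.isUnsupportedType(MySQLType.TypeLong)); // false
  }
}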
@@ -1,38 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.expression;

public abstract class Visitor<R, C> {
  protected abstract R visit(ColumnRef node, C context);

  protected abstract R visit(ComparisonBinaryExpression node, C context);

  protected abstract R visit(StringRegExpression node, C context);

  protected abstract R visit(ArithmeticBinaryExpression node, C context);

  protected abstract R visit(LogicalBinaryExpression node, C context);

  protected abstract R visit(Constant node, C context);

  protected abstract R visit(AggregateFunction node, C context);

  protected abstract R visit(IsNull node, C context);

  protected abstract R visit(Not node, C context);
}
@@ -1,42 +0,0 @@
/*
 * Copyright 2018 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.expression.visitor;

import org.tikv.expression.ColumnRef;
import org.tikv.expression.Expression;

public class ColumnMatcher extends DefaultVisitor<Boolean, Void> {
  private final ColumnRef columnRef;

  private ColumnMatcher(ColumnRef exp) {
    this.columnRef = exp;
  }

  public static Boolean match(ColumnRef col, Expression expression) {
    ColumnMatcher matcher = new ColumnMatcher(col);
    return expression.accept(matcher, null);
  }

  @Override
  protected Boolean process(Expression node, Void context) {
    return false;
  }

  @Override
  protected Boolean visit(ColumnRef node, Void context) {
    return node.getColumnInfo().matchName(columnRef.getName());
  }
}
@@ -1,72 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.expression.visitor;

import org.tikv.expression.*;

public class DefaultVisitor<R, C> extends Visitor<R, C> {
  protected R process(Expression node, C context) {
    for (Expression expr : node.getChildren()) {
      expr.accept(this, context);
    }
    return null;
  }

  @Override
  protected R visit(ColumnRef node, C context) {
    return process(node, context);
  }

  @Override
  protected R visit(ComparisonBinaryExpression node, C context) {
    return process(node, context);
  }

  @Override
  protected R visit(StringRegExpression node, C context) {
    return process(node, context);
  }

  @Override
  protected R visit(ArithmeticBinaryExpression node, C context) {
    return process(node, context);
  }

  @Override
  protected R visit(LogicalBinaryExpression node, C context) {
    return process(node, context);
  }

  @Override
  protected R visit(Constant node, C context) {
    return process(node, context);
  }

  @Override
  protected R visit(AggregateFunction node, C context) {
    return process(node, context);
  }

  @Override
  protected R visit(IsNull node, C context) {
    return process(node, context);
  }

  @Override
  protected R visit(Not node, C context) {
    return process(node, context);
  }
}
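A sketch of subclassing DefaultVisitor: override only the node types you care about and let process() recurse through the rest. ColumnCollector below is a hypothetical helper, not part of this diff.

import java.util.ArrayList;
import java.util.List;
import org.tikv.expression.ColumnRef;
import org.tikv.expression.ComparisonBinaryExpression;
import org.tikv.expression.Constant;
import org.tikv.expression.Expression;
import org.tikv.expression.visitor.DefaultVisitor;

// Collects every ColumnRef in an expression tree. Only visit(ColumnRef) is
// overridden; DefaultVisitor.process recurses into all other node types.
public class ColumnCollector extends DefaultVisitor<Void, List<ColumnRef>> {
  @Override
  protected Void visit(ColumnRef node, List<ColumnRef> context) {
    context.add(node);
    return null;
  }

  public static void main(String[] args) {
    Expression pred =
        ComparisonBinaryExpression.equal(ColumnRef.create("c1"), Constant.create(1L));
    List<ColumnRef> columns = new ArrayList<>();
    pred.accept(new ColumnCollector(), columns);
    System.out.println(columns); // [[c1]]
  }
}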
@@ -1,224 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.expression.visitor;

import static java.util.Objects.requireNonNull;

import java.util.IdentityHashMap;
import java.util.List;
import org.tikv.exception.TiExpressionException;
import org.tikv.expression.*;
import org.tikv.expression.AggregateFunction.FunctionType;
import org.tikv.types.DataType;
import org.tikv.types.DecimalType;
import org.tikv.types.IntegerType;
import org.tikv.types.RealType;
import org.tikv.util.Pair;

/**
 * Validates and infers expression types. Collected results are returned via {@link #getTypeMap}.
 * For now we don't do any type promotion and only coerce from left to right.
 */
public class ExpressionTypeCoercer extends Visitor<Pair<DataType, Double>, DataType> {
  private final IdentityHashMap<Expression, DataType> typeMap = new IdentityHashMap<>();
  private static final double MAX_CREDIBILITY = 1.0;
  private static final double MIN_CREDIBILITY = 0.1;
  private static final double COLUMN_REF_CRED = MAX_CREDIBILITY;
  private static final double CONSTANT_CRED = MIN_CREDIBILITY;
  private static final double LOGICAL_OP_CRED = MAX_CREDIBILITY;
  private static final double COMPARISON_OP_CRED = MAX_CREDIBILITY;
  private static final double STRING_REG_OP_CRED = MAX_CREDIBILITY;
  private static final double FUNCTION_CRED = MAX_CREDIBILITY;
  private static final double ISNULL_CRED = MAX_CREDIBILITY;
  private static final double NOT_CRED = MAX_CREDIBILITY;

  public IdentityHashMap<Expression, DataType> getTypeMap() {
    return typeMap;
  }

  public static DataType inferType(Expression expression) {
    ExpressionTypeCoercer inf = new ExpressionTypeCoercer();
    return inf.infer(expression);
  }

  public DataType infer(Expression expression) {
    requireNonNull(expression, "expression is null");
    return expression.accept(this, null).first;
  }

  public void infer(List<? extends Expression> expressions) {
    requireNonNull(expressions, "expressions is null");
    expressions.forEach(expr -> expr.accept(this, null));
  }

  @Override
  protected Pair<DataType, Double> visit(ColumnRef node, DataType targetType) {
    DataType type = node.getType();
    if (targetType != null && !targetType.equals(type)) {
      throw new TiExpressionException(String.format("Column %s cannot be %s", node, targetType));
    }
    typeMap.put(node, type);
    return Pair.create(type, COLUMN_REF_CRED);
  }

  // Try to coerce types if needed.
  // A column reference is the source of coercion, and a constant is the subject to coerce.
  // A null targetType means no coercion was requested by the parent; in that case the result
  // with the highest credibility is chosen as the baseline.
  protected Pair<DataType, Double> coerceType(DataType targetType, Expression... nodes) {
    if (nodes.length == 0) {
      throw new TiExpressionException("failed to verify empty node list");
    }
    if (targetType == null) {
      Pair<DataType, Double> baseline = nodes[0].accept(this, null);
      for (int i = 1; i < nodes.length; i++) {
        Pair<DataType, Double> current = nodes[i].accept(this, null);
        if (current.second > baseline.second) {
          baseline = current;
        }
      }
      for (Expression node : nodes) {
        node.accept(this, baseline.first);
      }
      return baseline;
    } else {
      double credibility = -1;
      for (Expression node : nodes) {
        Pair<DataType, Double> result = node.accept(this, targetType);
        if (result.second > credibility) {
          credibility = result.second;
        }
      }
      return Pair.create(targetType, credibility);
    }
  }

  @Override
  protected Pair<DataType, Double> visit(ComparisonBinaryExpression node, DataType targetType) {
    if (targetType != null && !targetType.equals(IntegerType.BOOLEAN)) {
      throw new TiExpressionException(String.format("Comparison result cannot be %s", targetType));
    }
    if (!typeMap.containsKey(node)) {
      coerceType(null, node.getLeft(), node.getRight());
      typeMap.put(node, IntegerType.BOOLEAN);
    }
    return Pair.create(IntegerType.BOOLEAN, COMPARISON_OP_CRED);
  }

  @Override
  protected Pair<DataType, Double> visit(StringRegExpression node, DataType targetType) {
    if (targetType != null && !targetType.equals(IntegerType.BOOLEAN)) {
      throw new TiExpressionException(String.format("Comparison result cannot be %s", targetType));
    }
    if (!typeMap.containsKey(node)) {
      coerceType(null, node.getLeft(), node.getRight());
      typeMap.put(node, IntegerType.BOOLEAN);
    }
    return Pair.create(IntegerType.BOOLEAN, STRING_REG_OP_CRED);
  }

  @Override
  protected Pair<DataType, Double> visit(ArithmeticBinaryExpression node, DataType targetType) {
    Pair<DataType, Double> result = coerceType(targetType, node.getLeft(), node.getRight());
    typeMap.put(node, result.first);
    return result;
  }

  @Override
  protected Pair<DataType, Double> visit(LogicalBinaryExpression node, DataType targetType) {
    if (targetType != null && !targetType.equals(IntegerType.BOOLEAN)) {
      throw new TiExpressionException(String.format("Comparison result cannot be %s", targetType));
    }
    if (!typeMap.containsKey(node)) {
      coerceType(null, node.getLeft(), node.getRight());
      typeMap.put(node, IntegerType.BOOLEAN);
    }
    return Pair.create(IntegerType.BOOLEAN, LOGICAL_OP_CRED);
  }

  @Override
  protected Pair<DataType, Double> visit(Constant node, DataType targetType) {
    if (targetType == null) {
      return Pair.create(node.getType(), CONSTANT_CRED);
    } else {
      node.setType(targetType);
      typeMap.put(node, targetType);
      return Pair.create(targetType, CONSTANT_CRED);
    }
  }

  @Override
  protected Pair<DataType, Double> visit(AggregateFunction node, DataType targetType) {
    FunctionType fType = node.getType();
    coerceType(null, node.getArgument());
    switch (fType) {
      case Count:
        {
          // COUNT always yields BIGINT; any other requested target type is an error.
          if (targetType != null && !targetType.equals(IntegerType.BIGINT)) {
            throw new TiExpressionException(String.format("Count cannot be %s", targetType));
          }
          typeMap.put(node, IntegerType.BIGINT);
          return Pair.create(IntegerType.BIGINT, FUNCTION_CRED);
        }
      case Sum:
        {
          // SUM yields DOUBLE for real-typed arguments and DECIMAL otherwise.
          if (targetType != null && !targetType.equals(DecimalType.DECIMAL)) {
            throw new TiExpressionException(String.format("Sum cannot be %s", targetType));
          }
          DataType colType = node.getArgument().accept(this, null).first;
          if (colType instanceof RealType) {
            typeMap.put(node, RealType.DOUBLE);
          } else {
            typeMap.put(node, DecimalType.DECIMAL);
          }
          return Pair.create(typeMap.get(node), FUNCTION_CRED);
        }
      case First:
      case Max:
      case Min:
        {
          Pair<DataType, Double> result = coerceType(targetType, node.getArgument());
          typeMap.put(node, result.first);
          return result;
        }
      default:
        throw new TiExpressionException(String.format("Unknown function %s", fType));
    }
  }

  @Override
  protected Pair<DataType, Double> visit(IsNull node, DataType targetType) {
    if (targetType != null && !targetType.equals(IntegerType.BOOLEAN)) {
      throw new TiExpressionException(String.format("IsNull result cannot be %s", targetType));
    }
    if (!typeMap.containsKey(node)) {
      coerceType(null, node.getExpression());
      typeMap.put(node, IntegerType.BOOLEAN);
    }
    return Pair.create(IntegerType.BOOLEAN, ISNULL_CRED);
  }

  @Override
  protected Pair<DataType, Double> visit(Not node, DataType targetType) {
    if (targetType != null && !targetType.equals(IntegerType.BOOLEAN)) {
      throw new TiExpressionException(String.format("Not result cannot be %s", targetType));
    }
    if (!typeMap.containsKey(node)) {
      coerceType(null, node.getExpression());
      typeMap.put(node, IntegerType.BOOLEAN);
    }
    return Pair.create(IntegerType.BOOLEAN, NOT_CRED);
  }
}
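A sketch of the credibility mechanism: with two constants of equal (minimum) credibility, the strictly-greater comparison in coerceType keeps the first operand as the baseline, so the second constant is coerced to the first one's type.

import org.tikv.expression.ComparisonBinaryExpression;
import org.tikv.expression.Constant;
import org.tikv.expression.visitor.ExpressionTypeCoercer;
import org.tikv.types.IntegerType;

public class CoercerSketch {
  public static void main(String[] args) {
    Constant one = Constant.create(1L);  // BIGINT by default
    Constant two = Constant.create("2"); // VARCHAR by default
    ComparisonBinaryExpression pred = ComparisonBinaryExpression.equal(one, two);
    // Comparison nodes always infer to BOOLEAN.
    System.out.println(ExpressionTypeCoercer.inferType(pred) == IntegerType.BOOLEAN); // true
    // Equal-credibility operands coerce toward the first one, so the string
    // constant now reports BIGINT as well.
    System.out.println(two.getType() == IntegerType.BIGINT); // true
  }
}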
@@ -1,111 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.expression.visitor;

import static java.util.Objects.requireNonNull;

import org.tikv.expression.*;
import org.tikv.expression.ComparisonBinaryExpression.NormalizedPredicate;
import org.tikv.meta.TiIndexColumn;

/**
 * Tests if a predicate matches an index column entirely and can be converted to index-related
 * ranges. If a predicate matches only partially, it returns false.
 */
public class IndexMatcher extends DefaultVisitor<Boolean, Void> {
  private final boolean matchEqualTestOnly;
  private final TiIndexColumn indexColumn;

  private IndexMatcher(TiIndexColumn indexColumn, boolean matchEqualTestOnly) {
    this.matchEqualTestOnly = matchEqualTestOnly;
    this.indexColumn = requireNonNull(indexColumn, "index column is null");
  }

  public static IndexMatcher equalOnlyMatcher(TiIndexColumn indexColumn) {
    return new IndexMatcher(indexColumn, true);
  }

  public static IndexMatcher matcher(TiIndexColumn indexColumn) {
    return new IndexMatcher(indexColumn, false);
  }

  public boolean match(Expression expression) {
    return expression.accept(this, null);
  }

  @Override
  protected Boolean process(Expression node, Void context) {
    return false;
  }

  @Override
  protected Boolean visit(ColumnRef node, Void context) {
    String indexColumnName = indexColumn.getName();
    return node.getColumnInfo().matchName(indexColumnName);
  }

  @Override
  protected Boolean visit(ComparisonBinaryExpression node, Void context) {
    switch (node.getComparisonType()) {
      case LESS_THAN:
      case LESS_EQUAL:
      case GREATER_THAN:
      case GREATER_EQUAL:
      case NOT_EQUAL:
        if (matchEqualTestOnly) {
          return false;
        }
        // fall through: range tests are acceptable when not restricted to equality
      case EQUAL:
        NormalizedPredicate predicate = node.normalize();
        if (predicate == null) {
          return false;
        }
        return predicate.getColumnRef().accept(this, context);
      default:
        return false;
    }
  }

  @Override
  protected Boolean visit(StringRegExpression node, Void context) {
    switch (node.getRegType()) {
        // If the predicate is StartsWith(col, 'a'), it indicates a range of
        // ['a', +∞) which can be used by an index scan.
      case STARTS_WITH:
        if (matchEqualTestOnly) {
          return false;
        }
        return node.getLeft().accept(this, context);
      default:
        return false;
    }
  }

  @Override
  protected Boolean visit(LogicalBinaryExpression node, Void context) {
    switch (node.getCompType()) {
      case AND:
        if (matchEqualTestOnly) {
          return false;
        }
        // fall through
      case OR:
      case XOR:
        return node.getLeft().accept(this, context) && node.getRight().accept(this, context);
      default:
        return false;
    }
  }
}
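A sketch under stated assumptions: table has a column c1, and indexColumn is the index column covering it (normally obtained from TiIndexInfo.getIndexColumns(), as used by IndexRangeBuilder below). The ColumnRefs must be resolved because visit(ColumnRef) calls getColumnInfo().

import org.tikv.expression.ColumnRef;
import org.tikv.expression.ComparisonBinaryExpression;
import org.tikv.expression.Constant;
import org.tikv.expression.Expression;
import org.tikv.expression.visitor.IndexMatcher;
import org.tikv.meta.TiIndexColumn;
import org.tikv.meta.TiTableInfo;

public class IndexMatcherSketch {
  // Assumes `table` has a column named "c1" and `indexColumn` covers it.
  static void demo(TiTableInfo table, TiIndexColumn indexColumn) {
    Expression eq =
        ComparisonBinaryExpression.equal(ColumnRef.create("c1", table), Constant.create(1L));
    Expression lt =
        ComparisonBinaryExpression.lessThan(ColumnRef.create("c1", table), Constant.create(1L));
    System.out.println(IndexMatcher.matcher(indexColumn).match(eq));          // true
    System.out.println(IndexMatcher.matcher(indexColumn).match(lt));          // true
    System.out.println(IndexMatcher.equalOnlyMatcher(indexColumn).match(lt)); // false
  }
}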
@@ -1,200 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.expression.visitor;

import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import org.tikv.exception.TiExpressionException;
import org.tikv.expression.*;
import org.tikv.expression.ComparisonBinaryExpression.NormalizedPredicate;
import org.tikv.key.TypedKey;
import org.tikv.meta.TiIndexColumn;
import org.tikv.meta.TiIndexInfo;
import org.tikv.meta.TiTableInfo;
import org.tikv.types.DataType;

public class IndexRangeBuilder extends DefaultVisitor<RangeSet<TypedKey>, Void> {

  private final Map<ColumnRef, Integer> lengths; // prefix length of the corresponding ColumnRef

  public IndexRangeBuilder(TiTableInfo table, TiIndexInfo index) {
    Map<ColumnRef, Integer> result = new HashMap<>();
    if (table != null && index != null) {
      for (TiIndexColumn indexColumn : index.getIndexColumns()) {
        ColumnRef columnRef = ColumnRef.create(indexColumn.getName(), table);
        result.put(columnRef, (int) indexColumn.getLength());
      }
    }
    this.lengths = result;
  }

  public Set<Range<TypedKey>> buildRange(Expression predicate) {
    Objects.requireNonNull(predicate, "predicate is null");
    return predicate.accept(this, null).asRanges();
  }

  private static void throwOnError(Expression node) {
    final String errorFormat = "Unsupported conversion to Range: %s";
    throw new TiExpressionException(String.format(errorFormat, node));
  }

  protected RangeSet<TypedKey> process(Expression node, Void context) {
    throwOnError(node);
    return null;
  }

  @Override
  protected RangeSet<TypedKey> visit(LogicalBinaryExpression node, Void context) {
    RangeSet<TypedKey> leftRanges = node.getLeft().accept(this, context);
    RangeSet<TypedKey> rightRanges = node.getRight().accept(this, context);
    switch (node.getCompType()) {
      case AND:
        for (Range<TypedKey> range : leftRanges.asRanges()) {
          rightRanges = rightRanges.subRangeSet(range);
        }
        break;
      case OR:
        rightRanges.addAll(leftRanges);
        break;
      case XOR:
        // intersection (the AND part)
        RangeSet<TypedKey> intersection = rightRanges;
        for (Range<TypedKey> range : leftRanges.asRanges()) {
          intersection = intersection.subRangeSet(range);
        }
        // union (the full set) minus the intersection
        rightRanges.addAll(leftRanges);
        rightRanges.removeAll(intersection);
        break;
      default:
        throwOnError(node);
    }
    return rightRanges;
  }

  @Override
  protected RangeSet<TypedKey> visit(ComparisonBinaryExpression node, Void context) {
    NormalizedPredicate predicate = node.normalize();
    if (predicate == null) {
      throwOnError(node);
    }
    // In order to match a prefix index, we have to cut the literal by the prefix length.
    // e.g., for table t:
    // CREATE TABLE `t` (
    //     `b` VARCHAR(10) DEFAULT NULL,
    //     KEY `prefix_index` (`b`(2))
    // )
    //
    // b(2) > "bbc"  -> ["bb", +∞)
    // b(2) >= "bbc" -> ["bb", +∞)
    // b(2) < "bbc"  -> (-∞, "bb"]
    // b(2) <= "bbc" -> (-∞, "bb"]
    // b(2) = "bbc"  -> ["bb", "bb"]
    // b(2) > "b"    -> ["b", +∞)
    // b(2) >= "b"   -> ["b", +∞)
    // b(2) < "b"    -> (-∞, "b"]
    // b(2) <= "b"   -> (-∞, "b"]
    //
    // For varchar, `b`(2) will take the first two characters (bytes) as the prefix index.
    // TODO: Note that TiDB only supports UTF-8; we need to check whether prefix indexes
    // behave differently under other encodings.
    int prefixLen = lengths.getOrDefault(predicate.getColumnRef(), DataType.UNSPECIFIED_LEN);
    TypedKey literal = predicate.getTypedLiteral(prefixLen);
    RangeSet<TypedKey> ranges = TreeRangeSet.create();

    if (prefixLen != DataType.UNSPECIFIED_LEN) {
      // With a prefix length specified, the filter is loosened, and so should be the ranges.
      switch (predicate.getType()) {
        case GREATER_THAN:
        case GREATER_EQUAL:
          ranges.add(Range.atLeast(literal));
          break;
        case LESS_THAN:
        case LESS_EQUAL:
          ranges.add(Range.atMost(literal));
          break;
        case EQUAL:
          ranges.add(Range.singleton(literal));
          break;
        case NOT_EQUAL:
          // Should return the full range, because a prefix index predicate for NOT_EQUAL
          // will be split into a NOT_EQUAL filter and a full range scan.
          ranges.add(Range.all());
          break;
        default:
          throwOnError(node);
      }
    } else {
      switch (predicate.getType()) {
        case GREATER_THAN:
          ranges.add(Range.greaterThan(literal));
          break;
        case GREATER_EQUAL:
          ranges.add(Range.atLeast(literal));
          break;
        case LESS_THAN:
          ranges.add(Range.lessThan(literal));
          break;
        case LESS_EQUAL:
          ranges.add(Range.atMost(literal));
          break;
        case EQUAL:
          ranges.add(Range.singleton(literal));
          break;
        case NOT_EQUAL:
          ranges.add(Range.lessThan(literal));
          ranges.add(Range.greaterThan(literal));
          break;
        default:
          throwOnError(node);
      }
    }
    return ranges;
  }

  @Override
  protected RangeSet<TypedKey> visit(StringRegExpression node, Void context) {
    ColumnRef columnRef = node.getColumnRef();
    // In order to match a prefix index, we have to cut the literal by the prefix length.
    // e.g., for table t:
    // CREATE TABLE `t` (
    //     `c1` VARCHAR(10) DEFAULT NULL,
    //     KEY `prefix_index` (`c1`(2))
    // )
    // When the predicate is `c1` LIKE 'abc%', the index range should be ['ab', 'ab'].
    // When the predicate is `c1` LIKE 'a%', the index range should be ['a', 'b').
    // For varchar, `c1`(2) will take the first two characters (bytes) as the prefix index.
    // TODO: Note that TiDB only supports UTF-8; we need to check whether prefix indexes
    // behave differently under other encodings.
    int prefixLen = lengths.getOrDefault(columnRef, DataType.UNSPECIFIED_LEN);
    TypedKey literal = node.getTypedLiteral(prefixLen);
    RangeSet<TypedKey> ranges = TreeRangeSet.create();

    switch (node.getRegType()) {
      case STARTS_WITH:
        ranges.add(Range.atLeast(literal).intersection(Range.lessThan(literal.next(prefixLen))));
        break;
      default:
        throwOnError(node);
    }
    return ranges;
  }
}
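For orientation, a hypothetical caller-side sketch of the builder above; `table` and `index` are assumed to come from resolved schema metadata, and the `lessEqual`/`create` factories are assumed to match this commit's expression API:

IndexRangeBuilder builder = new IndexRangeBuilder(table, index);
// b <= "bbc" on a `b`(2) prefix index, per the comment table above
Expression predicate =
    ComparisonBinaryExpression.lessEqual(ColumnRef.create("b", table), Constant.create("bbc"));
Set<Range<TypedKey>> ranges = builder.buildRange(predicate); // with `b`(2): {(-∞, "bb"]}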
@@ -1,55 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.expression.visitor;

import java.util.List;
import java.util.Objects;
import org.tikv.expression.ColumnRef;
import org.tikv.expression.Expression;
import org.tikv.meta.TiTableInfo;

public class MetaResolver extends DefaultVisitor<Void, Expression> {
  public static void resolve(Expression expression, TiTableInfo table) {
    MetaResolver resolver = new MetaResolver(table);
    resolver.resolve(expression);
  }

  public static void resolve(List<? extends Expression> expressions, TiTableInfo table) {
    MetaResolver resolver = new MetaResolver(table);
    resolver.resolve(expressions);
  }

  private final TiTableInfo table;

  public MetaResolver(TiTableInfo table) {
    this.table = table;
  }

  public void resolve(List<? extends Expression> expressions) {
    expressions.forEach(expression -> expression.accept(this, null));
  }

  public void resolve(Expression expression) {
    Objects.requireNonNull(expression, "expression is null");
    expression.accept(this, null);
  }

  @Override
  protected Void visit(ColumnRef node, Expression parent) {
    node.resolve(table);
    return null;
  }
}
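A one-line usage sketch: column references must be bound to table metadata before type inference or proto conversion can succeed (`predicate` and `table` are assumed to exist from earlier stages):

MetaResolver.resolve(predicate, table); // binds every ColumnRef in the tree to its column metadata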
@@ -1,315 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.expression.visitor;

import static java.util.Objects.requireNonNull;

import com.google.common.collect.ImmutableMap;
import com.pingcap.tidb.tipb.Expr;
import com.pingcap.tidb.tipb.ExprType;
import com.pingcap.tidb.tipb.FieldType;
import com.pingcap.tidb.tipb.ScalarFuncSig;
import java.util.IdentityHashMap;
import java.util.Map;
import org.tikv.codec.Codec.IntegerCodec;
import org.tikv.codec.CodecDataOutput;
import org.tikv.exception.TiExpressionException;
import org.tikv.expression.*;
import org.tikv.expression.AggregateFunction.FunctionType;
import org.tikv.types.*;
import org.tikv.types.DataType.EncodeType;

public class ProtoConverter extends Visitor<Expr, Object> {
  // Every concrete data type must be hooked to a type name.
  private static final Map<Class<? extends DataType>, String> SCALAR_SIG_MAP =
      ImmutableMap.<Class<? extends DataType>, String>builder()
          .put(IntegerType.class, "Int")
          .put(BitType.class, "Int")
          .put(DecimalType.class, "Decimal")
          .put(RealType.class, "Real")
          .put(DateTimeType.class, "Time")
          .put(DateType.class, "Time")
          .put(TimestampType.class, "Time")
          .put(BytesType.class, "String")
          .put(StringType.class, "String")
          .build();

  private final IdentityHashMap<Expression, DataType> typeMap;
  private final boolean validateColPosition;

  public ProtoConverter(IdentityHashMap<Expression, DataType> typeMap) {
    this(typeMap, true);
  }

  /**
   * Instantiates a {@code ProtoConverter} using a typeMap.
   *
   * @param typeMap the type map
   * @param validateColPosition whether to consider column position in this converter. By default, a
   *     {@code TiDAGRequest} should check whether a {@code ColumnRef}'s position is correct in its
   *     executors. This validation can be skipped by setting {@code validateColPosition} to false.
   */
  public ProtoConverter(
      IdentityHashMap<Expression, DataType> typeMap, boolean validateColPosition) {
    this.typeMap = typeMap;
    this.validateColPosition = validateColPosition;
  }

  private DataType getType(Expression expression) {
    DataType type = typeMap.get(expression);
    if (type == null) {
      throw new TiExpressionException(String.format("Expression %s type unknown", expression));
    }
    return type;
  }

  private String getTypeSignature(Expression expression) {
    DataType type = getType(expression);
    String typeSignature = SCALAR_SIG_MAP.get(type.getClass());
    if (typeSignature == null) {
      throw new TiExpressionException(String.format("Type %s signature unknown", type));
    }
    return typeSignature;
  }

  public static Expr toProto(Expression expression) {
    return toProto(expression, null);
  }

  public static Expr toProto(Expression expression, Object context) {
    ExpressionTypeCoercer coercer = new ExpressionTypeCoercer();
    coercer.infer(expression);
    ProtoConverter converter = new ProtoConverter(coercer.getTypeMap());
    return expression.accept(converter, context);
  }

  // Generates a protobuf builder with partial data encoded.
  // The scalar signature is left unset.
  private Expr.Builder scalaToPartialProto(Expression node, Object context) {
    Expr.Builder builder = Expr.newBuilder();
    // Scalar function type
    builder.setTp(ExprType.ScalarFunc);

    // Return type
    builder.setFieldType(FieldType.newBuilder().setTp(getType(node).getTypeCode()).build());

    for (Expression child : node.getChildren()) {
      Expr exprProto = child.accept(this, context);
      builder.addChildren(exprProto);
    }

    return builder;
  }

  @Override
  protected Expr visit(LogicalBinaryExpression node, Object context) {
    ScalarFuncSig protoSig;
    switch (node.getCompType()) {
      case AND:
        protoSig = ScalarFuncSig.LogicalAnd;
        break;
      case OR:
        protoSig = ScalarFuncSig.LogicalOr;
        break;
      case XOR:
        protoSig = ScalarFuncSig.LogicalXor;
        break;
      default:
        throw new TiExpressionException(
            String.format("Unknown comparison type %s", node.getCompType()));
    }
    Expr.Builder builder = scalaToPartialProto(node, context);
    builder.setSig(protoSig);
    return builder.build();
  }

  @Override
  protected Expr visit(ArithmeticBinaryExpression node, Object context) {
    // Assume that, after type coercion, the children are compatible.
    Expression child = node.getLeft();
    String typeSignature = getTypeSignature(child);
    ScalarFuncSig protoSig;
    switch (node.getCompType()) {
        // TODO: Add test for bitwise push down
      case BIT_AND:
        protoSig = ScalarFuncSig.BitAndSig;
        break;
      case BIT_OR:
        protoSig = ScalarFuncSig.BitOrSig;
        break;
      case BIT_XOR:
        protoSig = ScalarFuncSig.BitXorSig;
        break;
      case DIVIDE:
        protoSig = ScalarFuncSig.valueOf("Divide" + typeSignature);
        break;
      case MINUS:
        protoSig = ScalarFuncSig.valueOf("Minus" + typeSignature);
        break;
      case MULTIPLY:
        protoSig = ScalarFuncSig.valueOf("Multiply" + typeSignature);
        break;
      case PLUS:
        protoSig = ScalarFuncSig.valueOf("Plus" + typeSignature);
        break;
      default:
        throw new TiExpressionException(
            String.format("Unknown comparison type %s", node.getCompType()));
    }
    Expr.Builder builder = scalaToPartialProto(node, context);
    builder.setSig(protoSig);
    return builder.build();
  }

  @Override
  protected Expr visit(ComparisonBinaryExpression node, Object context) {
    // Assume that, after type coercion, the children are compatible.
    Expression child = node.getLeft();
    String typeSignature = getTypeSignature(child);
    ScalarFuncSig protoSig;
    switch (node.getComparisonType()) {
      case EQUAL:
        protoSig = ScalarFuncSig.valueOf("EQ" + typeSignature);
        break;
      case GREATER_EQUAL:
        protoSig = ScalarFuncSig.valueOf("GE" + typeSignature);
        break;
      case GREATER_THAN:
        protoSig = ScalarFuncSig.valueOf("GT" + typeSignature);
        break;
      case LESS_EQUAL:
        protoSig = ScalarFuncSig.valueOf("LE" + typeSignature);
        break;
      case LESS_THAN:
        protoSig = ScalarFuncSig.valueOf("LT" + typeSignature);
        break;
      case NOT_EQUAL:
        protoSig = ScalarFuncSig.valueOf("NE" + typeSignature);
        break;
      default:
        throw new TiExpressionException(
            String.format("Unknown comparison type %s", node.getComparisonType()));
    }
    Expr.Builder builder = scalaToPartialProto(node, context);
    builder.setSig(protoSig);
    return builder.build();
  }

  @Override
  protected Expr visit(StringRegExpression node, Object context) {
    // Assume that, after type coercion, the children are compatible.
    ScalarFuncSig protoSig;
    switch (node.getRegType()) {
      case STARTS_WITH:
      case CONTAINS:
      case ENDS_WITH:
      case LIKE:
        protoSig = ScalarFuncSig.LikeSig;
        break;
      default:
        throw new TiExpressionException(String.format("Unknown reg type %s", node.getRegType()));
    }
    Expr.Builder builder = scalaToPartialProto(node, context);
    builder.setSig(protoSig);
    return builder.build();
  }

  @Override
  @SuppressWarnings("unchecked")
  protected Expr visit(ColumnRef node, Object context) {
    long position = 0;
    if (validateColPosition) {
      requireNonNull(context, "Context of a ColumnRef should not be null");
      Map<ColumnRef, Integer> colIdOffsetMap = (Map<ColumnRef, Integer>) context;
      position =
          requireNonNull(
              colIdOffsetMap.get(node), "Required column position info is not in a valid context.");
    }
    Expr.Builder builder = Expr.newBuilder();
    builder.setTp(ExprType.ColumnRef);
    CodecDataOutput cdo = new CodecDataOutput();
    // After switching to DAG request mode, the expression value
    // should be the index of the table columns we provided in
    // the first executor of a DAG request.
    IntegerCodec.writeLong(cdo, position);
    builder.setVal(cdo.toByteString());
    return builder.build();
  }

  @Override
  protected Expr visit(Constant node, Object context) {
    Expr.Builder builder = Expr.newBuilder();
    if (node.getValue() == null) {
      builder.setTp(ExprType.Null);
      return builder.build();
    } else {
      DataType type = node.getType();
      builder.setTp(type.getProtoExprType());
      CodecDataOutput cdo = new CodecDataOutput();
      type.encode(cdo, EncodeType.PROTO, node.getValue());
      builder.setVal(cdo.toByteString());
    }
    return builder.build();
  }

  @Override
  protected Expr visit(AggregateFunction node, Object context) {
    Expr.Builder builder = Expr.newBuilder();

    FunctionType type = node.getType();
    switch (type) {
      case Max:
        builder.setTp(ExprType.Max);
        break;
      case Sum:
        builder.setTp(ExprType.Sum);
        break;
      case Min:
        builder.setTp(ExprType.Min);
        break;
      case First:
        builder.setTp(ExprType.First);
        break;
      case Count:
        builder.setTp(ExprType.Count);
        break;
    }

    for (Expression arg : node.getChildren()) {
      Expr exprProto = arg.accept(this, context);
      builder.addChildren(exprProto);
    }

    return builder.build();
  }

  @Override
  protected Expr visit(IsNull node, Object context) {
    String typeSignature = getTypeSignature(node.getExpression());
    ScalarFuncSig protoSig = ScalarFuncSig.valueOf(typeSignature + "IsNull");
    Expr.Builder builder = scalaToPartialProto(node, context);
    builder.setSig(protoSig);
    return builder.build();
  }

  @Override
  protected Expr visit(Not node, Object context) {
    ScalarFuncSig protoSig = ScalarFuncSig.UnaryNot;
    Expr.Builder builder = scalaToPartialProto(node, context);
    builder.setSig(protoSig);
    return builder.build();
  }
}
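A hypothetical caller-side sketch of the conversion entry point above; `expr` is a coerced expression and `colRef` a resolved column reference, both assumed to come from earlier stages. Per the ColumnRef visitor, the context must map each ColumnRef to its offset in the DAG request's first executor:

Map<ColumnRef, Integer> offsets = new HashMap<>();
offsets.put(colRef, 0); // offset of the column in the executor's column list
Expr proto = ProtoConverter.toProto(expr, offsets); // infers types, then encodes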
@@ -1,65 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.expression.visitor;

import org.tikv.expression.ComparisonBinaryExpression;
import org.tikv.expression.Expression;
import org.tikv.expression.LogicalBinaryExpression;

public class PseudoCostCalculator extends DefaultVisitor<Double, Void> {
  public static double calculateCost(Expression expr) {
    PseudoCostCalculator calc = new PseudoCostCalculator();
    return expr.accept(calc, null);
  }

  @Override
  protected Double process(Expression node, Void context) {
    return 1.0;
  }

  @Override
  protected Double visit(LogicalBinaryExpression node, Void context) {
    double leftCost = node.getLeft().accept(this, context);
    double rightCost = node.getRight().accept(this, context);
    switch (node.getCompType()) {
      case AND:
        return leftCost * rightCost;
      case OR:
      case XOR:
        return leftCost + rightCost;
      default:
        return 1.0;
    }
  }

  @Override
  protected Double visit(ComparisonBinaryExpression node, Void context) {
    switch (node.getComparisonType()) {
      case EQUAL:
        return 0.01;
      case GREATER_EQUAL:
      case GREATER_THAN:
      case LESS_EQUAL:
      case LESS_THAN:
        // magic number for testing
        return 0.3;
      case NOT_EQUAL:
        return 0.99;
      default:
        return 1.0;
    }
  }
}
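To make the selectivity heuristic concrete, a worked example under the constants above (the figures are the visitor's magic numbers, not measured statistics; `predicate` is assumed built elsewhere):

// cost(a = 1 AND b < 5) = 0.01 * 0.3 = 0.003   (conjunctions multiply)
// cost(a = 1 OR  b < 5) = 0.01 + 0.3 = 0.31    (disjunctions add)
double cost = PseudoCostCalculator.calculateCost(predicate); // lower ranks better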
@@ -1,53 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.expression.visitor;

import org.tikv.expression.Expression;
import org.tikv.expression.ExpressionBlacklist;

public class SupportedExpressionValidator extends DefaultVisitor<Boolean, ExpressionBlacklist> {
  private static final SupportedExpressionValidator validator = new SupportedExpressionValidator();

  public static boolean isSupportedExpression(Expression node, ExpressionBlacklist blacklist) {
    if (!node.accept(validator, blacklist)) {
      return false;
    }
    try {
      ExpressionTypeCoercer coercer = new ExpressionTypeCoercer();
      coercer.infer(node);
      ProtoConverter protoConverter = new ProtoConverter(coercer.getTypeMap(), false);
      if (node.accept(protoConverter, null) == null) {
        return false;
      }
    } catch (Exception e) {
      return false;
    }
    return true;
  }

  @Override
  protected Boolean process(Expression node, ExpressionBlacklist blacklist) {
    // check the expression's own class against the blacklist
    if (blacklist != null && blacklist.isUnsupportedPushdownExpr(node.getClass())) {
      return false;
    }
    for (Expression expr : node.getChildren()) {
      if (!expr.accept(this, blacklist)) {
        return false;
      }
    }
    return true;
  }
}
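A sketch of how this gate is typically used before push-down (`expr` and `blacklist` are assumed to be configured elsewhere):

if (SupportedExpressionValidator.isSupportedExpression(expr, blacklist)) {
  // safe to push down: ProtoConverter.toProto(expr, offsets) will not throw for this tree
}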
@@ -1,78 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.key;

import com.google.common.base.Joiner;
import java.util.ArrayList;
import java.util.List;
import org.tikv.codec.CodecDataOutput;

public class CompoundKey extends Key {

  private final List<Key> keys;

  protected CompoundKey(List<Key> keys, byte[] value) {
    super(value);
    this.keys = keys;
  }

  public static CompoundKey concat(Key lKey, Key rKey) {
    Builder builder = newBuilder();
    builder.append(lKey).append(rKey);
    return builder.build();
  }

  public List<Key> getKeys() {
    return keys;
  }

  public static Builder newBuilder() {
    return new Builder();
  }

  public static class Builder {
    private final List<Key> keys = new ArrayList<>();

    public Builder append(Key key) {
      if (key instanceof CompoundKey) {
        CompoundKey compKey = (CompoundKey) key;
        for (Key child : compKey.getKeys()) {
          append(child);
        }
      } else {
        keys.add(key);
      }
      return this;
    }

    public CompoundKey build() {
      int totalLen = 0;
      for (Key key : keys) {
        totalLen += key.getBytes().length;
      }
      CodecDataOutput cdo = new CodecDataOutput(totalLen);
      for (Key key : keys) {
        cdo.write(key.getBytes());
      }
      return new CompoundKey(keys, cdo.toBytes());
    }
  }

  @Override
  public String toString() {
    return String.format("[%s]", Joiner.on(",").useForNull("Null").join(keys));
  }
}
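A small sketch of the flattening behavior above: appending an existing CompoundKey splices in its children instead of nesting it (`keyA`, `keyB`, `keyC` are assumed Key instances):

CompoundKey ab = CompoundKey.concat(keyA, keyB);                            // [keyA, keyB]
CompoundKey abc = CompoundKey.newBuilder().append(ab).append(keyC).build();
// abc.getKeys() is [keyA, keyB, keyC]; its bytes are the children's bytes in order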
@@ -1,72 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.key;

import com.google.common.base.Joiner;
import org.tikv.codec.Codec.IntegerCodec;
import org.tikv.codec.CodecDataOutput;
import org.tikv.exception.TypeException;

public class IndexKey extends Key {
  private static final byte[] IDX_PREFIX_SEP = new byte[] {'_', 'i'};

  private final long tableId;
  private final long indexId;
  private final Key[] dataKeys;

  private IndexKey(long tableId, long indexId, Key[] dataKeys) {
    super(encode(tableId, indexId, dataKeys));
    this.tableId = tableId;
    this.indexId = indexId;
    this.dataKeys = dataKeys;
  }

  public static IndexKey toIndexKey(long tableId, long indexId, Key... dataKeys) {
    return new IndexKey(tableId, indexId, dataKeys);
  }

  private static byte[] encode(long tableId, long indexId, Key[] dataKeys) {
    CodecDataOutput cdo = new CodecDataOutput();
    cdo.write(TBL_PREFIX);
    IntegerCodec.writeLong(cdo, tableId);
    cdo.write(IDX_PREFIX_SEP);
    IntegerCodec.writeLong(cdo, indexId);
    for (Key key : dataKeys) {
      if (key == null) {
        throw new TypeException("key cannot be null");
      }
      cdo.write(key.getBytes());
    }
    return cdo.toBytes();
  }

  public long getTableId() {
    return tableId;
  }

  public long getIndexId() {
    return indexId;
  }

  public Key[] getDataKeys() {
    return dataKeys;
  }

  @Override
  public String toString() {
    return String.format("[%s]", Joiner.on(",").useForNull("null").join(dataKeys));
  }
}
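The byte layout produced by encode above, spelled out as a sketch (`someTypedKey` is an illustrative data key built elsewhere):

// 't' | int64(tableId) | '_' 'i' | int64(indexId) | dataKeys[0].bytes | dataKeys[1].bytes | ...
IndexKey ik = IndexKey.toIndexKey(42, 7, someTypedKey);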
@@ -20,20 +20,14 @@ import static org.tikv.codec.KeyUtils.formatBytes;

import com.google.protobuf.ByteString;
import java.util.Arrays;
import org.tikv.codec.CodecDataOutput;
import org.tikv.types.DataType;
import org.tikv.util.FastByteComparisons;

public class Key implements Comparable<Key> {
  protected static final byte[] TBL_PREFIX = new byte[] {'t'};

  protected final byte[] value;
  protected final int infFlag;

  public static final Key EMPTY = createEmpty();
  public static final Key NULL = createNull();
  public static final Key MIN = createTypelessMin();
  public static final Key MAX = createTypelessMax();

  private Key(byte[] value, boolean negative) {
    this.value = requireNonNull(value, "value is null");

@@ -60,17 +54,6 @@
    return new Key(bytes);
  }

  private static Key createNull() {
    CodecDataOutput cdo = new CodecDataOutput();
    DataType.encodeNull(cdo);
    return new Key(cdo.toBytes()) {
      @Override
      public String toString() {
        return "null";
      }
    };
  }

  private static Key createEmpty() {
    return new Key(new byte[0]) {
      @Override

@@ -85,28 +68,6 @@
    };
  }

  private static Key createTypelessMin() {
    CodecDataOutput cdo = new CodecDataOutput();
    DataType.encodeIndex(cdo);
    return new Key(cdo.toBytes()) {
      @Override
      public String toString() {
        return "MIN";
      }
    };
  }

  private static Key createTypelessMax() {
    CodecDataOutput cdo = new CodecDataOutput();
    DataType.encodeMaxValue(cdo);
    return new Key(cdo.toBytes()) {
      @Override
      public String toString() {
        return "MAX";
      }
    };
  }

  /**
   * The next key for the bytes domain. It first adds one at the LSB and, if the LSB overflows,
   * appends a zero byte at the end. The original bytes will be reused if possible.
@@ -1,176 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.key;

import static org.tikv.codec.Codec.IntegerCodec.writeLong;

import java.util.Objects;
import org.tikv.codec.Codec.IntegerCodec;
import org.tikv.codec.CodecDataInput;
import org.tikv.codec.CodecDataOutput;
import org.tikv.exception.TiClientInternalException;
import org.tikv.exception.TiExpressionException;
import org.tikv.key.RowKey.DecodeResult.Status;
import org.tikv.util.FastByteComparisons;

public class RowKey extends Key {
  private static final byte[] REC_PREFIX_SEP = new byte[] {'_', 'r'};

  private final long tableId;
  private final long handle;
  private final boolean maxHandleFlag;

  private RowKey(long tableId, long handle) {
    super(encode(tableId, handle));
    this.tableId = tableId;
    this.handle = handle;
    this.maxHandleFlag = false;
  }

  /**
   * The RowKey indicating the maximum handle (its value exceeds Long.MAX_VALUE).
   *
   * <p>Initializes an imaginary, globally maximum rowKey with tableId.
   */
  private RowKey(long tableId) {
    super(encodeBeyondMaxHandle(tableId));
    this.tableId = tableId;
    this.handle = Long.MAX_VALUE;
    this.maxHandleFlag = true;
  }

  public static RowKey toRowKey(long tableId, long handle) {
    return new RowKey(tableId, handle);
  }

  public static RowKey toRowKey(long tableId, TypedKey handle) {
    Object obj = handle.getValue();
    if (obj instanceof Long) {
      return new RowKey(tableId, (long) obj);
    }
    throw new TiExpressionException("Cannot encode row key with non-long type");
  }

  public static RowKey createMin(long tableId) {
    return toRowKey(tableId, Long.MIN_VALUE);
  }

  public static RowKey createBeyondMax(long tableId) {
    return new RowKey(tableId);
  }

  private static byte[] encode(long tableId, long handle) {
    CodecDataOutput cdo = new CodecDataOutput();
    encodePrefix(cdo, tableId);
    writeLong(cdo, handle);
    return cdo.toBytes();
  }

  private static byte[] encodeBeyondMaxHandle(long tableId) {
    return nextValue(encode(tableId, Long.MAX_VALUE));
  }

  @Override
  public RowKey next() {
    long handle = getHandle();
    boolean maxHandleFlag = getMaxHandleFlag();
    if (maxHandleFlag) {
      throw new TiClientInternalException("Handle overflow for Long MAX");
    }
    if (handle == Long.MAX_VALUE) {
      return createBeyondMax(tableId);
    }
    return new RowKey(tableId, handle + 1);
  }

  public long getTableId() {
    return tableId;
  }

  public long getHandle() {
    return handle;
  }

  private boolean getMaxHandleFlag() {
    return maxHandleFlag;
  }

  @Override
  public String toString() {
    return Long.toString(handle);
  }

  private static void encodePrefix(CodecDataOutput cdo, long tableId) {
    cdo.write(TBL_PREFIX);
    writeLong(cdo, tableId);
    cdo.write(REC_PREFIX_SEP);
  }

  public static class DecodeResult {
    public long handle;

    public enum Status {
      MIN,
      MAX,
      EQUAL,
      LESS,
      GREATER,
      UNKNOWN_INF
    }

    public Status status;
  }

  public static void tryDecodeRowKey(long tableId, byte[] rowKey, DecodeResult outResult) {
    Objects.requireNonNull(rowKey, "rowKey cannot be null");
    if (rowKey.length == 0) {
      outResult.status = Status.UNKNOWN_INF;
      return;
    }
    CodecDataOutput cdo = new CodecDataOutput();
    encodePrefix(cdo, tableId);
    byte[] tablePrefix = cdo.toBytes();

    int res =
        FastByteComparisons.compareTo(
            tablePrefix,
            0,
            tablePrefix.length,
            rowKey,
            0,
            Math.min(rowKey.length, tablePrefix.length));

    if (res > 0) {
      outResult.status = Status.MIN;
      return;
    }
    if (res < 0) {
      outResult.status = Status.MAX;
      return;
    }

    CodecDataInput cdi = new CodecDataInput(rowKey);
    cdi.skipBytes(tablePrefix.length);
    if (cdi.available() == 8) {
      outResult.status = Status.EQUAL;
    } else if (cdi.available() < 8) {
      outResult.status = Status.LESS;
    } else {
      outResult.status = Status.GREATER;
    }
    outResult.handle = IntegerCodec.readPartialLong(cdi);
  }
}
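A sketch of the record-key layout and the overflow path codified above:

// 't' | int64(tableId) | '_' 'r' | int64(handle)
RowKey min = RowKey.createMin(1);                          // handle = Long.MIN_VALUE
RowKey beyond = RowKey.toRowKey(1, Long.MAX_VALUE).next(); // the imaginary beyond-max key
// beyond.next() throws TiClientInternalException("Handle overflow for Long MAX")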
@@ -1,84 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.key;

import static java.util.Objects.requireNonNull;

import org.tikv.codec.CodecDataInput;
import org.tikv.codec.CodecDataOutput;
import org.tikv.exception.TypeException;
import org.tikv.types.DataType;

public class TypedKey extends Key {
  private final DataType type;

  public TypedKey(Object val, DataType type, int prefixLength) {
    super(encodeKey(val, type, prefixLength));
    this.type = type;
  }

  public DataType getType() {
    return type;
  }

  public Object getValue() {
    CodecDataInput cdi = new CodecDataInput(value);
    return type.decode(cdi);
  }

  public static TypedKey toTypedKey(Object val, DataType type) {
    return toTypedKey(val, type, DataType.UNSPECIFIED_LEN);
  }

  /**
   * Maps a typed value into a TypedKey, encoding only the first prefixLength bytes. When
   * prefixLength is DataType.UNSPECIFIED_LEN, the full length of the value is encoded.
   *
   * @param val value
   * @param type type of value
   * @param prefixLength described above
   * @return an encoded TypedKey
   */
  public static TypedKey toTypedKey(Object val, DataType type, int prefixLength) {
    requireNonNull(type, "type is null");
    return new TypedKey(val, type, prefixLength);
  }

  private static byte[] encodeKey(Object val, DataType type, int prefixLength) {
    CodecDataOutput cdo = new CodecDataOutput();
    type.encodeKey(cdo, val, type, prefixLength);
    return cdo.toBytes();
  }

  public TypedKey next(int prefixLength) {
    Object val = getValue();
    if (val instanceof String) {
      return toTypedKey(nextValue(((String) val).getBytes()), type, prefixLength);
    } else if (val instanceof byte[]) {
      return toTypedKey(nextValue((byte[]) val), type, prefixLength);
    } else {
      throw new TypeException(
          "Type for TypedKey in next() function must be either String or byte array");
    }
  }

  @Override
  public String toString() {
    CodecDataInput cdi = new CodecDataInput(value);
    Object val = type.decode(cdi);
    return String.format("%s", val);
  }
}
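A sketch of prefix-truncated keys, matching the prefix-index handling in IndexRangeBuilder; `varcharType` stands in for whatever StringType instance the schema supplies:

TypedKey full = TypedKey.toTypedKey("bbc", varcharType);    // encodes the whole value
TypedKey pre2 = TypedKey.toTypedKey("bbc", varcharType, 2); // encodes only "bb"
TypedKey next = pre2.next(2); // the smallest key above every "bb"-prefixed value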
@@ -1,45 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.meta;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

/** This class maps TiDB's CIStr. For internal use only. */
@JsonIgnoreProperties(ignoreUnknown = true)
public class CIStr {
  private final String o; // original
  private final String l; // lowercase

  @JsonCreator
  private CIStr(@JsonProperty("O") String o, @JsonProperty("L") String l) {
    this.o = o;
    this.l = l;
  }

  public static CIStr newCIStr(String str) {
    return new CIStr(str, str.toLowerCase());
  }

  public String getO() {
    return o;
  }

  public String getL() {
    return l;
  }
}
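For reference, a sketch of the JSON shape this maps; TiDB schema payloads carry case-insensitive strings as O/L pairs, and Jackson (implied by the annotations) can bind to the private creator:

// {"O":"Col1","L":"col1"}  ->  getO() == "Col1", getL() == "col1"
CIStr name = new ObjectMapper().readValue("{\"O\":\"Col1\",\"L\":\"col1\"}", CIStr.class);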
@@ -1,273 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.meta;

import com.google.common.collect.ImmutableMap;
import java.util.Map;

public class Collation {
  public static int translate(String collation) {
    Integer code = collationMap.get(collation);
    if (code == null) {
      return DEF_COLLATION_CODE;
    }
    return code;
  }

  public static String translate(int code) {
    String collation = collationCodeMap.get(code);
    if (collation == null) {
      return "";
    }
    return collation;
  }

  public static final int DEF_COLLATION_CODE = 83;

  private static final Map<String, Integer> collationMap;
  private static final Map<Integer, String> collationCodeMap;

  static {
    collationMap =
        ImmutableMap.<String, Integer>builder()
            .put("big5_chinese_ci", 1)
            .put("latin2_czech_cs", 2)
            .put("dec8_swedish_ci", 3)
            .put("cp850_general_ci", 4)
            .put("latin1_german1_ci", 5)
            .put("hp8_english_ci", 6)
            .put("koi8r_general_ci", 7)
            .put("latin1_swedish_ci", 8)
            .put("latin2_general_ci", 9)
            .put("swe7_swedish_ci", 10)
            .put("ascii_general_ci", 11)
            .put("ujis_japanese_ci", 12)
            .put("sjis_japanese_ci", 13)
            .put("cp1251_bulgarian_ci", 14)
            .put("latin1_danish_ci", 15)
            .put("hebrew_general_ci", 16)
            .put("tis620_thai_ci", 18)
            .put("euckr_korean_ci", 19)
            .put("latin7_estonian_cs", 20)
            .put("latin2_hungarian_ci", 21)
            .put("koi8u_general_ci", 22)
            .put("cp1251_ukrainian_ci", 23)
            .put("gb2312_chinese_ci", 24)
            .put("greek_general_ci", 25)
            .put("cp1250_general_ci", 26)
            .put("latin2_croatian_ci", 27)
            .put("gbk_chinese_ci", 28)
            .put("cp1257_lithuanian_ci", 29)
            .put("latin5_turkish_ci", 30)
            .put("latin1_german2_ci", 31)
            .put("armscii8_general_ci", 32)
            .put("utf8_general_ci", 33)
            .put("cp1250_czech_cs", 34)
            .put("ucs2_general_ci", 35)
            .put("cp866_general_ci", 36)
            .put("keybcs2_general_ci", 37)
            .put("macce_general_ci", 38)
            .put("macroman_general_ci", 39)
            .put("cp852_general_ci", 40)
            .put("latin7_general_ci", 41)
            .put("latin7_general_cs", 42)
            .put("macce_bin", 43)
            .put("cp1250_croatian_ci", 44)
            .put("utf8mb4_general_ci", 45)
            .put("utf8mb4_bin", 46)
            .put("latin1_bin", 47)
            .put("latin1_general_ci", 48)
            .put("latin1_general_cs", 49)
            .put("cp1251_bin", 50)
            .put("cp1251_general_ci", 51)
            .put("cp1251_general_cs", 52)
            .put("macroman_bin", 53)
            .put("utf16_general_ci", 54)
            .put("utf16_bin", 55)
            .put("utf16le_general_ci", 56)
            .put("cp1256_general_ci", 57)
            .put("cp1257_bin", 58)
            .put("cp1257_general_ci", 59)
            .put("utf32_general_ci", 60)
            .put("utf32_bin", 61)
            .put("utf16le_bin", 62)
            .put("binary", 63)
            .put("armscii8_bin", 64)
            .put("ascii_bin", 65)
            .put("cp1250_bin", 66)
            .put("cp1256_bin", 67)
            .put("cp866_bin", 68)
            .put("dec8_bin", 69)
            .put("greek_bin", 70)
            .put("hebrew_bin", 71)
            .put("hp8_bin", 72)
            .put("keybcs2_bin", 73)
            .put("koi8r_bin", 74)
            .put("koi8u_bin", 75)
            .put("latin2_bin", 77)
            .put("latin5_bin", 78)
            .put("latin7_bin", 79)
            .put("cp850_bin", 80)
            .put("cp852_bin", 81)
            .put("swe7_bin", 82)
            .put("utf8_bin", 83)
            .put("big5_bin", 84)
            .put("euckr_bin", 85)
            .put("gb2312_bin", 86)
            .put("gbk_bin", 87)
            .put("sjis_bin", 88)
            .put("tis620_bin", 89)
            .put("ucs2_bin", 90)
            .put("ujis_bin", 91)
            .put("geostd8_general_ci", 92)
            .put("geostd8_bin", 93)
            .put("latin1_spanish_ci", 94)
            .put("cp932_japanese_ci", 95)
            .put("cp932_bin", 96)
            .put("eucjpms_japanese_ci", 97)
            .put("eucjpms_bin", 98)
            .put("cp1250_polish_ci", 99)
            .put("utf16_unicode_ci", 101)
            .put("utf16_icelandic_ci", 102)
            .put("utf16_latvian_ci", 103)
            .put("utf16_romanian_ci", 104)
            .put("utf16_slovenian_ci", 105)
            .put("utf16_polish_ci", 106)
            .put("utf16_estonian_ci", 107)
            .put("utf16_spanish_ci", 108)
            .put("utf16_swedish_ci", 109)
            .put("utf16_turkish_ci", 110)
            .put("utf16_czech_ci", 111)
            .put("utf16_danish_ci", 112)
            .put("utf16_lithuanian_ci", 113)
            .put("utf16_slovak_ci", 114)
            .put("utf16_spanish2_ci", 115)
            .put("utf16_roman_ci", 116)
            .put("utf16_persian_ci", 117)
            .put("utf16_esperanto_ci", 118)
            .put("utf16_hungarian_ci", 119)
            .put("utf16_sinhala_ci", 120)
            .put("utf16_german2_ci", 121)
            .put("utf16_croatian_ci", 122)
            .put("utf16_unicode_520_ci", 123)
            .put("utf16_vietnamese_ci", 124)
            .put("ucs2_unicode_ci", 128)
            .put("ucs2_icelandic_ci", 129)
            .put("ucs2_latvian_ci", 130)
            .put("ucs2_romanian_ci", 131)
            .put("ucs2_slovenian_ci", 132)
            .put("ucs2_polish_ci", 133)
            .put("ucs2_estonian_ci", 134)
            .put("ucs2_spanish_ci", 135)
            .put("ucs2_swedish_ci", 136)
            .put("ucs2_turkish_ci", 137)
            .put("ucs2_czech_ci", 138)
            .put("ucs2_danish_ci", 139)
            .put("ucs2_lithuanian_ci", 140)
            .put("ucs2_slovak_ci", 141)
            .put("ucs2_spanish2_ci", 142)
            .put("ucs2_roman_ci", 143)
            .put("ucs2_persian_ci", 144)
            .put("ucs2_esperanto_ci", 145)
            .put("ucs2_hungarian_ci", 146)
            .put("ucs2_sinhala_ci", 147)
            .put("ucs2_german2_ci", 148)
            .put("ucs2_croatian_ci", 149)
            .put("ucs2_unicode_520_ci", 150)
            .put("ucs2_vietnamese_ci", 151)
            .put("ucs2_general_mysql500_ci", 159)
            .put("utf32_unicode_ci", 160)
            .put("utf32_icelandic_ci", 161)
            .put("utf32_latvian_ci", 162)
            .put("utf32_romanian_ci", 163)
            .put("utf32_slovenian_ci", 164)
            .put("utf32_polish_ci", 165)
            .put("utf32_estonian_ci", 166)
            .put("utf32_spanish_ci", 167)
            .put("utf32_swedish_ci", 168)
            .put("utf32_turkish_ci", 169)
            .put("utf32_czech_ci", 170)
            .put("utf32_danish_ci", 171)
            .put("utf32_lithuanian_ci", 172)
            .put("utf32_slovak_ci", 173)
            .put("utf32_spanish2_ci", 174)
            .put("utf32_roman_ci", 175)
            .put("utf32_persian_ci", 176)
            .put("utf32_esperanto_ci", 177)
            .put("utf32_hungarian_ci", 178)
            .put("utf32_sinhala_ci", 179)
            .put("utf32_german2_ci", 180)
            .put("utf32_croatian_ci", 181)
            .put("utf32_unicode_520_ci", 182)
            .put("utf32_vietnamese_ci", 183)
            .put("utf8_unicode_ci", 192)
            .put("utf8_icelandic_ci", 193)
            .put("utf8_latvian_ci", 194)
            .put("utf8_romanian_ci", 195)
            .put("utf8_slovenian_ci", 196)
            .put("utf8_polish_ci", 197)
            .put("utf8_estonian_ci", 198)
            .put("utf8_spanish_ci", 199)
            .put("utf8_swedish_ci", 200)
            .put("utf8_turkish_ci", 201)
            .put("utf8_czech_ci", 202)
            .put("utf8_danish_ci", 203)
            .put("utf8_lithuanian_ci", 204)
            .put("utf8_slovak_ci", 205)
            .put("utf8_spanish2_ci", 206)
            .put("utf8_roman_ci", 207)
            .put("utf8_persian_ci", 208)
            .put("utf8_esperanto_ci", 209)
            .put("utf8_hungarian_ci", 210)
            .put("utf8_sinhala_ci", 211)
            .put("utf8_german2_ci", 212)
            .put("utf8_croatian_ci", 213)
            .put("utf8_unicode_520_ci", 214)
            .put("utf8_vietnamese_ci", 215)
            .put("utf8_general_mysql500_ci", 223)
            .put("utf8mb4_unicode_ci", 224)
            .put("utf8mb4_icelandic_ci", 225)
            .put("utf8mb4_latvian_ci", 226)
            .put("utf8mb4_romanian_ci", 227)
            .put("utf8mb4_slovenian_ci", 228)
            .put("utf8mb4_polish_ci", 229)
            .put("utf8mb4_estonian_ci", 230)
            .put("utf8mb4_spanish_ci", 231)
            .put("utf8mb4_swedish_ci", 232)
            .put("utf8mb4_turkish_ci", 233)
            .put("utf8mb4_czech_ci", 234)
            .put("utf8mb4_danish_ci", 235)
            .put("utf8mb4_lithuanian_ci", 236)
            .put("utf8mb4_slovak_ci", 237)
            .put("utf8mb4_spanish2_ci", 238)
            .put("utf8mb4_roman_ci", 239)
            .put("utf8mb4_persian_ci", 240)
            .put("utf8mb4_esperanto_ci", 241)
            .put("utf8mb4_hungarian_ci", 242)
            .put("utf8mb4_sinhala_ci", 243)
            .put("utf8mb4_german2_ci", 244)
            .put("utf8mb4_croatian_ci", 245)
            .put("utf8mb4_unicode_520_ci", 246)
            .put("utf8mb4_vietnamese_ci", 247)
            .build();

    ImmutableMap.Builder<Integer, String> builder = ImmutableMap.builder();
    for (String collation : collationMap.keySet()) {
      builder.put(collationMap.get(collation), collation);
    }
    collationCodeMap = builder.build();
  }
}
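A round-trip sketch, grounded in the map above: unknown names fall back to DEF_COLLATION_CODE (83, i.e. utf8_bin), while unknown codes yield an empty string.

int code = Collation.translate("utf8mb4_bin");    // 46
String name = Collation.translate(46);            // "utf8mb4_bin"
int fallback = Collation.translate("no_such_ci"); // 83 (utf8_bin)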
@@ -1,55 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.meta;

import org.tikv.exception.TiClientInternalException;

// We are not actually using either a real B-tree or a hash index;
// TiDB has its own way of indexing via key-value pairs.
public enum IndexType {
  IndexTypeInvalid(0),
  IndexTypeBtree(1),
  IndexTypeHash(2);

  private final int type;

  IndexType(int type) {
    this.type = type;
  }

  public static IndexType fromValue(int type) {
    for (IndexType e : IndexType.values()) {
      if (e.type == type) {
        return e;
      }
    }
    throw new TiClientInternalException("Invalid index type code: " + type);
  }

  public int getTypeCode() {
    return type;
  }

  @Override
  public String toString() {
    switch (this.type) {
      case 1:
        return "BTREE";
      case 2:
        return "HASH";
    }
    return "Invalid";
  }
}
@@ -1,46 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.meta;

import org.tikv.exception.TiClientInternalException;

public enum SchemaState {
  StateNone(0),
  StateDeleteOnly(1),
  StateWriteOnly(2),
  StateWriteReorganization(3),
  StateDeleteReorganization(4),
  StatePublic(5);

  private final int state;

  SchemaState(int state) {
    this.state = state;
  }

  public static SchemaState fromValue(int b) {
    for (SchemaState e : SchemaState.values()) {
      if (e.state == b) {
        return e;
      }
    }
    throw new TiClientInternalException("Invalid SchemaState code: " + b);
  }

  public int getStateCode() {
    return state;
  }
}
@@ -1,346 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.meta;

import static java.util.Objects.requireNonNull;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.ByteString;
import com.pingcap.tidb.tipb.ColumnInfo;
import java.io.Serializable;
import java.util.List;
import java.util.Objects;
import org.tikv.codec.CodecDataOutput;
import org.tikv.types.DataType;
import org.tikv.types.DataType.EncodeType;
import org.tikv.types.DataTypeFactory;
import org.tikv.types.IntegerType;

@JsonIgnoreProperties(ignoreUnknown = true)
public class TiColumnInfo implements Serializable {
  private final long id;
  private final String name;
  private final int offset;
  private final DataType type;
  private final SchemaState schemaState;
  private final String comment;
  private final boolean isPrimaryKey;
  private final String defaultValue;
  private final String originDefaultValue;

  public static TiColumnInfo getRowIdColumn(int offset) {
    return new TiColumnInfo(-1, "_tidb_rowid", offset, IntegerType.ROW_ID_TYPE, true);
  }

  @VisibleForTesting private static final int PK_MASK = 0x2;

  @JsonCreator
  public TiColumnInfo(
      @JsonProperty("id") long id,
      @JsonProperty("name") CIStr name,
      @JsonProperty("offset") int offset,
      @JsonProperty("type") InternalTypeHolder type,
      @JsonProperty("state") int schemaState,
      @JsonProperty("origin_default") String originalDefaultValue,
      @JsonProperty("default") String defaultValue,
      @JsonProperty("comment") String comment) {
    this.id = id;
    this.name = requireNonNull(name, "column name is null").getL();
    this.offset = offset;
    this.type = DataTypeFactory.of(requireNonNull(type, "type is null"));
    this.schemaState = SchemaState.fromValue(schemaState);
    this.comment = comment;
    this.defaultValue = defaultValue;
    this.originDefaultValue = originalDefaultValue;
    // The PK flag arguably should not be set on the type;
    // refactor against the original TiDB code.
    this.isPrimaryKey = (type.getFlag() & PK_MASK) > 0;
  }

  public TiColumnInfo(
      long id,
      String name,
      int offset,
      DataType type,
      SchemaState schemaState,
      String originalDefaultValue,
      String defaultValue,
      String comment) {
    this.id = id;
    this.name = requireNonNull(name, "column name is null").toLowerCase();
    this.offset = offset;
    this.type = requireNonNull(type, "data type is null");
    this.schemaState = schemaState;
    this.comment = comment;
    this.defaultValue = defaultValue;
    this.originDefaultValue = originalDefaultValue;
    this.isPrimaryKey = (type.getFlag() & PK_MASK) > 0;
  }

  public TiColumnInfo copyWithoutPrimaryKey() {
    InternalTypeHolder typeHolder = type.toTypeHolder();
    typeHolder.setFlag(type.getFlag() & (~TiColumnInfo.PK_MASK));
    DataType newType = DataTypeFactory.of(typeHolder);
    return new TiColumnInfo(
        this.id,
        this.name,
        this.offset,
        newType,
        this.schemaState,
        this.originDefaultValue,
        this.defaultValue,
        this.comment);
  }

  @VisibleForTesting
  public TiColumnInfo(long id, String name, int offset, DataType type, boolean isPrimaryKey) {
    this.id = id;
    this.name = requireNonNull(name, "column name is null").toLowerCase();
    this.offset = offset;
    this.type = requireNonNull(type, "data type is null");
    this.schemaState = SchemaState.StatePublic;
    this.comment = "";
    this.isPrimaryKey = isPrimaryKey;
    this.originDefaultValue = "1";
    this.defaultValue = "";
  }

  public long getId() {
    return this.id;
  }

  public String getName() {
    return this.name;
  }

  public boolean matchName(String name) {
    return this.name.equalsIgnoreCase(name);
  }

  public int getOffset() {
    return this.offset;
  }

  public DataType getType() {
    return type;
  }

  public SchemaState getSchemaState() {
    return schemaState;
  }

  public String getComment() {
    return comment;
  }

  public boolean isPrimaryKey() {
    return isPrimaryKey;
  }

  public String getDefaultValue() {
    return defaultValue;
  }

  public String getOriginDefaultValue() {
    return originDefaultValue;
  }

  public ByteString getOriginDefaultValueAsByteString() {
    CodecDataOutput cdo = new CodecDataOutput();
    type.encode(cdo, EncodeType.VALUE, type.getOriginDefaultValue(originDefaultValue));
    return cdo.toByteString();
  }

  @JsonIgnoreProperties(ignoreUnknown = true)
  public static class InternalTypeHolder {
    private int tp;
    private int flag;
    private long flen;
    private int decimal;
    private String charset;
    private String collate;
    private String defaultValue;
    private String originDefaultValue;
    private List<String> elems;

    public void setTp(int tp) {
      this.tp = tp;
    }

    public void setFlag(int flag) {
      this.flag = flag;
    }

    public void setFlen(long flen) {
      this.flen = flen;
    }

    public void setDecimal(int decimal) {
      this.decimal = decimal;
    }

    public void setCharset(String charset) {
      this.charset = charset;
    }

    public void setCollate(String collate) {
      this.collate = collate;
    }

    public void setDefaultValue(String defaultValue) {
      this.defaultValue = defaultValue;
    }

    public void setOriginDefaultValue(String originDefaultValue) {
      this.originDefaultValue = originDefaultValue;
    }

    public void setElems(List<String> elems) {
      this.elems = elems;
    }

    interface Builder<E extends DataType> {
      E build(InternalTypeHolder holder);
    }

    @JsonCreator
    public InternalTypeHolder(
        @JsonProperty("Tp") int tp,
        @JsonProperty("Flag") int flag,
        @JsonProperty("Flen") long flen,
        @JsonProperty("Decimal") int decimal,
        @JsonProperty("Charset") String charset,
        @JsonProperty("origin_default") String originalDefaultValue,
        @JsonProperty("default") String defaultValue,
        @JsonProperty("Collate") String collate,
        @JsonProperty("Elems") List<String> elems) {
      this.tp = tp;
      this.flag = flag;
      this.flen = flen;
      this.decimal = decimal;
      this.charset = charset;
      this.collate = collate;
      this.defaultValue = defaultValue;
      this.originDefaultValue = originalDefaultValue;
      this.elems = elems;
    }

    public InternalTypeHolder(ColumnInfo c) {
      this.tp = c.getTp();
      this.flag = c.getFlag();
      this.flen = c.getColumnLen();
      this.decimal = c.getDecimal();
      this.charset = "";
      this.collate = Collation.translate(c.getCollation());
      this.elems = c.getElemsList();
      this.defaultValue = c.getDefaultVal().toStringUtf8();
      // TODO: we may need to write a function that derives the origin default
      // value from this string.
      this.originDefaultValue = "";
    }

    public int getTp() {
      return tp;
    }

    public int getFlag() {
      return flag;
    }

    public long getFlen() {
      return flen;
    }

    public int getDecimal() {
      return decimal;
    }

    public String getCharset() {
      return charset;
    }

    public String getCollate() {
      return collate;
    }

    public List<String> getElems() {
      return elems;
    }

    public String getDefaultValue() {
      return defaultValue;
    }

    public String getOriginDefaultValue() {
      return originDefaultValue;
    }
  }

  TiIndexColumn toFakeIndexColumn() {
    // We don't use the original length of the column since, for a clustered index column,
    // it is always a full index instead of a prefix index
|
||||
return new TiIndexColumn(CIStr.newCIStr(getName()), getOffset(), DataType.UNSPECIFIED_LEN);
|
||||
}
|
||||
|
||||
TiIndexColumn toIndexColumn() {
|
||||
return new TiIndexColumn(CIStr.newCIStr(getName()), getOffset(), getType().getLength());
|
||||
}
|
||||
|
||||
public ColumnInfo toProto(TiTableInfo table) {
|
||||
return toProtoBuilder(table).build();
|
||||
}
|
||||
|
||||
ColumnInfo.Builder toProtoBuilder(TiTableInfo table) {
|
||||
return ColumnInfo.newBuilder()
|
||||
.setColumnId(id)
|
||||
.setTp(type.getTypeCode())
|
||||
.setCollation(type.getCollationCode())
|
||||
.setColumnLen((int) type.getLength())
|
||||
.setDecimal(type.getDecimal())
|
||||
.setFlag(type.getFlag())
|
||||
.setDefaultVal(getOriginDefaultValueAsByteString())
|
||||
.setPkHandle(table.isPkHandle() && isPrimaryKey())
|
||||
.addAllElems(type.getElems());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (other == this) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!(other instanceof TiColumnInfo)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
TiColumnInfo col = (TiColumnInfo) other;
|
||||
return Objects.equals(id, col.id)
|
||||
&& Objects.equals(name, col.name)
|
||||
&& Objects.equals(type, col.type)
|
||||
&& Objects.equals(schemaState, col.schemaState)
|
||||
&& isPrimaryKey == col.isPrimaryKey
|
||||
&& Objects.equals(defaultValue, col.defaultValue)
|
||||
&& Objects.equals(originDefaultValue, col.originDefaultValue);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(
|
||||
id, name, type, schemaState, isPrimaryKey, defaultValue, originDefaultValue);
|
||||
}
|
||||
}
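
A minimal construction sketch using the type holder and the testing constructor above (editor's note: the values are hypothetical, and it is assumed here that type code 3 — MySQL's LONG — maps to an integer DataType in DataTypeFactory):

    TiColumnInfo.InternalTypeHolder holder =
        new TiColumnInfo.InternalTypeHolder(
            3, 0, 11, 0, "utf8", null, null, "utf8_bin", null);
    DataType intType = DataTypeFactory.of(holder);
    TiColumnInfo col = new TiColumnInfo(1, "age", 0, intType, false);
    // matchName is case-insensitive, so col.matchName("AGE") holds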
@@ -1,822 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.meta;

import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
import static org.tikv.predicates.PredicateUtils.mergeCNFExpressions;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.pingcap.tidb.tipb.*;
import java.io.*;
import java.util.*;
import java.util.stream.Collectors;
import org.tikv.codec.KeyUtils;
import org.tikv.exception.DAGRequestException;
import org.tikv.exception.TiClientInternalException;
import org.tikv.expression.ByItem;
import org.tikv.expression.ColumnRef;
import org.tikv.expression.Expression;
import org.tikv.expression.visitor.ExpressionTypeCoercer;
import org.tikv.expression.visitor.MetaResolver;
import org.tikv.expression.visitor.ProtoConverter;
import org.tikv.key.RowKey;
import org.tikv.kvproto.Coprocessor;
import org.tikv.types.DataType;
import org.tikv.util.KeyRangeUtils;
import org.tikv.util.Pair;

/**
 * Type TiDAGRequest.
 *
 * <p>Used for constructing a new DAG request to TiKV.
 */
public class TiDAGRequest implements Serializable {
  public static class Builder {
    private List<String> requiredCols = new ArrayList<>();
    private List<Expression> filters = new ArrayList<>();
    private List<ByItem> orderBys = new ArrayList<>();
    private List<Coprocessor.KeyRange> ranges = new ArrayList<>();
    private TiTableInfo tableInfo;
    private int limit;
    private long startTs;

    public static Builder newBuilder() {
      return new Builder();
    }

    public Builder setFullTableScan(TiTableInfo tableInfo) {
      requireNonNull(tableInfo);
      setTableInfo(tableInfo);
      RowKey start = RowKey.createMin(tableInfo.getId());
      RowKey end = RowKey.createBeyondMax(tableInfo.getId());
      ranges.add(KeyRangeUtils.makeCoprocRange(start.toByteString(), end.toByteString()));
      return this;
    }

    public Builder setLimit(int limit) {
      this.limit = limit;
      return this;
    }

    public Builder setTableInfo(TiTableInfo tableInfo) {
      this.tableInfo = tableInfo;
      return this;
    }

    public Builder addRequiredCols(String... cols) {
      this.requiredCols.addAll(Arrays.asList(cols));
      return this;
    }

    public Builder addRequiredCols(List<String> cols) {
      this.requiredCols.addAll(cols);
      return this;
    }

    public Builder addFilter(Expression filter) {
      this.filters.add(filter);
      return this;
    }

    public Builder addOrderBy(ByItem item) {
      this.orderBys.add(item);
      return this;
    }

    public Builder setStartTs(long ts) {
      this.startTs = ts;
      return this;
    }

    public TiDAGRequest build(PushDownType pushDownType) {
      TiDAGRequest req = new TiDAGRequest(pushDownType);
      req.setTableInfo(tableInfo);
      req.addRanges(ranges);
      filters.forEach(req::addFilter);
      if (!orderBys.isEmpty()) {
        orderBys.forEach(req::addOrderByItem);
      }
      if (limit != 0) {
        req.setLimit(limit);
      }
      requiredCols.forEach(c -> req.addRequiredColumn(ColumnRef.create(c)));
      req.setStartTs(startTs);

      req.resolve();
      return req;
    }
  }

  public TiDAGRequest(PushDownType pushDownType) {
    this.pushDownType = pushDownType;
  }

  public TiDAGRequest(PushDownType pushDownType, int timeZoneOffset) {
    this(pushDownType);
    this.timeZoneOffset = timeZoneOffset;
  }

  public enum TruncateMode {
    IgnoreTruncation(0x1),
    TruncationAsWarning(0x2);

    private final long mask;

    TruncateMode(long mask) {
      this.mask = mask;
    }

    public long mask(long flags) {
      return flags | mask;
    }
  }
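
  // Editor's note: the masks accumulate via bitwise OR, e.g.
  //   long flags = TruncateMode.IgnoreTruncation.mask(0L);    // 0x1
  //   flags = TruncateMode.TruncationAsWarning.mask(flags);   // 0x3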

  /** Whether we use streaming to push down the request */
  public enum PushDownType {
    STREAMING,
    NORMAL
  }

  /** Predefined executor priority map. */
  private static final Map<ExecType, Integer> EXEC_TYPE_PRIORITY_MAP =
      ImmutableMap.<ExecType, Integer>builder()
          .put(ExecType.TypeTableScan, 0)
          .put(ExecType.TypeIndexScan, 0)
          .put(ExecType.TypeSelection, 1)
          .put(ExecType.TypeAggregation, 2)
          .put(ExecType.TypeTopN, 3)
          .put(ExecType.TypeLimit, 4)
          .build();

  private TiTableInfo tableInfo;
  private TiIndexInfo indexInfo;
  private final List<ColumnRef> fields = new ArrayList<>();
  private final List<Expression> filters = new ArrayList<>();
  private final List<ByItem> groupByItems = new ArrayList<>();
  private final List<ByItem> orderByItems = new ArrayList<>();
  private List<Expression> pushdownFilters = null;
  // Systems like Spark have different type promotion rules,
  // so we need a cast to the target type when one is given.
  private final List<Pair<Expression, DataType>> aggregates = new ArrayList<>();
  private final List<Coprocessor.KeyRange> keyRanges = new ArrayList<>();
  // If index scanning is not possible for this request in some scenario, we downgrade it
  // to a table scan and use downGradeRanges instead of the index-scan ranges stored in
  // keyRanges, along with downgradeFilters, to perform the table scan.
  private List<Expression> downgradeFilters = new ArrayList<>();

  private int limit;
  private int timeZoneOffset;
  private long flags;
  private long startTs;
  private Expression having;
  private boolean distinct;
  private boolean handleNeeded;
  private boolean isDoubleRead;
  private final PushDownType pushDownType;
  private IdentityHashMap<Expression, DataType> typeMap;
  private double estimatedCount = -1;

  private static ColumnInfo handleColumn =
      ColumnInfo.newBuilder()
          .setColumnId(-1)
          .setPkHandle(true)
          // We haven't changed the field name in the protobuf file, but we need
          // to set this to true in order to retrieve the handle, so the name
          // 'setPkHandle' may sound strange.
          .build();

  private List<Expression> getAllExpressions() {
    ImmutableList.Builder<Expression> builder = ImmutableList.builder();
    builder.addAll(getFields());
    builder.addAll(getFilters());
    builder.addAll(getAggregates());
    getGroupByItems().forEach(item -> builder.add(item.getExpr()));
    getOrderByItems().forEach(item -> builder.add(item.getExpr()));
    if (having != null) {
      builder.add(having);
    }
    return builder.build();
  }

  public DataType getExpressionType(Expression expression) {
    requireNonNull(typeMap, "request is not resolved");
    return typeMap.get(expression);
  }

  public void resolve() {
    MetaResolver resolver = new MetaResolver(tableInfo);
    ExpressionTypeCoercer inferrer = new ExpressionTypeCoercer();
    resolver.resolve(getAllExpressions());
    inferrer.infer(getAllExpressions());
    typeMap = inferrer.getTypeMap();
  }

  /**
   * Unify indexScan and tableScan building logic since they are very much alike. A DAGRequest for
   * an IndexScan should also contain filters and aggregations, so we can reuse this part of the
   * logic.
   *
   * <p>A DAGRequest is made up of a chain of executors with a strict order: TableScan/IndexScan >
   * Selection > Aggregation > TopN/Limit. A DAGRequest must contain one and only one TableScan or
   * IndexScan.
   *
   * @param isIndexScan whether the dagRequest to build is an IndexScan
   * @return final DAGRequest built
   */
  public DAGRequest buildScan(boolean isIndexScan) {
    checkArgument(startTs != 0, "timestamp is 0");
    DAGRequest.Builder dagRequestBuilder = DAGRequest.newBuilder();
    Executor.Builder executorBuilder = Executor.newBuilder();
    IndexScan.Builder indexScanBuilder = IndexScan.newBuilder();
    TableScan.Builder tblScanBuilder = TableScan.newBuilder();
    // find a column's offset in fields
    Map<ColumnRef, Integer> colOffsetInFieldMap = new HashMap<>();
    // find a column's position in index
    Map<TiColumnInfo, Integer> colPosInIndexMap = new HashMap<>();

    if (isIndexScan) {
      // IndexScan
      if (indexInfo == null) {
        throw new TiClientInternalException("Index is empty for index scan");
      }
      List<TiColumnInfo> columnInfoList = tableInfo.getColumns();
      boolean hasPk = false;
      // We extract index column info
      List<Integer> indexColOffsets =
          indexInfo
              .getIndexColumns()
              .stream()
              .map(TiIndexColumn::getOffset)
              .collect(Collectors.toList());

      int idxPos = 0;
      // for the index scan builder, columns are added in index order
      for (Integer idx : indexColOffsets) {
        TiColumnInfo tiColumnInfo = columnInfoList.get(idx);
        ColumnInfo columnInfo = tiColumnInfo.toProto(tableInfo);
        colPosInIndexMap.put(tiColumnInfo, idxPos++);

        ColumnInfo.Builder colBuilder = ColumnInfo.newBuilder(columnInfo);
        if (columnInfo.getColumnId() == -1) {
          hasPk = true;
          colBuilder.setPkHandle(true);
        }
        indexScanBuilder.addColumns(colBuilder);
      }

      if (isDoubleRead()) {
        // double read case
        if (!hasPk) {
          indexScanBuilder.addColumns(handleColumn);
        }

        int colCount = indexScanBuilder.getColumnsCount();
        // double read case: need to retrieve the handle
        dagRequestBuilder.addOutputOffsets(colCount != 0 ? colCount - 1 : 0);
      } else {
        int colCount = indexScanBuilder.getColumnsCount();
        boolean pkIsNeeded = false;
        // =================== IMPORTANT ======================
        // offsets for the dagRequest should be in accordance with fields
        for (ColumnRef col : getFields()) {
          Integer pos = colPosInIndexMap.get(col.getColumnInfo());
          if (pos != null) {
            TiColumnInfo columnInfo = columnInfoList.get(indexColOffsets.get(pos));
            if (col.getColumnInfo().equals(columnInfo)) {
              dagRequestBuilder.addOutputOffsets(pos);
              colOffsetInFieldMap.put(col, pos);
            }
          }
          // if a column in fields is not contained in the selected index,
          // logically it must be the pk column and
          // pkIsHandle must be true. Extra check here.
          else if (col.getColumnInfo().isPrimaryKey() && tableInfo.isPkHandle()) {
            pkIsNeeded = true;
            // offset should be processed for each primary key encountered
            dagRequestBuilder.addOutputOffsets(colCount);
            // for index scan, column offsets must be in the order index -> handle
            colOffsetInFieldMap.put(col, indexColOffsets.size());
          } else {
            throw new DAGRequestException(
                "columns other than primary key and index key exist in fields while index single read: "
                    + col.getName());
          }
        }
        // pk is not included in the index but is still needed
        if (pkIsNeeded) {
          indexScanBuilder.addColumns(handleColumn);
        }
      }
      executorBuilder.setTp(ExecType.TypeIndexScan);

      indexScanBuilder.setTableId(tableInfo.getId()).setIndexId(indexInfo.getId());
      dagRequestBuilder.addExecutors(executorBuilder.setIdxScan(indexScanBuilder).build());
    } else {
      // TableScan
      executorBuilder.setTp(ExecType.TypeTableScan);
      tblScanBuilder.setTableId(tableInfo.getId());
      // Step 1. Add columns to the first executor
      for (int i = 0; i < getFields().size(); i++) {
        ColumnRef col = getFields().get(i);
        tblScanBuilder.addColumns(col.getColumnInfo().toProto(tableInfo));
        colOffsetInFieldMap.put(col, i);
      }
      // Currently, according to TiKV's implementation, if the handle
      // is needed, we should add an extra column with an ID of -1
      // to the TableScan executor
      if (isHandleNeeded()) {
        tblScanBuilder.addColumns(handleColumn);
      }
      dagRequestBuilder.addExecutors(executorBuilder.setTblScan(tblScanBuilder));

      // column offsets should be in accordance with fields
      for (int i = 0; i < getFields().size(); i++) {
        dagRequestBuilder.addOutputOffsets(i);
      }

      // if the handle is needed, we should append one output offset
      if (isHandleNeeded()) {
        dagRequestBuilder.addOutputOffsets(tableInfo.getColumns().size());
      }
    }

    if (!isIndexScan || (isIndexScan() && !isDoubleRead())) {
      // clear executorBuilder
      executorBuilder.clear();

      // Step 2. Add the others
      // DO NOT EDIT EXPRESSION CONSTRUCTION ORDER
      // Or make sure the construction order is as below:
      // TableScan/IndexScan > Selection > Aggregation > TopN/Limit
      Expression whereExpr = mergeCNFExpressions(getFilters());
      if (whereExpr != null) {
        executorBuilder.setTp(ExecType.TypeSelection);
        dagRequestBuilder.addExecutors(
            executorBuilder.setSelection(
                Selection.newBuilder()
                    .addConditions(ProtoConverter.toProto(whereExpr, colOffsetInFieldMap))));
        executorBuilder.clear();
      }

      if (!getGroupByItems().isEmpty() || !getAggregates().isEmpty()) {
        Aggregation.Builder aggregationBuilder = Aggregation.newBuilder();
        getGroupByItems()
            .forEach(
                tiByItem ->
                    aggregationBuilder.addGroupBy(
                        ProtoConverter.toProto(tiByItem.getExpr(), colOffsetInFieldMap)));
        getAggregates()
            .forEach(
                tiExpr ->
                    aggregationBuilder.addAggFunc(
                        ProtoConverter.toProto(tiExpr, colOffsetInFieldMap)));
        executorBuilder.setTp(ExecType.TypeAggregation);
        dagRequestBuilder.addExecutors(executorBuilder.setAggregation(aggregationBuilder));
        executorBuilder.clear();
      }

      if (!getOrderByItems().isEmpty()) {
        TopN.Builder topNBuilder = TopN.newBuilder();
        getOrderByItems()
            .forEach(
                tiByItem ->
                    topNBuilder.addOrderBy(
                        com.pingcap.tidb.tipb.ByItem.newBuilder()
                            .setExpr(
                                ProtoConverter.toProto(tiByItem.getExpr(), colOffsetInFieldMap))
                            .setDesc(tiByItem.isDesc())));
        executorBuilder.setTp(ExecType.TypeTopN);
        topNBuilder.setLimit(getLimit());
        dagRequestBuilder.addExecutors(executorBuilder.setTopN(topNBuilder));
        executorBuilder.clear();
      } else if (getLimit() != 0) {
        Limit.Builder limitBuilder = Limit.newBuilder();
        limitBuilder.setLimit(getLimit());
        executorBuilder.setTp(ExecType.TypeLimit);
        dagRequestBuilder.addExecutors(executorBuilder.setLimit(limitBuilder));
        executorBuilder.clear();
      }
    }

    DAGRequest request =
        dagRequestBuilder
            .setTimeZoneOffset(timeZoneOffset)
            .setFlags(flags)
            .setStartTs(startTs)
            .build();

    validateRequest(request);

    return request;
  }
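
  // Editor's note: for a table scan with a filter, an aggregation, and a limit,
  // the executor chain built above is, in order:
  //   TableScan -> Selection -> Aggregation -> Limit
  // (TopN replaces Limit when order-by items are present); any other order is
  // rejected by validateRequest below.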

  /**
   * Check if a DAG request is valid.
   *
   * <p>Note: when constructing a DAG request, an executor with an ExecType of higher priority
   * should always be placed before those of lower priority.
   *
   * @param dagRequest Request DAG.
   */
  private void validateRequest(DAGRequest dagRequest) {
    requireNonNull(dagRequest);
    // A DAG request must have at least one executor.
    if (dagRequest.getExecutorsCount() < 1) {
      throw new DAGRequestException("Invalid executors count:" + dagRequest.getExecutorsCount());
    }

    ExecType formerType = dagRequest.getExecutors(0).getTp();
    if (formerType != ExecType.TypeTableScan && formerType != ExecType.TypeIndexScan) {
      throw new DAGRequestException(
          "Invalid first executor type:"
              + formerType
              + ", must be one of TypeTableScan or TypeIndexScan");
    }

    for (int i = 1; i < dagRequest.getExecutorsCount(); i++) {
      ExecType currentType = dagRequest.getExecutors(i).getTp();
      if (EXEC_TYPE_PRIORITY_MAP.get(currentType) < EXEC_TYPE_PRIORITY_MAP.get(formerType)) {
        throw new DAGRequestException("Invalid executor priority.");
      }
      formerType = currentType;
    }
  }

  public TiDAGRequest setTableInfo(TiTableInfo tableInfo) {
    this.tableInfo = requireNonNull(tableInfo, "tableInfo is null");
    return this;
  }

  public TiTableInfo getTableInfo() {
    return this.tableInfo;
  }

  public TiDAGRequest setIndexInfo(TiIndexInfo indexInfo) {
    this.indexInfo = requireNonNull(indexInfo, "indexInfo is null");
    return this;
  }

  public TiIndexInfo getIndexInfo() {
    return indexInfo;
  }

  public void clearIndexInfo() {
    indexInfo = null;
  }

  public int getLimit() {
    return limit;
  }

  /**
   * Add a limit clause to the select query.
   *
   * @param limit an integer.
   * @return this TiDAGRequest
   */
  public TiDAGRequest setLimit(int limit) {
    this.limit = limit;
    return this;
  }

  /**
   * Set the timezone offset.
   *
   * @param timeZoneOffset timezone offset
   * @return a TiDAGRequest
   */
  public TiDAGRequest setTimeZoneOffset(int timeZoneOffset) {
    this.timeZoneOffset = timeZoneOffset;
    return this;
  }

  int getTimeZoneOffset() {
    return timeZoneOffset;
  }

  /**
   * Set the truncate mode.
   *
   * @param mode truncate mode
   * @return a TiDAGRequest
   */
  public TiDAGRequest setTruncateMode(TiDAGRequest.TruncateMode mode) {
    flags = requireNonNull(mode, "mode is null").mask(flags);
    return this;
  }

  @VisibleForTesting
  public long getFlags() {
    return flags;
  }

  /**
   * Set the start timestamp for the transaction.
   *
   * @param startTs timestamp
   * @return a TiDAGRequest
   */
  public TiDAGRequest setStartTs(long startTs) {
    this.startTs = startTs;
    return this;
  }

  long getStartTs() {
    return startTs;
  }

  /**
   * Set the having clause of the select query.
   *
   * @param having an expression representing the HAVING clause
   * @return a TiDAGRequest
   */
  public TiDAGRequest setHaving(Expression having) {
    this.having = requireNonNull(having, "having is null");
    return this;
  }

  public TiDAGRequest setDistinct(boolean distinct) {
    this.distinct = distinct;
    return this;
  }

  public boolean isDistinct() {
    return distinct;
  }

  public TiDAGRequest addAggregate(Expression expr, DataType targetType) {
    requireNonNull(expr, "aggregation expr is null");
    aggregates.add(Pair.create(expr, targetType));
    return this;
  }

  public List<Expression> getAggregates() {
    return aggregates.stream().map(p -> p.first).collect(Collectors.toList());
  }

  public List<Pair<Expression, DataType>> getAggregatePairs() {
    return aggregates;
  }

  /**
   * Add an order-by clause to the select query.
   *
   * @param byItem a ByItem.
   * @return this TiDAGRequest
   */
  public TiDAGRequest addOrderByItem(ByItem byItem) {
    orderByItems.add(requireNonNull(byItem, "byItem is null"));
    return this;
  }

  List<ByItem> getOrderByItems() {
    return orderByItems;
  }

  /**
   * Add a group-by clause to the select query.
   *
   * @param byItem a ByItem
   * @return this TiDAGRequest
   */
  public TiDAGRequest addGroupByItem(ByItem byItem) {
    groupByItems.add(requireNonNull(byItem, "byItem is null"));
    return this;
  }

  public List<ByItem> getGroupByItems() {
    return groupByItems;
  }

  /**
   * Field is not supported in TiDB yet, so here we simply allow TiColumnRef instead of TiExpr as
   * in the SelectRequest proto.
   *
   * <p>This interface allows duplicate columns, and it is the user's responsibility to
   * deduplicate, since we need the exact order and items preserved during decoding.
   *
   * @param column the column referred to during selectReq
   */
  public TiDAGRequest addRequiredColumn(ColumnRef column) {
    fields.add(requireNonNull(column, "columnRef is null"));
    return this;
  }

  public List<ColumnRef> getFields() {
    return fields;
  }

  /**
   * Add key ranges to the scan.
   *
   * @param ranges key ranges of the scan
   */
  public TiDAGRequest addRanges(List<Coprocessor.KeyRange> ranges) {
    keyRanges.addAll(requireNonNull(ranges, "KeyRange is null"));
    return this;
  }

  public void resetFilters(List<Expression> filters) {
    this.filters.clear();
    this.filters.addAll(filters);
  }

  public List<Coprocessor.KeyRange> getRanges() {
    return keyRanges;
  }

  public TiDAGRequest addFilter(Expression filter) {
    this.filters.add(requireNonNull(filter, "filters expr is null"));
    return this;
  }

  public List<Expression> getDowngradeFilters() {
    return downgradeFilters;
  }

  public TiDAGRequest addDowngradeFilter(Expression filter) {
    this.downgradeFilters.add(requireNonNull(filter, "downgrade filter is null"));
    return this;
  }

  /**
   * Check whether the DAG request has any aggregate expression.
   *
   * @return the boolean
   */
  public boolean hasAggregate() {
    return !getAggregates().isEmpty();
  }

  /**
   * Check whether the DAG request has any group-by expression.
   *
   * @return the boolean
   */
  public boolean hasGroupBy() {
    return !getGroupByItems().isEmpty();
  }

  public List<Expression> getFilters() {
    return filters;
  }

  /**
   * Returns whether the handle is needed.
   *
   * @return the boolean
   */
  public boolean isHandleNeeded() {
    return handleNeeded;
  }

  /**
   * Sets handle needed.
   *
   * @param handleNeeded the handle needed
   */
  public void setHandleNeeded(boolean handleNeeded) {
    this.handleNeeded = handleNeeded;
  }

  /**
   * Returns whether double read is needed.
   *
   * @return boolean
   */
  public boolean isDoubleRead() {
    return isDoubleRead;
  }

  /**
   * Sets isDoubleRead.
   *
   * @param isDoubleRead whether this is a double read
   */
  public void setIsDoubleRead(boolean isDoubleRead) {
    this.isDoubleRead = isDoubleRead;
  }

  /**
   * Returns whether this request is an index scan.
   *
   * @return true iff indexInfo is provided, false otherwise
   */
  public boolean isIndexScan() {
    return indexInfo != null;
  }

  /**
   * Whether we use streaming processing to retrieve data.
   *
   * @return push down type.
   */
  public PushDownType getPushDownType() {
    return pushDownType;
  }

  /** Set the estimated row count that will be fetched by this request. */
  public void setEstimatedCount(double estimatedCount) {
    this.estimatedCount = estimatedCount;
  }

  /** Get the estimated row count that will be fetched by this request. */
  public double getEstimatedCount() {
    return estimatedCount;
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    if (tableInfo != null) {
      sb.append(String.format("[table: %s] ", tableInfo.getName()));
    }

    if (indexInfo != null) {
      sb.append(String.format("[Index: %s] ", indexInfo.getName()));
    }

    if (!getFields().isEmpty()) {
      sb.append(", Columns: ");
      Joiner.on(", ").skipNulls().appendTo(sb, getFields());
    }

    if (!getDowngradeFilters().isEmpty()) {
      // should be called after all parameters are set
      if (pushdownFilters == null) {
        pushdownFilters = new ArrayList<>(getDowngradeFilters());
        pushdownFilters.removeAll(new HashSet<>(getFilters()));
      }
      if (!pushdownFilters.isEmpty()) {
        sb.append(", Pushdown Filter: ");
        Joiner.on(", ").skipNulls().appendTo(sb, pushdownFilters);
      }
    }

    if (!getFilters().isEmpty()) {
      sb.append(", Residual Filter: ");
      Joiner.on(", ").skipNulls().appendTo(sb, getFilters());
    }

    // Key ranges might also be useful
    if (!getRanges().isEmpty()) {
      sb.append(", KeyRange: ");
      getRanges().forEach(x -> sb.append(KeyUtils.formatBytes(x)));
    }

    if (!getAggregates().isEmpty()) {
      sb.append(", Aggregates: ");
      Joiner.on(", ").skipNulls().appendTo(sb, getAggregates());
    }

    if (!getGroupByItems().isEmpty()) {
      sb.append(", Group By: ");
      Joiner.on(", ").skipNulls().appendTo(sb, getGroupByItems());
    }

    if (!getOrderByItems().isEmpty()) {
      sb.append(", Order By: ");
      Joiner.on(", ").skipNulls().appendTo(sb, getOrderByItems());
    }

    if (getLimit() != 0) {
      sb.append(", Limit: ");
      sb.append("[").append(limit).append("]");
    }
    return sb.toString();
  }

  public TiDAGRequest copy() {
    try {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      ObjectOutputStream oos = new ObjectOutputStream(baos);
      oos.writeObject(this);
      ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
      ObjectInputStream ois = new ObjectInputStream(bais);
      return ((TiDAGRequest) ois.readObject());
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
}
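
A minimal usage sketch of the builder API above (editor's note: `tableInfo`, `filterExpr`, and `startTs` are hypothetical placeholders; the methods and PushDownType values are the ones defined in this class):

    TiDAGRequest dagRequest =
        TiDAGRequest.Builder.newBuilder()
            .setFullTableScan(tableInfo)
            .addRequiredCols("c1", "c2")
            .addFilter(filterExpr)
            .setLimit(100)
            .setStartTs(startTs)
            .build(TiDAGRequest.PushDownType.NORMAL);
    DAGRequest proto = dagRequest.buildScan(false); // false = table scan, not index scan

build() resolves column references and expression types against tableInfo before returning, and buildScan(false) then emits the executor chain checked by validateRequest.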
@@ -1,109 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.meta;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;

@JsonIgnoreProperties(ignoreUnknown = true)
public class TiDBInfo {
  private final long id;
  private final String name;
  private final String charset;
  private final String collate;
  private final List<TiTableInfo> tables;
  private final SchemaState schemaState;

  @JsonCreator
  public TiDBInfo(
      @JsonProperty("id") long id,
      @JsonProperty("db_name") CIStr name,
      @JsonProperty("charset") String charset,
      @JsonProperty("collate") String collate,
      @JsonProperty("-") List<TiTableInfo> tables,
      @JsonProperty("state") int schemaState) {
    this.id = id;
    this.name = name.getL();
    this.charset = charset;
    this.collate = collate;
    this.tables = tables;
    this.schemaState = SchemaState.fromValue(schemaState);
  }

  private TiDBInfo(
      long id,
      String name,
      String charset,
      String collate,
      List<TiTableInfo> tables,
      SchemaState schemaState) {
    this.id = id;
    this.name = name;
    this.charset = charset;
    this.collate = collate;
    this.tables = tables;
    this.schemaState = schemaState;
  }

  public TiDBInfo rename(String newName) {
    return new TiDBInfo(id, newName, charset, collate, tables, schemaState);
  }

  public long getId() {
    return id;
  }

  public String getName() {
    return name;
  }

  public String getCharset() {
    return charset;
  }

  public String getCollate() {
    return collate;
  }

  public List<TiTableInfo> getTables() {
    return tables;
  }

  SchemaState getSchemaState() {
    return schemaState;
  }

  @Override
  public boolean equals(Object other) {
    if (other == this) {
      return true;
    }
    if (!(other instanceof TiDBInfo)) {
      return false;
    }
    TiDBInfo otherDB = (TiDBInfo) other;
    return otherDB.getId() == getId() && otherDB.getName().equals(getName());
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = prime + Long.hashCode(getId());
    return result * prime + getName().hashCode();
  }
}
@@ -1,65 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.meta;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.io.Serializable;
import org.tikv.types.DataType;

@JsonIgnoreProperties(ignoreUnknown = true)
public class TiIndexColumn implements Serializable {
  private String name;
  private int offset;
  private long length;

  @JsonCreator
  public TiIndexColumn(
      @JsonProperty("name") CIStr name,
      @JsonProperty("offset") int offset,
      @JsonProperty("length") long length) {
    this.name = name.getL();
    this.offset = offset;
    this.length = length;
  }

  public String getName() {
    return name;
  }

  public int getOffset() {
    return offset;
  }

  public long getLength() {
    return length;
  }

  public boolean isPrefixIndex() {
    return length != DataType.UNSPECIFIED_LEN;
  }

  public boolean matchName(String otherName) {
    return name.equalsIgnoreCase(otherName);
  }

  @Override
  public String toString() {
    return String.format(
        "%s {name: %s, offset: %d, length: %d}", getClass().getSimpleName(), name, offset, length);
  }
}
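
A brief illustration of the prefix-index check above (editor's note: the values are hypothetical; UNSPECIFIED_LEN is the sentinel from org.tikv.types.DataType used in this class):

    TiIndexColumn full = new TiIndexColumn(CIStr.newCIStr("c1"), 0, DataType.UNSPECIFIED_LEN);
    TiIndexColumn prefix = new TiIndexColumn(CIStr.newCIStr("c2"), 1, 10);
    // full.isPrefixIndex() == false; prefix.isPrefixIndex() == true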
@@ -1,167 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.meta;

import static java.util.Objects.requireNonNull;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.pingcap.tidb.tipb.ColumnInfo;
import com.pingcap.tidb.tipb.IndexInfo;
import java.io.Serializable;
import java.util.List;
import java.util.stream.Collectors;

@JsonIgnoreProperties(ignoreUnknown = true)
public class TiIndexInfo implements Serializable {
  private final long id;
  private final String name;
  private final String tableName;
  private final List<TiIndexColumn> indexColumns;
  private final boolean isUnique;
  private final boolean isPrimary;
  private final SchemaState schemaState;
  private final String comment;
  private final IndexType indexType;
  private final boolean isFakePrimaryKey;

  @JsonCreator
  @VisibleForTesting
  public TiIndexInfo(
      @JsonProperty("id") long id,
      @JsonProperty("idx_name") CIStr name,
      @JsonProperty("tbl_name") CIStr tableName,
      @JsonProperty("idx_cols") List<TiIndexColumn> indexColumns,
      @JsonProperty("is_unique") boolean isUnique,
      @JsonProperty("is_primary") boolean isPrimary,
      @JsonProperty("state") int schemaState,
      @JsonProperty("comment") String comment,
      @JsonProperty("index_type") int indexType,
      // This is a fake property; @JsonProperty is added only to bypass the
      // Jackson framework's checks.
      @JsonProperty("___isFakePrimaryKey") boolean isFakePrimaryKey) {
    this.id = id;
    this.name = requireNonNull(name, "index name is null").getL();
    this.tableName = requireNonNull(tableName, "table name is null").getL();
    this.indexColumns = ImmutableList.copyOf(requireNonNull(indexColumns, "indexColumns is null"));
    this.isUnique = isUnique;
    this.isPrimary = isPrimary;
    this.schemaState = SchemaState.fromValue(schemaState);
    this.comment = comment;
    this.indexType = IndexType.fromValue(indexType);
    this.isFakePrimaryKey = isFakePrimaryKey;
  }

  public static TiIndexInfo generateFakePrimaryKeyIndex(TiTableInfo table) {
    TiColumnInfo pkColumn = table.getPrimaryKeyColumn();
    if (pkColumn != null) {
      return new TiIndexInfo(
          -1,
          CIStr.newCIStr("fake_pk_" + table.getId()),
          CIStr.newCIStr(table.getName()),
          ImmutableList.of(pkColumn.toFakeIndexColumn()),
          true,
          true,
          SchemaState.StatePublic.getStateCode(),
          "Fake Column",
          IndexType.IndexTypeHash.getTypeCode(),
          true);
    }
    return null;
  }

  public long getId() {
    return id;
  }

  public String getName() {
    return name;
  }

  public String getTableName() {
    return tableName;
  }

  public List<TiIndexColumn> getIndexColumns() {
    return indexColumns;
  }

  public boolean isUnique() {
    return isUnique;
  }

  public boolean isPrimary() {
    return isPrimary;
  }

  public SchemaState getSchemaState() {
    return schemaState;
  }

  public String getComment() {
    return comment;
  }

  public IndexType getIndexType() {
    return indexType;
  }

  public IndexInfo toProto(TiTableInfo tableInfo) {
    IndexInfo.Builder builder =
        IndexInfo.newBuilder().setTableId(tableInfo.getId()).setIndexId(id).setUnique(isUnique);

    List<TiColumnInfo> columns = tableInfo.getColumns();

    for (TiIndexColumn indexColumn : getIndexColumns()) {
      int offset = indexColumn.getOffset();
      TiColumnInfo column = columns.get(offset);
      builder.addColumns(column.toProto(tableInfo));
    }

    if (tableInfo.isPkHandle()) {
      for (TiColumnInfo column : columns) {
        if (!column.isPrimaryKey()) {
          continue;
        }
        ColumnInfo pbColumn = column.toProtoBuilder(tableInfo).setPkHandle(true).build();
        builder.addColumns(pbColumn);
      }
    }
    return builder.build();
  }

  public boolean isFakePrimaryKey() {
    return isFakePrimaryKey;
  }

  @Override
  public String toString() {
    return String.format(
        "%s[%s]",
        name,
        Joiner.on(",")
            .skipNulls()
            .join(
                indexColumns
                    .stream()
                    .map(column -> column.getName())
                    .collect(Collectors.toList())));
  }
}
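
A usage sketch for the fake primary-key helper above (editor's note: `table` is a hypothetical TiTableInfo):

    TiIndexInfo fakePk = TiIndexInfo.generateFakePrimaryKeyIndex(table);
    if (fakePk != null) {
      // non-null only when the table's integer primary key doubles as the row
      // handle; index-based code paths can then treat the PK as a unique index
    }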
@@ -1,28 +0,0 @@
package org.tikv.meta;

// Note: the original file imported org.codehaus.jackson annotations; the rest of the
// metadata classes use com.fasterxml.jackson, so the imports are aligned here.
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.annotations.VisibleForTesting;
import java.io.Serializable;

@JsonIgnoreProperties(ignoreUnknown = true)
public class TiPartitionDef implements Serializable {
  private final long id;
  private final CIStr name;
  private final String[] lessThan;
  private final String comment;

  @JsonCreator
  @VisibleForTesting
  public TiPartitionDef(
      @JsonProperty("id") long id,
      @JsonProperty("name") CIStr name,
      @JsonProperty("less_than") String[] lessThan,
      @JsonProperty("comment") String comment) {
    this.id = id;
    this.name = name;
    this.lessThan = lessThan;
    this.comment = comment;
  }
}
@@ -1,36 +0,0 @@
package org.tikv.meta;

// Note: as in TiPartitionDef, the imports are aligned with the com.fasterxml.jackson
// annotations used by the rest of the metadata classes.
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.annotations.VisibleForTesting;

@JsonIgnoreProperties(ignoreUnknown = true)
public class TiPartitionInfo {
  public enum PartitionType {
    RangePartition,
    HashPartition,
    ListPartition,
  }

  private final PartitionType type;
  private final String expr;
  private final CIStr[] columns;
  private final boolean enable;
  private final TiPartitionDef[] defs;

  @JsonCreator
  @VisibleForTesting
  public TiPartitionInfo(
      @JsonProperty("type") PartitionType type,
      @JsonProperty("expr") String expr,
      @JsonProperty("columns") CIStr[] columns,
      @JsonProperty("enable") boolean enable,
      @JsonProperty("definitions") TiPartitionDef[] defs) {
    this.type = type;
    this.expr = expr;
    this.columns = columns;
    this.enable = enable;
    this.defs = defs;
  }
}
@@ -1,199 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.meta;

import static java.util.Objects.requireNonNull;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableList;
import com.pingcap.tidb.tipb.TableInfo;
import java.io.Serializable;
import java.util.List;
import java.util.stream.Collectors;
import org.tikv.exception.TiClientInternalException;
import org.tikv.meta.TiColumnInfo.InternalTypeHolder;
import org.tikv.types.DataType;
import org.tikv.types.DataTypeFactory;

@JsonIgnoreProperties(ignoreUnknown = true)
public class TiTableInfo implements Serializable {
  private final long id;
  private final String name;
  private final String charset;
  private final String collate;
  private final List<TiColumnInfo> columns;
  private final List<TiIndexInfo> indices;
  private final boolean pkIsHandle;
  private final String comment;
  private final long autoIncId;
  private final long maxColumnId;
  private final long maxIndexId;
  private final long oldSchemaId;
  private final TiPartitionInfo partitionInfo;

  @JsonCreator
  public TiTableInfo(
      @JsonProperty("id") long id,
      @JsonProperty("name") CIStr name,
      @JsonProperty("charset") String charset,
      @JsonProperty("collate") String collate,
      @JsonProperty("pk_is_handle") boolean pkIsHandle,
      @JsonProperty("cols") List<TiColumnInfo> columns,
      @JsonProperty("index_info") List<TiIndexInfo> indices,
      @JsonProperty("comment") String comment,
      @JsonProperty("auto_inc_id") long autoIncId,
      @JsonProperty("max_col_id") long maxColumnId,
      @JsonProperty("max_idx_id") long maxIndexId,
      @JsonProperty("old_schema_id") long oldSchemaId,
      @JsonProperty("partition") TiPartitionInfo partitionInfo) {
    this.id = id;
    this.name = name.getL();
    this.charset = charset;
    this.collate = collate;
    this.columns = ImmutableList.copyOf(requireNonNull(columns, "columns is null"));
    this.pkIsHandle = pkIsHandle;
    this.indices = indices != null ? ImmutableList.copyOf(indices) : ImmutableList.of();
    this.comment = comment;
    this.autoIncId = autoIncId;
    this.maxColumnId = maxColumnId;
    this.maxIndexId = maxIndexId;
    this.oldSchemaId = oldSchemaId;
    this.partitionInfo = partitionInfo;
  }

  public long getId() {
    return id;
  }

  public String getName() {
    return name;
  }

  public String getCharset() {
    return charset;
  }

  public String getCollate() {
    return collate;
  }

  public List<TiColumnInfo> getColumns() {
    return columns;
  }

  public TiColumnInfo getColumn(int offset) {
    if (offset < 0 || offset >= columns.size()) {
      throw new TiClientInternalException(String.format("Column offset %d out of bound", offset));
    }
    return columns.get(offset);
  }

  public boolean isPkHandle() {
    return pkIsHandle;
  }

  public List<TiIndexInfo> getIndices() {
    return indices;
  }

  public String getComment() {
    return comment;
  }

  public long getAutoIncId() {
    return autoIncId;
  }

  public long getMaxColumnId() {
    return maxColumnId;
  }

  public long getMaxIndexId() {
    return maxIndexId;
  }

  public long getOldSchemaId() {
    return oldSchemaId;
  }

  public TableInfo toProto() {
    return TableInfo.newBuilder()
        .setTableId(getId())
        .addAllColumns(
            getColumns().stream().map(col -> col.toProto(this)).collect(Collectors.toList()))
        .build();
  }

  // Only an integer column can be the PK column,
  // and there is at most one such column.
  public TiColumnInfo getPrimaryKeyColumn() {
    if (isPkHandle()) {
      for (TiColumnInfo col : getColumns()) {
        if (col.isPrimaryKey()) {
          return col;
        }
      }
    }
    return null;
  }

  public TiTableInfo copyTableWithRowId() {
    if (!isPkHandle()) {
      ImmutableList.Builder<TiColumnInfo> newColumns = ImmutableList.builder();
      for (TiColumnInfo col : getColumns()) {
        DataType type = col.getType();
        InternalTypeHolder typeHolder = type.toTypeHolder();
        typeHolder.setFlag(type.getFlag() & (~DataType.PriKeyFlag));
        DataType newType = DataTypeFactory.of(typeHolder);
        TiColumnInfo newCol =
            new TiColumnInfo(
                col.getId(),
                col.getName(),
                col.getOffset(),
                newType,
                col.getSchemaState(),
                col.getOriginDefaultValue(),
                col.getDefaultValue(),
                col.getComment());
        newColumns.add(newCol.copyWithoutPrimaryKey());
      }
      newColumns.add(TiColumnInfo.getRowIdColumn(getColumns().size()));
      return new TiTableInfo(
          getId(),
          CIStr.newCIStr(getName()),
          getCharset(),
          getCollate(),
          true,
          newColumns.build(),
          getIndices(),
          getComment(),
          getAutoIncId(),
          getMaxColumnId(),
          getMaxIndexId(),
          getOldSchemaId(),
          partitionInfo);
    } else {
      return this;
    }
  }

  @Override
  public String toString() {
    return toProto().toString();
  }
}
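
A short sketch of the row-id copy above (editor's note):

    TiTableInfo withRowId = table.copyTableWithRowId();
    // when table.isPkHandle() is true, the same instance is returned unchanged;
    // otherwise the copy strips the primary-key flag from every column and
    // appends an implicit row-id handle column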
@@ -23,7 +23,6 @@ import io.grpc.StatusRuntimeException;
import java.util.function.Function;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.tikv.codec.KeyUtils;
|
||||
import org.tikv.event.CacheInvalidateEvent;
|
||||
import org.tikv.exception.GrpcException;
|
||||
import org.tikv.kvproto.Errorpb;
|
||||
import org.tikv.region.RegionErrorReceiver;
|
||||
|
|
@ -38,7 +37,6 @@ public class KVErrorHandler<RespT> implements ErrorHandler<RespT> {
|
|||
private static final int NO_LEADER_STORE_ID =
|
||||
0; // if there's currently no leader of a store, store id is set to 0
|
||||
private final Function<RespT, Errorpb.Error> getRegionError;
|
||||
private final Function<CacheInvalidateEvent, Void> cacheInvalidateCallBack;
|
||||
private final RegionManager regionManager;
|
||||
private final RegionErrorReceiver recv;
|
||||
private final TiRegion ctxRegion;
|
||||
|
|
@ -52,10 +50,6 @@ public class KVErrorHandler<RespT> implements ErrorHandler<RespT> {
|
|||
this.recv = recv;
|
||||
    this.regionManager = regionManager;
    this.getRegionError = getRegionError;
    this.cacheInvalidateCallBack =
        regionManager != null && regionManager.getSession() != null
            ? regionManager.getSession().getCacheInvalidateCallback()
            : null;
  }

  private Errorpb.Error getRegionError(RespT resp) {

@@ -68,55 +62,6 @@ public class KVErrorHandler<RespT> implements ErrorHandler<RespT> {
  private void invalidateRegionStoreCache(TiRegion ctxRegion) {
    regionManager.invalidateRegion(ctxRegion.getId());
    regionManager.invalidateStore(ctxRegion.getLeader().getStoreId());
    notifyRegionStoreCacheInvalidate(
        ctxRegion.getId(),
        ctxRegion.getLeader().getStoreId(),
        CacheInvalidateEvent.CacheType.REGION_STORE);
  }

  /** Used for notifying the Spark driver to invalidate cache from Spark workers. */
  private void notifyRegionStoreCacheInvalidate(
      long regionId, long storeId, CacheInvalidateEvent.CacheType type) {
    if (cacheInvalidateCallBack != null) {
      cacheInvalidateCallBack.apply(new CacheInvalidateEvent(regionId, storeId, true, true, type));
      logger.info(
          "Accumulating cache invalidation info to driver:regionId="
              + regionId
              + ",storeId="
              + storeId
              + ",type="
              + type.name());
    } else {
      logger.warn(
          "Failed to send notification back to driver since CacheInvalidateCallBack is null in executor node.");
    }
  }

  private void notifyRegionCacheInvalidate(long regionId) {
    if (cacheInvalidateCallBack != null) {
      cacheInvalidateCallBack.apply(
          new CacheInvalidateEvent(
              regionId, 0, true, false, CacheInvalidateEvent.CacheType.REGION_STORE));
      logger.info(
          "Accumulating cache invalidation info to driver:regionId="
              + regionId
              + ",type="
              + CacheInvalidateEvent.CacheType.REGION_STORE.name());
    } else {
      logger.warn(
          "Failed to send notification back to driver since CacheInvalidateCallBack is null in executor node.");
    }
  }

  private void notifyStoreCacheInvalidate(long storeId) {
    if (cacheInvalidateCallBack != null) {
      cacheInvalidateCallBack.apply(
          new CacheInvalidateEvent(
              0, storeId, false, true, CacheInvalidateEvent.CacheType.REGION_STORE));
    } else {
      logger.warn(
          "Failed to send notification back to driver since CacheInvalidateCallBack is null in executor node.");
    }
  }

  // Referenced from TiDB

@@ -161,9 +106,6 @@ public class KVErrorHandler<RespT> implements ErrorHandler<RespT> {
          // to a new store address.
          retry = false;
        }
        notifyRegionStoreCacheInvalidate(
            ctxRegion.getId(), newStoreId, CacheInvalidateEvent.CacheType.LEADER);

        backOffFuncType = BackOffFunction.BackOffFuncType.BoUpdateLeader;
      } else {
        logger.info(

@@ -171,9 +113,7 @@ public class KVErrorHandler<RespT> implements ErrorHandler<RespT> {
                "Received zero store id, from region %d try next time", ctxRegion.getId()));
        backOffFuncType = BackOffFunction.BackOffFuncType.BoRegionMiss;
      }

      backOffer.doBackOff(backOffFuncType, new GrpcException(error.toString()));

      return retry;
    } else if (error.hasStoreNotMatch()) {
      // this error is reported from raftstore:

@@ -184,17 +124,18 @@ public class KVErrorHandler<RespT> implements ErrorHandler<RespT> {
          String.format(
              "Store Not Match happened with region id %d, store id %d",
              ctxRegion.getId(), storeId));
      logger.warn(String.format("%s", error.getStoreNotMatch()));

      this.regionManager.invalidateStore(storeId);
      recv.onStoreNotMatch(this.regionManager.getStoreById(storeId));
      notifyStoreCacheInvalidate(storeId);
      backOffer.doBackOff(
          BackOffFunction.BackOffFuncType.BoStoreNotMatch, new GrpcException(error.toString()));
      return true;
    } else if (error.hasStaleEpoch()) {
      // this error is reported from raftstore:
      // region has an outdated version, please try later.
      logger.warn(String.format("Stale Epoch encountered for region [%s]", ctxRegion));
      this.regionManager.onRegionStale(ctxRegion.getId());
      notifyRegionCacheInvalidate(ctxRegion.getId());
      return false;
    } else if (error.hasServerIsBusy()) {
      // this error is reported from kv:

@@ -241,10 +182,6 @@ public class KVErrorHandler<RespT> implements ErrorHandler<RespT> {
  @Override
  public boolean handleRequestError(BackOffer backOffer, Exception e) {
    regionManager.onRequestFail(ctxRegion.getId(), ctxRegion.getLeader().getStoreId());
    notifyRegionStoreCacheInvalidate(
        ctxRegion.getId(),
        ctxRegion.getLeader().getStoreId(),
        CacheInvalidateEvent.CacheType.REQ_FAILED);

    backOffer.doBackOff(
        BackOffFunction.BackOffFuncType.BoTiKVRPC,
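For context, a minimal sketch of the driver-side half of this notification path. It assumes, based only on the apply() calls above, that the callback has the shape of a java.util.function.Function<CacheInvalidateEvent, Void>; the accumulator class and the import path for CacheInvalidateEvent are hypothetical, not confirmed by this diff.

import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.function.Function;
import org.tikv.event.CacheInvalidateEvent; // assumed package; not confirmed by this diff

// Hypothetical driver-side accumulator. KVErrorHandler only requires that
// cacheInvalidateCallBack.apply(event) works, which this callback satisfies.
public class CacheInvalidateAccumulator {
  private final Queue<CacheInvalidateEvent> events = new ConcurrentLinkedQueue<>();

  public Function<CacheInvalidateEvent, Void> asCallback() {
    return event -> {
      events.add(event); // accumulate; the driver drains and invalidates its caches later
      return null;
    };
  }
}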
@@ -1,123 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.operation;

import java.util.ArrayList;
import java.util.List;
import org.tikv.expression.ByItem;
import org.tikv.expression.Expression;
import org.tikv.meta.TiDAGRequest;
import org.tikv.operation.transformer.Cast;
import org.tikv.operation.transformer.NoOp;
import org.tikv.operation.transformer.RowTransformer;
import org.tikv.types.DataType;
import org.tikv.types.IntegerType;
import org.tikv.util.Pair;

/**
 * SchemaInfer extracts a row's type after a query is executed. It is a rough first version;
 * optimization is on the way. The problem we have right now is that TiDB promotes Sum to Decimal,
 * which is not compatible with the column's type. The solution we use for now is to record the
 * column's type as finalFieldType and build another list recording each TiExpr's type as fieldType
 * for row reading. Once row reading finishes, we check whether each element in fieldType and
 * finalFieldType share the same type. If yes, no casting is needed; if no, casting is done here.
 */
public class SchemaInfer {
  private List<DataType> types;
  private RowTransformer rt;

  public static SchemaInfer create(TiDAGRequest dagRequest) {
    return new SchemaInfer(dagRequest);
  }

  protected SchemaInfer(TiDAGRequest dagRequest) {
    types = new ArrayList<>();
    extractFieldTypes(dagRequest);
    extractHandleType(dagRequest);
    buildTransform(dagRequest);
  }

  private void extractHandleType(TiDAGRequest dagRequest) {
    if (dagRequest.isHandleNeeded()) {
      // DataType of handle is long
      types.add(IntegerType.INT);
    }
  }

  private void buildTransform(TiDAGRequest dagRequest) {
    RowTransformer.Builder rowTrans = RowTransformer.newBuilder();
    // Update:
    // Switching to DAG mode will eliminate the first blob
    // TODO: check correctness of the above
    // 1. if group by is empty, the first column should be "single group",
    //    which is a string
    // 2. if multiple group by items are present, they are wrapped inside
    //    a byte array, and we do a multi-value decoding
    // 3. for the no-aggregation case, emit only projected columns

    // append aggregates if present
    if (dagRequest.hasAggregate()) {
      for (Pair<Expression, DataType> pair : dagRequest.getAggregatePairs()) {
        rowTrans.addProjection(new Cast(pair.second));
      }
      if (dagRequest.hasGroupBy()) {
        for (ByItem byItem : dagRequest.getGroupByItems()) {
          rowTrans.addProjection(new NoOp(dagRequest.getExpressionType(byItem.getExpr())));
        }
      }
    } else {
      for (Expression field : dagRequest.getFields()) {
        rowTrans.addProjection(new NoOp(dagRequest.getExpressionType(field)));
      }
    }
    rowTrans.addSourceFieldTypes(types);
    rt = rowTrans.build();
  }

  /**
   * TODO: order by extract field types from tiSelectRequest for reading data to row.
   *
   * @param dagRequest is SelectRequest
   */
  private void extractFieldTypes(TiDAGRequest dagRequest) {
    if (dagRequest.hasAggregate()) {
      dagRequest.getAggregates().forEach(expr -> types.add(dagRequest.getExpressionType(expr)));
      // In DAG mode, if there is any group by statement in a request, all the columns specified
      // in the group by expression will be returned, so when we decode a result row, we need to
      // pay extra attention to decoding.
      if (dagRequest.hasGroupBy()) {
        for (ByItem item : dagRequest.getGroupByItems()) {
          types.add(dagRequest.getExpressionType(item.getExpr()));
        }
      }
    } else {
      // Extract all column type information from TiExpr
      dagRequest.getFields().forEach(expr -> types.add(expr.getType()));
    }
  }

  public DataType getType(int index) {
    return types.get(index);
  }

  public List<DataType> getTypes() {
    return types;
  }

  public RowTransformer getRowTransformer() {
    return this.rt;
  }
}
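A short usage sketch, assuming a fully built TiDAGRequest: SchemaInfer supplies both the decode-time field types and the RowTransformer that casts promoted aggregate results back to column types. The helper class and method names are illustrative only.

import org.tikv.meta.TiDAGRequest;
import org.tikv.operation.SchemaInfer;
import org.tikv.operation.transformer.RowTransformer;
import org.tikv.row.Row;
import org.tikv.types.DataType;

public class SchemaInferExample {
  // dagRequest is assumed to be built elsewhere; rawRow is a freshly decoded row.
  static Row finalizeRow(TiDAGRequest dagRequest, Row rawRow) {
    SchemaInfer infer = SchemaInfer.create(dagRequest);
    DataType firstType = infer.getType(0); // the type used while reading the row
    RowTransformer rt = infer.getRowTransformer();
    return rt.transform(rawRow); // casts e.g. a Decimal-promoted sum back to the column type
  }
}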
@@ -1,105 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.operation.iterator;

import com.google.protobuf.ByteString;
import com.pingcap.tidb.tipb.Chunk;
import java.util.Iterator;
import java.util.List;
import org.tikv.exception.TiClientInternalException;

public abstract class ChunkIterator<T> implements Iterator<T> {

  private final List<Chunk> chunks;
  protected int chunkIndex;
  protected int metaIndex;
  protected int bufOffset;
  protected boolean eof;

  public static ChunkIterator<ByteString> getRawBytesChunkIterator(List<Chunk> chunks) {
    return new ChunkIterator<ByteString>(chunks) {
      @Override
      public ByteString next() {
        Chunk c = chunks.get(chunkIndex);
        long endOffset = c.getRowsMeta(metaIndex).getLength() + bufOffset;
        if (endOffset > Integer.MAX_VALUE) {
          throw new TiClientInternalException("Offset exceeded MAX_INT.");
        }

        ByteString result = c.getRowsData().substring(bufOffset, (int) endOffset);
        advance();
        return result;
      }
    };
  }

  public static ChunkIterator<Long> getHandleChunkIterator(List<Chunk> chunks) {
    return new ChunkIterator<Long>(chunks) {
      @Override
      public Long next() {
        Chunk c = chunks.get(chunkIndex);
        long result = c.getRowsMeta(metaIndex).getHandle();
        advance();
        return result;
      }
    };
  }

  protected ChunkIterator(List<Chunk> chunks) {
    // Read-then-advance semantics
    this.chunks = chunks;
    this.chunkIndex = 0;
    this.metaIndex = 0;
    this.bufOffset = 0;
    if (chunks.size() == 0
        || chunks.get(0).getRowsMetaCount() == 0
        || chunks.get(0).getRowsData().size() == 0) {
      eof = true;
    }
  }

  @Override
  public boolean hasNext() {
    return !eof;
  }

  private boolean seekNextNonEmptyChunk() {
    // loop until the end of the chunk list or the first non-empty chunk
    do {
      chunkIndex += 1;
    } while (chunkIndex < chunks.size() && chunks.get(chunkIndex).getRowsMetaCount() == 0);
    // return whether anything remains
    return chunkIndex < chunks.size();
  }

  protected void advance() {
    if (eof) {
      return;
    }
    Chunk c = chunks.get(chunkIndex);
    bufOffset += c.getRowsMeta(metaIndex++).getLength();
    if (metaIndex >= c.getRowsMetaCount()) {
      if (seekNextNonEmptyChunk()) {
        metaIndex = 0;
        bufOffset = 0;
      } else {
        eof = true;
      }
    }
  }
}
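A usage sketch under the read-then-advance contract above: the factory wraps a list of coprocessor Chunks, and next() slices out one encoded row per call. The wrapper class is illustrative.

import com.google.protobuf.ByteString;
import com.pingcap.tidb.tipb.Chunk;
import java.util.List;
import org.tikv.operation.iterator.ChunkIterator;

public class ChunkIteratorExample {
  static void drain(List<Chunk> chunks) {
    ChunkIterator<ByteString> it = ChunkIterator.getRawBytesChunkIterator(chunks);
    while (it.hasNext()) {
      ByteString rowBytes = it.next(); // one encoded row; decode with a RowReader elsewhere
    }
  }
}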
@@ -1,111 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.operation.iterator;

import static java.util.Objects.requireNonNull;

import com.pingcap.tidb.tipb.Chunk;
import com.pingcap.tidb.tipb.DAGRequest;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import org.tikv.TiSession;
import org.tikv.codec.CodecDataInput;
import org.tikv.meta.TiDAGRequest;
import org.tikv.operation.SchemaInfer;
import org.tikv.row.Row;
import org.tikv.row.RowReader;
import org.tikv.row.RowReaderFactory;
import org.tikv.types.DataType;
import org.tikv.types.IntegerType;
import org.tikv.util.RangeSplitter.RegionTask;

public abstract class CoprocessIterator<T> implements Iterator<T> {
  protected final TiSession session;
  protected final List<RegionTask> regionTasks;
  protected DAGRequest dagRequest;
  protected static final DataType[] handleTypes = new DataType[] {IntegerType.INT};
  //  protected final ExecutorCompletionService<Iterator<SelectResponse>> completionService;
  protected RowReader rowReader;
  protected CodecDataInput dataInput;
  protected boolean eof = false;
  protected int taskIndex;
  protected int chunkIndex;
  protected List<Chunk> chunkList;
  protected SchemaInfer schemaInfer;

  CoprocessIterator(
      DAGRequest req, List<RegionTask> regionTasks, TiSession session, SchemaInfer infer) {
    this.dagRequest = req;
    this.session = session;
    this.regionTasks = regionTasks;
    this.schemaInfer = infer;
  }

  abstract void submitTasks();

  public static CoprocessIterator<Row> getRowIterator(
      TiDAGRequest req, List<RegionTask> regionTasks, TiSession session) {
    return new DAGIterator<Row>(
        req.buildScan(req.isIndexScan() && !req.isDoubleRead()),
        regionTasks,
        session,
        SchemaInfer.create(req),
        req.getPushDownType()) {
      @Override
      public Row next() {
        if (hasNext()) {
          return rowReader.readRow(schemaInfer.getTypes().toArray(new DataType[0]));
        } else {
          throw new NoSuchElementException();
        }
      }
    };
  }

  public static CoprocessIterator<Long> getHandleIterator(
      TiDAGRequest req, List<RegionTask> regionTasks, TiSession session) {
    return new DAGIterator<Long>(
        req.buildScan(true), regionTasks, session, SchemaInfer.create(req), req.getPushDownType()) {
      @Override
      public Long next() {
        if (hasNext()) {
          return rowReader.readRow(handleTypes).getLong(0);
        } else {
          throw new NoSuchElementException();
        }
      }
    };
  }

  boolean tryAdvanceChunkIndex() {
    if (chunkList == null || chunkIndex >= chunkList.size() - 1) {
      return false;
    }

    chunkIndex++;
    return true;
  }

  void createDataInputReader() {
    requireNonNull(chunkList, "Chunk list should not be null.");
    if (0 > chunkIndex || chunkIndex >= chunkList.size()) {
      throw new IllegalArgumentException();
    }
    dataInput = new CodecDataInput(chunkList.get(chunkIndex).getRowsData());
    rowReader = RowReaderFactory.createRowReader(dataInput);
  }
}
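A sketch of wiring the factories together; the region tasks are assumed to come from RangeSplitter, as IndexScanIterator does further down. The wrapper class is illustrative.

import java.util.List;
import org.tikv.TiSession;
import org.tikv.meta.TiDAGRequest;
import org.tikv.operation.iterator.CoprocessIterator;
import org.tikv.row.Row;
import org.tikv.util.RangeSplitter.RegionTask;

public class CoprocessExample {
  static void scan(TiDAGRequest req, List<RegionTask> regionTasks, TiSession session) {
    CoprocessIterator<Row> rows = CoprocessIterator.getRowIterator(req, regionTasks, session);
    while (rows.hasNext()) {
      Row row = rows.next(); // decoded with the SchemaInfer-derived types
    }
  }
}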
@@ -1,220 +0,0 @@
package org.tikv.operation.iterator;

import static org.tikv.meta.TiDAGRequest.PushDownType.STREAMING;

import com.pingcap.tidb.tipb.Chunk;
import com.pingcap.tidb.tipb.DAGRequest;
import com.pingcap.tidb.tipb.SelectResponse;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ExecutorCompletionService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tikv.TiSession;
import org.tikv.exception.RegionTaskException;
import org.tikv.exception.TiClientInternalException;
import org.tikv.kvproto.Coprocessor;
import org.tikv.kvproto.Metapb;
import org.tikv.meta.TiDAGRequest.PushDownType;
import org.tikv.operation.SchemaInfer;
import org.tikv.region.RegionStoreClient;
import org.tikv.region.TiRegion;
import org.tikv.util.BackOffer;
import org.tikv.util.ConcreteBackOffer;
import org.tikv.util.RangeSplitter;

public abstract class DAGIterator<T> extends CoprocessIterator<T> {
  private ExecutorCompletionService<Iterator<SelectResponse>> streamingService;
  private ExecutorCompletionService<SelectResponse> dagService;
  private SelectResponse response;
  private static final Logger logger = LoggerFactory.getLogger(DAGIterator.class.getName());

  private Iterator<SelectResponse> responseIterator;

  private final PushDownType pushDownType;

  DAGIterator(
      DAGRequest req,
      List<RangeSplitter.RegionTask> regionTasks,
      TiSession session,
      SchemaInfer infer,
      PushDownType pushDownType) {
    super(req, regionTasks, session, infer);
    this.pushDownType = pushDownType;
    switch (pushDownType) {
      case NORMAL:
        dagService = new ExecutorCompletionService<>(session.getThreadPoolForTableScan());
        break;
      case STREAMING:
        streamingService = new ExecutorCompletionService<>(session.getThreadPoolForTableScan());
        break;
    }
    submitTasks();
  }

  @Override
  void submitTasks() {
    for (RangeSplitter.RegionTask task : regionTasks) {
      switch (pushDownType) {
        case STREAMING:
          streamingService.submit(() -> processByStreaming(task));
          break;
        case NORMAL:
          dagService.submit(() -> process(task));
          break;
      }
    }
  }

  @Override
  public boolean hasNext() {
    if (eof) {
      return false;
    }

    while (chunkList == null || chunkIndex >= chunkList.size() || dataInput.available() <= 0) {
      // First we check if our chunk list has a remaining chunk
      if (tryAdvanceChunkIndex()) {
        createDataInputReader();
      }
      // If not, check the next region/response
      else if (pushDownType == STREAMING) {
        if (!advanceNextResponse() && !readNextRegionChunks()) {
          return false;
        }
      } else if (!readNextRegionChunks()) {
        return false;
      }
    }

    return true;
  }

  private boolean hasMoreResponse() {
    switch (pushDownType) {
      case STREAMING:
        return responseIterator != null && responseIterator.hasNext();
      case NORMAL:
        return response != null;
    }

    throw new IllegalArgumentException("Invalid push down type:" + pushDownType);
  }

  private boolean advanceNextResponse() {
    if (!hasMoreResponse()) {
      return false;
    }

    switch (pushDownType) {
      case STREAMING:
        chunkList = responseIterator.next().getChunksList();
        break;
      case NORMAL:
        chunkList = response.getChunksList();
        break;
    }

    if (chunkList == null || chunkList.isEmpty()) {
      return false;
    }

    chunkIndex = 0;
    createDataInputReader();
    return true;
  }

  private boolean readNextRegionChunks() {
    if (eof || regionTasks == null || taskIndex >= regionTasks.size()) {
      return false;
    }

    try {
      switch (pushDownType) {
        case STREAMING:
          responseIterator = streamingService.take().get();
          break;
        case NORMAL:
          response = dagService.take().get();
          break;
      }

    } catch (Exception e) {
      throw new TiClientInternalException("Error reading region:", e);
    }

    taskIndex++;
    return advanceNextResponse();
  }

  private SelectResponse process(RangeSplitter.RegionTask regionTask) {
    Queue<RangeSplitter.RegionTask> remainTasks = new ArrayDeque<>();
    Queue<SelectResponse> responseQueue = new ArrayDeque<>();
    remainTasks.add(regionTask);
    BackOffer backOffer = ConcreteBackOffer.newCopNextMaxBackOff();
    // In case one region task splits into several others, we use a queue to properly handle all
    // the remaining tasks.
    while (!remainTasks.isEmpty()) {
      RangeSplitter.RegionTask task = remainTasks.poll();
      if (task == null) continue;
      List<Coprocessor.KeyRange> ranges = task.getRanges();
      TiRegion region = task.getRegion();
      Metapb.Store store = task.getStore();

      try {
        RegionStoreClient client = RegionStoreClient.create(region, store, session);
        Collection<RangeSplitter.RegionTask> tasks =
            client.coprocess(backOffer, dagRequest, ranges, responseQueue);
        if (tasks != null) {
          remainTasks.addAll(tasks);
        }
      } catch (Throwable e) {
        // Handle region task failure
        logger.error(
            "Process region tasks failed, remain "
                + remainTasks.size()
                + " tasks not executed due to",
            e);
        // Rethrow to upper levels
        eof = true;
        throw new RegionTaskException("Handle region task failed:", e);
      }
    }

    // Add all chunks to the final result
    List<Chunk> resultChunk = new ArrayList<>();
    while (!responseQueue.isEmpty()) {
      SelectResponse response = responseQueue.poll();
      if (response != null) {
        resultChunk.addAll(response.getChunksList());
      }
    }

    return SelectResponse.newBuilder().addAllChunks(resultChunk).build();
  }

  private Iterator<SelectResponse> processByStreaming(RangeSplitter.RegionTask regionTask) {
    List<Coprocessor.KeyRange> ranges = regionTask.getRanges();
    TiRegion region = regionTask.getRegion();
    Metapb.Store store = regionTask.getStore();

    RegionStoreClient client;
    try {
      client = RegionStoreClient.create(region, store, session);
      Iterator<SelectResponse> responseIterator = client.coprocessStreaming(dagRequest, ranges);
      if (responseIterator == null) {
        eof = true;
        return null;
      }
      return responseIterator;
    } catch (Exception e) {
      // TODO: Fix stale error handling in streaming
      // see: https://github.com/pingcap/tikv-client-lib-java/pull/149
      throw new TiClientInternalException("Error Closing Store client.", e);
    }
  }
}
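DAGIterator consumes region results in completion order rather than submission order. A self-contained sketch of the same JDK pattern, independent of the TiKV types:

import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class CompletionOrderDemo {
  public static void main(String[] args) throws Exception {
    ExecutorService pool = Executors.newFixedThreadPool(4);
    ExecutorCompletionService<String> service = new ExecutorCompletionService<>(pool);
    int submitted = 0;
    for (int i = 0; i < 4; i++) {
      final int id = i;
      service.submit(() -> "task-" + id); // analogous to one RegionTask each
      submitted++;
    }
    // take() blocks until *some* task finishes, like readNextRegionChunks() above
    for (int i = 0; i < submitted; i++) {
      System.out.println(service.take().get());
    }
    pool.shutdown();
  }
}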
@@ -1,105 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.operation.iterator;

import gnu.trove.list.array.TLongArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.ExecutorCompletionService;
import org.tikv.Snapshot;
import org.tikv.TiConfiguration;
import org.tikv.TiSession;
import org.tikv.exception.TiClientInternalException;
import org.tikv.meta.TiDAGRequest;
import org.tikv.row.Row;
import org.tikv.util.RangeSplitter;
import org.tikv.util.RangeSplitter.RegionTask;

public class IndexScanIterator implements Iterator<Row> {
  private final Iterator<Long> handleIterator;
  private final TiDAGRequest dagReq;
  private final Snapshot snapshot;
  private Iterator<Row> rowIterator;
  private final ExecutorCompletionService<Iterator<Row>> completionService;

  private int batchCount = 0;
  private final int batchSize;

  public IndexScanIterator(Snapshot snapshot, TiDAGRequest req, Iterator<Long> handleIterator) {
    TiSession session = snapshot.getSession();
    TiConfiguration conf = session.getConf();
    this.dagReq = req;
    this.handleIterator = handleIterator;
    this.snapshot = snapshot;
    this.batchSize = conf.getIndexScanBatchSize();
    this.completionService = new ExecutorCompletionService<>(session.getThreadPoolForIndexScan());
  }

  private TLongArrayList feedBatch() {
    TLongArrayList handles = new TLongArrayList(512);
    while (handleIterator.hasNext()) {
      handles.add(handleIterator.next());
      if (batchSize <= handles.size()) {
        break;
      }
    }
    return handles;
  }

  @Override
  public boolean hasNext() {
    try {
      if (rowIterator == null) {
        TiSession session = snapshot.getSession();
        while (handleIterator.hasNext()) {
          TLongArrayList handles = feedBatch();
          batchCount++;
          completionService.submit(
              () -> {
                List<RegionTask> tasks =
                    RangeSplitter.newSplitter(session.getRegionManager())
                        .splitAndSortHandlesByRegion(dagReq.getTableInfo().getId(), handles);
                return CoprocessIterator.getRowIterator(dagReq, tasks, session);
              });
        }
        while (batchCount > 0) {
          rowIterator = completionService.take().get();
          batchCount--;

          if (rowIterator.hasNext()) {
            return true;
          }
        }
      }
      if (rowIterator == null) {
        return false;
      }
    } catch (Exception e) {
      throw new TiClientInternalException("Error reading rows from handle", e);
    }
    return rowIterator.hasNext();
  }

  @Override
  public Row next() {
    if (hasNext()) {
      return rowIterator.next();
    } else {
      throw new NoSuchElementException();
    }
  }
}
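The feedBatch() loop above is a generic iterator-batching idiom; a standalone sketch over plain longs (TLongArrayList swapped for a List to stay dependency-free):

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class BatchingExample {
  // Pulls up to batchSize elements per call, mirroring IndexScanIterator.feedBatch().
  static List<Long> feedBatch(Iterator<Long> handleIterator, int batchSize) {
    List<Long> handles = new ArrayList<>();
    while (handleIterator.hasNext()) {
      handles.add(handleIterator.next());
      if (batchSize <= handles.size()) {
        break;
      }
    }
    return handles;
  }
}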
@@ -1,97 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.operation.transformer;

import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import org.tikv.row.Row;
import org.tikv.types.*;

public class Cast extends NoOp {
  public Cast(DataType type) {
    super(type);
  }

  @Override
  public void set(Object value, Row row, int pos) {
    Object casted;
    if (value == null) {
      row.set(pos, targetDataType, null);
      return;
    }
    if (targetDataType instanceof IntegerType) {
      casted = castToLong(value);
    } else if (targetDataType instanceof StringType) {
      casted = castToString(value);
    } else if (targetDataType instanceof BytesType) {
      casted = castToBinary(value);
    } else if (targetDataType instanceof DecimalType) {
      casted = castToDecimal(value);
    } else if (targetDataType instanceof RealType) {
      casted = castToDouble(value);
    } else {
      casted = value;
    }
    row.set(pos, targetDataType, casted);
  }

  private Double castToDouble(Object obj) {
    if (obj instanceof Number) {
      Number num = (Number) obj;
      return num.doubleValue();
    }
    throw new UnsupportedOperationException("Cannot cast a non-number to double");
  }

  private BigDecimal castToDecimal(Object obj) {
    if (obj instanceof Number) {
      Number num = (Number) obj;
      return new BigDecimal(num.doubleValue());
    }
    throw new UnsupportedOperationException(
        "Cannot cast to BigDecimal: " + (obj == null ? "null" : obj.getClass().getSimpleName()));
  }

  private Long castToLong(Object obj) {
    if (obj instanceof Number) {
      Number num = (Number) obj;
      return num.longValue();
    }
    throw new UnsupportedOperationException("Cannot cast a non-number to long");
  }

  private String castToString(Object obj) {
    String result;
    if (obj instanceof byte[]) {
      result = new String((byte[]) obj, StandardCharsets.UTF_8);
    } else if (obj instanceof char[]) {
      result = new String((char[]) obj);
    } else {
      result = String.valueOf(obj);
    }
    return result;
  }

  private byte[] castToBinary(Object obj) {
    if (obj instanceof byte[]) {
      return (byte[]) obj;
    } else {
      return obj.toString().getBytes();
    }
  }
}
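A sketch of what Cast fixes in practice: a pushed-down sum arrives as a Decimal-flavored Number while the column is declared INT, so Cast.set coerces it before it lands in the row. ObjectRowImpl and IntegerType.INT are taken from the surrounding code; the value is purely illustrative.

import java.math.BigDecimal;
import org.tikv.operation.transformer.Cast;
import org.tikv.row.ObjectRowImpl;
import org.tikv.row.Row;
import org.tikv.types.IntegerType;

public class CastExample {
  static Row example() {
    Row row = ObjectRowImpl.create(1);
    Cast toLong = new Cast(IntegerType.INT);
    // TiDB promotes sum() to Decimal; Cast narrows the value back to the column type.
    toLong.set(new BigDecimal("42"), row, 0); // stored as the Long 42L
    return row;
  }
}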
@@ -1,60 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.operation.transformer;

import static java.util.Objects.requireNonNull;

import com.google.common.collect.ImmutableList;
import java.util.List;
import org.tikv.codec.CodecDataInput;
import org.tikv.row.Row;
import org.tikv.types.DataType;

public class MultiKeyDecoder implements Projection {
  public MultiKeyDecoder(List<DataType> dataTypes) {
    this.resultTypes = requireNonNull(dataTypes).toArray(new DataType[0]);
  }

  private DataType[] resultTypes;

  @Override
  public void set(Object value, Row row, int pos) {
    byte[] rowData = (byte[]) value;
    CodecDataInput cdi = new CodecDataInput(rowData);

    for (int i = 0; i < resultTypes.length; i++) {
      DataType type = resultTypes[i];
      if (type.isNextNull(cdi)) {
        cdi.readUnsignedByte();
        row.setNull(i + pos);
      } else {
        row.set(i + pos, type, type.decode(cdi));
      }
    }
  }

  @Override
  public int size() {
    return resultTypes.length;
  }

  @Override
  public List<DataType> getTypes() {
    return ImmutableList.copyOf(resultTypes);
  }
}
@@ -1,47 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.operation.transformer;

import com.google.common.collect.ImmutableList;
import java.util.List;
import org.tikv.row.Row;
import org.tikv.types.DataType;

/** NoOp is a base type projection; it basically does nothing but copy the value through. */
public class NoOp implements Projection {
  protected DataType targetDataType;

  public NoOp(DataType dataType) {
    this.targetDataType = dataType;
  }

  @Override
  public void set(Object value, Row row, int pos) {
    row.set(pos, targetDataType, value);
  }

  @Override
  public int size() {
    return 1;
  }

  @Override
  public List<DataType> getTypes() {
    return ImmutableList.of(targetDataType);
  }
}
@@ -1,30 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.operation.transformer;

import java.util.List;
import org.tikv.row.Row;
import org.tikv.types.DataType;

public interface Projection {
  void set(Object value, Row row, int pos);

  int size();

  List<DataType> getTypes();
}
@@ -1,131 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.operation.transformer;

import static java.util.Objects.requireNonNull;

import com.google.common.collect.ImmutableList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import org.tikv.row.ObjectRowImpl;
import org.tikv.row.Row;
import org.tikv.types.DataType;

/**
 * RowTransformer is used along with SchemaInfer and Row to provide per-column operations. If you
 * do not know your target FieldType, you do not need to use this interface. The reason we provide
 * it is that sometimes we need it: suppose table t1 has two columns c1 and s1; "select sum(c1)
 * from t1" returns the literal "SingleGroup" plus sum(c1), and "SingleGroup" should be skipped,
 * hence a skip operation is needed here. Another use is that sum(c1)'s type is decimal no matter
 * what the real column type is; we need to cast it to the target type, which is the column's type,
 * hence a cast operation is needed. RowTransformer is executed after the row has already been read
 * from CodecDataInput.
 */
public class RowTransformer {
  public static Builder newBuilder() {
    return new Builder();
  }

  /** A Builder can build a RowTransformer. */
  public static class Builder {
    private final List<Projection> projections = new ArrayList<>();
    private final List<DataType> sourceTypes = new ArrayList<>();

    public RowTransformer build() {
      return new RowTransformer(sourceTypes, projections);
    }

    public Builder addProjection(Projection projection) {
      this.projections.add(projection);
      return this;
    }

    public Builder addProjections(Projection... projections) {
      this.projections.addAll(Arrays.asList(projections));
      return this;
    }

    public Builder addSourceFieldType(DataType fieldType) {
      this.sourceTypes.add(fieldType);
      return this;
    }

    public Builder addSourceFieldTypes(DataType... fieldTypes) {
      this.sourceTypes.addAll(Arrays.asList(fieldTypes));
      return this;
    }

    public Builder addSourceFieldTypes(List<DataType> fieldTypes) {
      this.sourceTypes.addAll(fieldTypes);
      return this;
    }
  }

  private final List<Projection> projections;

  private final List<DataType> sourceFieldTypes;

  private RowTransformer(List<DataType> sourceTypes, List<Projection> projections) {
    this.sourceFieldTypes = ImmutableList.copyOf(requireNonNull(sourceTypes));
    this.projections = ImmutableList.copyOf(requireNonNull(projections));
  }

  /**
   * Transforms an input row into an output row according to the projection operators passed on
   * creation of this RowTransformer.
   *
   * @param inRow the input row that needs to be transformed.
   * @return a row that has already been transformed.
   */
  public Row transform(Row inRow) {
    // After the transform, the length of the row is probably not the same as the input row's,
    // so we need to calculate the new length.
    Row outRow = ObjectRowImpl.create(newRowLength());

    int offset = 0;
    for (int i = 0; i < inRow.fieldCount(); i++) {
      Object inVal = inRow.get(i, sourceFieldTypes.get(i));
      Projection p = getProjection(i);
      p.set(inVal, outRow, offset);
      offset += p.size();
    }
    return outRow;
  }

  private Projection getProjection(int index) {
    return projections.get(index);
  }

  /**
   * Computes the output row's length.
   *
   * @return an int which is the new length of the output row.
   */
  private int newRowLength() {
    return this.projections.stream().mapToInt(Projection::size).sum();
  }

  public List<DataType> getTypes() {
    return projections
        .stream()
        .flatMap(proj -> proj.getTypes().stream())
        .collect(Collectors.toList());
  }
}
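A builder sketch matching how SchemaInfer.buildTransform assembles its transformer: one projection per source field, then the source types. Both columns use IntegerType purely to keep the sketch simple; a real aggregate's source type would be whatever the promoted expression type is.

import org.tikv.operation.transformer.Cast;
import org.tikv.operation.transformer.NoOp;
import org.tikv.operation.transformer.RowTransformer;
import org.tikv.row.Row;
import org.tikv.types.IntegerType;

public class RowTransformerExample {
  static Row example(Row inRow) {
    RowTransformer rt =
        RowTransformer.newBuilder()
            .addProjection(new Cast(IntegerType.INT)) // cast a promoted aggregate back to INT
            .addProjection(new NoOp(IntegerType.INT)) // pass a group-by column through as-is
            .addSourceFieldTypes(IntegerType.INT, IntegerType.INT)
            .build();
    return rt.transform(inRow);
  }
}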
@@ -1,40 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.operation.transformer;

import com.google.common.collect.ImmutableList;
import java.util.List;
import org.tikv.row.Row;
import org.tikv.types.DataType;

public class Skip implements Projection {
  public static final Skip SKIP_OP = new Skip();

  @Override
  public void set(Object value, Row row, int pos) {}

  @Override
  public int size() {
    return 0;
  }

  @Override
  public List<DataType> getTypes() {
    return ImmutableList.of();
  }
}
@@ -1,46 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.predicates;

import com.google.common.collect.Range;
import org.tikv.key.Key;
import org.tikv.key.TypedKey;

public class IndexRange {
  private Key accessKey;
  private Range<TypedKey> range;

  public IndexRange(Key accessKey, Range<TypedKey> range) {
    this.accessKey = accessKey;
    this.range = range;
  }

  public Key getAccessKey() {
    return accessKey;
  }

  public boolean hasAccessKey() {
    return accessKey != null;
  }

  public boolean hasRange() {
    return range != null;
  }

  public Range<TypedKey> getRange() {
    return range;
  }
}
@@ -1,168 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.predicates;

import static java.util.Objects.requireNonNull;
import static org.tikv.expression.LogicalBinaryExpression.and;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Range;
import java.util.*;
import org.tikv.exception.TiExpressionException;
import org.tikv.expression.ColumnRef;
import org.tikv.expression.Expression;
import org.tikv.expression.Visitor;
import org.tikv.expression.visitor.DefaultVisitor;
import org.tikv.expression.visitor.IndexRangeBuilder;
import org.tikv.key.CompoundKey;
import org.tikv.key.Key;
import org.tikv.key.TypedKey;
import org.tikv.meta.TiIndexInfo;
import org.tikv.meta.TiTableInfo;

public class PredicateUtils {
  public static Expression mergeCNFExpressions(List<Expression> exprs) {
    requireNonNull(exprs, "Expression list is null");
    if (exprs.size() == 0) return null;
    if (exprs.size() == 1) return exprs.get(0);

    return and(exprs.get(0), mergeCNFExpressions(exprs.subList(1, exprs.size())));
  }

  public static Set<ColumnRef> extractColumnRefFromExpression(Expression expr) {
    Set<ColumnRef> columnRefs = new HashSet<>();
    Visitor<Void, Set<ColumnRef>> visitor =
        new DefaultVisitor<Void, Set<ColumnRef>>() {
          @Override
          protected Void visit(ColumnRef node, Set<ColumnRef> context) {
            context.add(node);
            return null;
          }
        };

    expr.accept(visitor, columnRefs);
    return columnRefs;
  }

  /**
   * Build index ranges from access points and access conditions.
   *
   * @param pointPredicates conditions converting to a single point access
   * @param rangePredicate conditions converting to a range
   * @return index ranges for the scan
   */
  public static List<IndexRange> expressionToIndexRanges(
      List<Expression> pointPredicates,
      Optional<Expression> rangePredicate,
      TiTableInfo table,
      TiIndexInfo index) {
    requireNonNull(pointPredicates, "pointPredicates is null");
    requireNonNull(rangePredicate, "rangePredicate is null");
    ImmutableList.Builder<IndexRange> builder = ImmutableList.builder();
    IndexRangeBuilder indexRangeBuilder = new IndexRangeBuilder(table, index);

    if (pointPredicates.size() != 0) {
      List<Key> pointKeys = expressionToPoints(pointPredicates, table, index);
      for (Key key : pointKeys) {
        if (rangePredicate.isPresent()) {
          Set<Range<TypedKey>> ranges = indexRangeBuilder.buildRange(rangePredicate.get());
          for (Range<TypedKey> range : ranges) {
            builder.add(new IndexRange(key, range));
          }
        } else {
          // point keys without range predicates lead to an empty range encoding
          builder.add(new IndexRange(key, null));
        }
      }
    } else {
      if (rangePredicate.isPresent()) {
        Set<Range<TypedKey>> ranges = indexRangeBuilder.buildRange(rangePredicate.get());
        for (Range<TypedKey> range : ranges) {
          builder.add(new IndexRange(null, range));
        }
      } else {
        // no filter at all means full range
        builder.add(new IndexRange(null, Range.all()));
      }
    }

    return builder.build();
  }

  /**
   * Turn access conditions into a list of points. Each condition is bound to a single key. We pick
   * a single condition for each index key and disregard the rest if multiple EQ conditions appear
   * in DNF.
   *
   * @param pointPredicates expressions convertible to access points
   * @return access points for each index
   */
  private static List<Key> expressionToPoints(
      List<Expression> pointPredicates, TiTableInfo table, TiIndexInfo index) {
    requireNonNull(pointPredicates, "pointPredicates cannot be null");

    List<Key> resultKeys = new ArrayList<>();
    IndexRangeBuilder indexRangeBuilder = new IndexRangeBuilder(table, index);

    for (Expression predicate : pointPredicates) {
      try {
        // each expr will be expanded to one or more points
        Set<Range<TypedKey>> ranges = indexRangeBuilder.buildRange(predicate);
        List<Key> points = rangesToPoint(ranges);
        resultKeys = joinKeys(resultKeys, points);
      } catch (Exception e) {
        throw new TiExpressionException(
            String.format("Error converting access points %s", predicate), e);
      }
    }
    return resultKeys;
  }

  // Convert ranges of equal-condition points to a list of TypedKeys
  private static List<Key> rangesToPoint(Set<Range<TypedKey>> ranges) {
    requireNonNull(ranges, "ranges is null");
    ImmutableList.Builder<Key> builder = ImmutableList.builder();
    for (Range<TypedKey> range : ranges) {
      // test if the range is a point
      if (range.hasLowerBound()
          && range.hasUpperBound()
          && range.lowerEndpoint().equals(range.upperEndpoint())) {
        builder.add(range.lowerEndpoint());
      } else {
        throw new TiExpressionException("Cannot convert range to point");
      }
    }
    return builder.build();
  }

  private static List<Key> joinKeys(List<Key> lhsKeys, List<Key> rhsKeys) {
    requireNonNull(lhsKeys, "lhsKeys is null");
    requireNonNull(rhsKeys, "rhsKeys is null");
    if (lhsKeys.isEmpty()) {
      return rhsKeys;
    }
    if (rhsKeys.isEmpty()) {
      return lhsKeys;
    }
    ImmutableList.Builder<Key> builder = ImmutableList.builder();
    for (Key lKey : lhsKeys) {
      for (Key rKey : rhsKeys) {
        builder.add(CompoundKey.concat(lKey, rKey));
      }
    }
    return builder.build();
  }
}
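expressionToPoints composes per-column point sets with joinKeys, which is a Cartesian product over CompoundKey.concat. A dependency-free sketch of the same product over strings:

import java.util.ArrayList;
import java.util.List;

public class JoinKeysDemo {
  // Mirrors PredicateUtils.joinKeys over plain strings: an empty side passes
  // the other through; otherwise every left key is concatenated with every
  // right key, standing in for CompoundKey.concat(lKey, rKey).
  static List<String> joinKeys(List<String> lhs, List<String> rhs) {
    if (lhs.isEmpty()) return rhs;
    if (rhs.isEmpty()) return lhs;
    List<String> out = new ArrayList<>();
    for (String l : lhs) {
      for (String r : rhs) {
        out.add(l + r);
      }
    }
    return out;
  }
}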
@@ -1,402 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.predicates;

import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
import static org.tikv.predicates.PredicateUtils.expressionToIndexRanges;
import static org.tikv.util.KeyRangeUtils.makeCoprocRange;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.BoundType;
import com.google.common.collect.Range;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.tikv.exception.TiClientInternalException;
import org.tikv.expression.Expression;
import org.tikv.expression.visitor.IndexMatcher;
import org.tikv.expression.visitor.MetaResolver;
import org.tikv.key.IndexKey;
import org.tikv.key.Key;
import org.tikv.key.RowKey;
import org.tikv.key.TypedKey;
import org.tikv.kvproto.Coprocessor.KeyRange;
import org.tikv.meta.TiColumnInfo;
import org.tikv.meta.TiIndexColumn;
import org.tikv.meta.TiIndexInfo;
import org.tikv.meta.TiTableInfo;
import org.tikv.statistics.IndexStatistics;
import org.tikv.statistics.TableStatistics;
import org.tikv.types.DataType;

public class ScanAnalyzer {
  private static final double INDEX_SCAN_COST_FACTOR = 1.2;
  private static final double TABLE_SCAN_COST_FACTOR = 1.0;
  private static final double DOUBLE_READ_COST_FACTOR = TABLE_SCAN_COST_FACTOR * 3;

  public static class ScanPlan {
    ScanPlan(
        List<KeyRange> keyRanges,
        Set<Expression> filters,
        TiIndexInfo index,
        double cost,
        boolean isDoubleRead,
        double estimatedRowCount) {
      this.filters = filters;
      this.keyRanges = keyRanges;
      this.cost = cost;
      this.index = index;
      this.isDoubleRead = isDoubleRead;
      this.estimatedRowCount = estimatedRowCount;
    }

    private final List<KeyRange> keyRanges;
    private final Set<Expression> filters;
    private final double cost;
    private TiIndexInfo index;
    private final boolean isDoubleRead;
    private final double estimatedRowCount;

    public double getEstimatedRowCount() {
      return estimatedRowCount;
    }

    public List<KeyRange> getKeyRanges() {
      return keyRanges;
    }

    public Set<Expression> getFilters() {
      return filters;
    }

    public double getCost() {
      return cost;
    }

    public boolean isIndexScan() {
      return index != null && !index.isFakePrimaryKey();
    }

    public TiIndexInfo getIndex() {
      return index;
    }

    public boolean isDoubleRead() {
      return isDoubleRead;
    }
  }

  public ScanPlan buildScan(
      List<TiColumnInfo> columnList, List<Expression> conditions, TiTableInfo table) {
    return buildScan(columnList, conditions, table, null);
  }

  // Build a scan plan, picking the access path with the lowest estimated cost
  public ScanPlan buildScan(
      List<TiColumnInfo> columnList,
      List<Expression> conditions,
      TiTableInfo table,
      TableStatistics tableStatistics) {
    ScanPlan minPlan = buildTableScan(conditions, table, tableStatistics);
    double minCost = minPlan.getCost();
    for (TiIndexInfo index : table.getIndices()) {
      ScanPlan plan = buildScan(columnList, conditions, index, table, tableStatistics);
      if (plan.getCost() < minCost) {
        minPlan = plan;
        minCost = plan.getCost();
      }
    }
    return minPlan;
  }

  public ScanPlan buildTableScan(
      List<Expression> conditions, TiTableInfo table, TableStatistics tableStatistics) {
    TiIndexInfo pkIndex = TiIndexInfo.generateFakePrimaryKeyIndex(table);
    return buildScan(table.getColumns(), conditions, pkIndex, table, tableStatistics);
  }

  public ScanPlan buildScan(
      List<TiColumnInfo> columnList,
      List<Expression> conditions,
      TiIndexInfo index,
      TiTableInfo table,
      TableStatistics tableStatistics) {
    requireNonNull(table, "Table cannot be null when encoding keyRange");
    requireNonNull(conditions, "conditions cannot be null when encoding keyRange");

    MetaResolver.resolve(conditions, table);

    ScanSpec result = extractConditions(conditions, table, index);

    double cost = SelectivityCalculator.calcPseudoSelectivity(result);

    List<IndexRange> irs =
        expressionToIndexRanges(
            result.getPointPredicates(), result.getRangePredicate(), table, index);

    List<KeyRange> keyRanges;
    boolean isDoubleRead = false;
    double estimatedRowCount = -1;
    // table name and columns
    int tableSize = table.getColumns().size() + 1;

    if (index == null || index.isFakePrimaryKey()) {
      if (tableStatistics != null) {
        cost = 100.0; // Full table scan cost
        // TODO: Fine-grained statistics usage
      }
      keyRanges = buildTableScanKeyRange(table, irs);
      cost *= tableSize * TABLE_SCAN_COST_FACTOR;
    } else {
      if (tableStatistics != null) {
        long totalRowCount = tableStatistics.getCount();
        IndexStatistics indexStatistics = tableStatistics.getIndexHistMap().get(index.getId());
        if (conditions.isEmpty()) {
          cost = 100.0; // Full index scan cost
          // TODO: Fine-grained statistics usage
          estimatedRowCount = totalRowCount;
        } else if (indexStatistics != null) {
          double idxRangeRowCnt = indexStatistics.getRowCount(irs);
          // guess the percentage of rows hit
          cost = 100.0 * idxRangeRowCnt / totalRowCount;
          estimatedRowCount = idxRangeRowCnt;
        }
      }
      isDoubleRead = !isCoveringIndex(columnList, index, table.isPkHandle());
      // table name, index and handle column
      int indexSize = index.getIndexColumns().size() + 2;
      if (isDoubleRead) {
        cost *= tableSize * DOUBLE_READ_COST_FACTOR + indexSize * INDEX_SCAN_COST_FACTOR;
      } else {
        cost *= indexSize * INDEX_SCAN_COST_FACTOR;
      }
      keyRanges = buildIndexScanKeyRange(table, index, irs);
    }

    return new ScanPlan(
        keyRanges, result.getResidualPredicates(), index, cost, isDoubleRead, estimatedRowCount);
  }
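To make the branch above concrete, plugging illustrative numbers into the two index cost formulas shows how heavily double read is penalized (the figures are invented, not taken from any real table):

public class CostSketch {
  public static void main(String[] args) {
    double cost = 5.0;   // pretend selectivity-derived base cost
    int tableSize = 11;  // 10 columns + table name, as computed in buildScan
    int indexSize = 4;   // 2 index columns + index + handle column
    double covering = cost * indexSize * 1.2;                       // INDEX_SCAN_COST_FACTOR
    double doubleRead = cost * (tableSize * 3.0 + indexSize * 1.2); // DOUBLE_READ + INDEX factors
    System.out.println(covering + " vs " + doubleRead);             // 24.0 vs 189.0
  }
}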
  @VisibleForTesting
  List<KeyRange> buildTableScanKeyRange(TiTableInfo table, List<IndexRange> indexRanges) {
    requireNonNull(table, "Table is null");
    requireNonNull(indexRanges, "indexRanges is null");

    List<KeyRange> ranges = new ArrayList<>(indexRanges.size());
    for (IndexRange ir : indexRanges) {
      Key startKey;
      Key endKey;
      if (ir.hasAccessKey()) {
        checkArgument(
            !ir.hasRange(), "Table scan must have one and only one access condition / point");

        Key key = ir.getAccessKey();
        checkArgument(key instanceof TypedKey, "Table scan key range must be typed key");
        TypedKey typedKey = (TypedKey) key;
        startKey = RowKey.toRowKey(table.getId(), typedKey);
        endKey = startKey.next();
      } else if (ir.hasRange()) {
        checkArgument(
            !ir.hasAccessKey(), "Table scan must have one and only one access condition / point");
        Range<TypedKey> r = ir.getRange();

        if (!r.hasLowerBound()) {
          // -INF
          startKey = RowKey.createMin(table.getId());
        } else {
          // Comparison with null should be filtered out since it always yields unknown
          startKey = RowKey.toRowKey(table.getId(), r.lowerEndpoint());
          if (r.lowerBoundType().equals(BoundType.OPEN)) {
            startKey = startKey.next();
          }
        }

        if (!r.hasUpperBound()) {
          // INF
          endKey = RowKey.createBeyondMax(table.getId());
        } else {
          endKey = RowKey.toRowKey(table.getId(), r.upperEndpoint());
          if (r.upperBoundType().equals(BoundType.CLOSED)) {
            endKey = endKey.next();
          }
        }
      } else {
        throw new TiClientInternalException("Empty access conditions");
      }

      // An empty range here is only possible when < MIN or > MAX
      if (!startKey.equals(endKey)) {
        ranges.add(makeCoprocRange(startKey.toByteString(), endKey.toByteString()));
      }
    }

    return ranges;
  }

  @VisibleForTesting
  List<KeyRange> buildIndexScanKeyRange(
      TiTableInfo table, TiIndexInfo index, List<IndexRange> indexRanges) {
    requireNonNull(table, "Table cannot be null when encoding keyRange");
    requireNonNull(index, "Index cannot be null when encoding keyRange");
    requireNonNull(indexRanges, "indexRanges cannot be null when encoding keyRange");

    List<KeyRange> ranges = new ArrayList<>(indexRanges.size());

    for (IndexRange ir : indexRanges) {
      Key pointKey = ir.hasAccessKey() ? ir.getAccessKey() : Key.EMPTY;

      Range<TypedKey> range = ir.getRange();
      Key lPointKey;
      Key uPointKey;

      Key lKey;
      Key uKey;
      if (!ir.hasRange()) {
        lPointKey = pointKey;
        uPointKey = pointKey.next();

        lKey = Key.EMPTY;
        uKey = Key.EMPTY;
      } else {
        lPointKey = pointKey;
        uPointKey = pointKey;

        if (!range.hasLowerBound()) {
          // -INF
          lKey = Key.NULL;
        } else {
          lKey = range.lowerEndpoint();
          if (range.lowerBoundType().equals(BoundType.OPEN)) {
            lKey = lKey.next();
          }
        }

        if (!range.hasUpperBound()) {
          // INF
          uKey = Key.MAX;
        } else {
          uKey = range.upperEndpoint();
          if (range.upperBoundType().equals(BoundType.CLOSED)) {
            uKey = uKey.next();
          }
        }
      }
      IndexKey lbsKey = IndexKey.toIndexKey(table.getId(), index.getId(), lPointKey, lKey);
      IndexKey ubsKey = IndexKey.toIndexKey(table.getId(), index.getId(), uPointKey, uKey);

      ranges.add(makeCoprocRange(lbsKey.toByteString(), ubsKey.toByteString()));
    }

    return ranges;
  }

  boolean isCoveringIndex(
      List<TiColumnInfo> columns, TiIndexInfo indexColumns, boolean pkIsHandle) {
    for (TiColumnInfo colInfo : columns) {
      if (pkIsHandle && colInfo.isPrimaryKey()) {
        continue;
      }
      if (colInfo.getId() == -1) {
        continue;
      }
      boolean isIndexColumn = false;
      for (TiIndexColumn indexCol : indexColumns.getIndexColumns()) {
        boolean isFullLength =
            indexCol.getLength() == DataType.UNSPECIFIED_LEN
                || indexCol.getLength() == colInfo.getType().getLength();
        if (colInfo.getName().equalsIgnoreCase(indexCol.getName()) && isFullLength) {
          isIndexColumn = true;
          break;
        }
      }
      if (!isIndexColumn) {
        return false;
      }
    }
    return true;
  }

  @VisibleForTesting
  public static ScanSpec extractConditions(
      List<Expression> conditions, TiTableInfo table, TiIndexInfo index) {
    // 0. Unlike the TiDB implementation, the logic here has been unified for TableScan and
    //    IndexScan by adding a fake index on a clustered table's pk
    // 1. Generate access points based on equal conditions
    // 2. Cut the access point condition if the index is not continuous
    // 3. Push back prefix index conditions, since a prefix index retrieves more results than needed
    // 4. For the remaining index columns (access conditions consume some of the index, and they
    //    will not be used in filter push-down later), find continuously matching columns until the
    //    first unmatched one
    // 5. Push back index-related filters if a prefix index, for the remaining filters
    // Equal conditions need to be processed first according to the index sequence
    // When index is null, no access condition can be applied
    ScanSpec.Builder specBuilder = new ScanSpec.Builder(table, index);
    if (index != null) {
      Set<Expression> visited = new HashSet<>();
      IndexMatchingLoop:
      for (int i = 0; i < index.getIndexColumns().size(); i++) {
        // for each index column, try to match an equal condition
        // and push the remainder back
        // TODO: if more than one equal condition matches an
        // index, it likely yields nothing. Maybe a check is needed
        // to simplify it to a false condition
        TiIndexColumn col = index.getIndexColumns().get(i);
        IndexMatcher eqMatcher = IndexMatcher.equalOnlyMatcher(col);
        boolean found = false;
        // The first prefix index encountered is equivalent to a range,
        // and we cannot push equal conditions further
        for (Expression cond : conditions) {
          if (visited.contains(cond)) {
            continue;
          }
          if (eqMatcher.match(cond)) {
            specBuilder.addPointPredicate(col, cond);
            if (col.isPrefixIndex()) {
              specBuilder.addResidualPredicate(cond);
              break IndexMatchingLoop;
            }
            visited.add(cond);
            found = true;
            break;
          }
        }
        if (!found) {
          // For the first "broken index chain piece",
          // search for a matching range condition
|
||||
IndexMatcher matcher = IndexMatcher.matcher(col);
|
||||
for (Expression cond : conditions) {
|
||||
if (visited.contains(cond)) {
|
||||
continue;
|
||||
}
|
||||
if (matcher.match(cond)) {
|
||||
specBuilder.addRangePredicate(col, cond);
|
||||
if (col.isPrefixIndex()) {
|
||||
specBuilder.addResidualPredicate(cond);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
specBuilder.addAllPredicates(conditions);
|
||||
return specBuilder.build();
|
||||
}
|
||||
}
|
||||
|
|
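The bound normalization in buildTableScanKeyRange is easy to get wrong, so here is a minimal, self-contained sketch of the same rule using plain integers in place of encoded keys (the class name and the +1 model of Key.next() are illustrative assumptions, not client-java API):

import com.google.common.collect.BoundType;
import com.google.common.collect.Range;

public class HalfOpenRangeDemo {
  // Normalize an arbitrary Guava range over int "keys" into the half-open
  // [start, end) form used by coprocessor key ranges: an open lower bound is
  // bumped past its endpoint, a closed upper bound is extended past its
  // endpoint so the scan still includes it.
  static int[] toHalfOpen(Range<Integer> r, int min, int beyondMax) {
    int start = !r.hasLowerBound() ? min
        : r.lowerBoundType() == BoundType.OPEN ? r.lowerEndpoint() + 1 : r.lowerEndpoint();
    int end = !r.hasUpperBound() ? beyondMax
        : r.upperBoundType() == BoundType.CLOSED ? r.upperEndpoint() + 1 : r.upperEndpoint();
    return new int[] {start, end};
  }

  public static void main(String[] args) {
    // (3, 7] becomes [4, 8)
    int[] range = toHalfOpen(Range.openClosed(3, 7), 0, 100);
    System.out.println(range[0] + " .. " + range[1]); // prints "4 .. 8"
  }
}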
@@ -1,144 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.predicates;

import static java.util.Objects.requireNonNull;
import static org.tikv.predicates.PredicateUtils.mergeCNFExpressions;

import com.google.common.collect.ImmutableList;
import java.util.*;
import org.tikv.exception.TiExpressionException;
import org.tikv.expression.Expression;
import org.tikv.meta.TiColumnInfo;
import org.tikv.meta.TiIndexColumn;
import org.tikv.meta.TiIndexInfo;
import org.tikv.meta.TiTableInfo;
import org.tikv.types.DataType;

public class ScanSpec {
  public static class Builder {
    private final IdentityHashMap<TiIndexColumn, List<Expression>> pointPredicates =
        new IdentityHashMap<>();
    private TiIndexColumn rangeColumn;
    private final TiTableInfo table;
    private final TiIndexInfo index;
    private final List<Expression> rangePredicates = new ArrayList<>();
    private final List<Expression> residualPredicates = new ArrayList<>();
    private Set<Expression> residualCandidates = new HashSet<>();

    public Builder(TiTableInfo table, TiIndexInfo index) {
      this.table = table;
      this.index = index;
    }

    public void addResidualPredicate(Expression predicate) {
      residualPredicates.add(predicate);
    }

    public void addAllPredicates(List<Expression> predicates) {
      residualCandidates.addAll(predicates);
    }

    public void addPointPredicate(TiIndexColumn col, Expression predicate) {
      requireNonNull(col, "index column is null");
      requireNonNull(predicate, "predicate is null");
      if (pointPredicates.containsKey(col)) {
        List<Expression> predicates = pointPredicates.get(col);
        predicates.add(predicate);
      } else {
        List<Expression> predicates = new ArrayList<>();
        predicates.add(predicate);
        pointPredicates.put(col, predicates);
      }
    }

    public void addRangePredicate(TiIndexColumn col, Expression predicate) {
      requireNonNull(col, "col is null");
      if (rangeColumn == null) {
        rangeColumn = col;
      } else if (!rangeColumn.equals(col)) {
        throw new TiExpressionException("Cannot reset range predicates");
      }
      rangePredicates.add(predicate);
    }

    public ScanSpec build() {
      List<Expression> points = new ArrayList<>();
      List<DataType> pointTypes = new ArrayList<>();
      Set<Expression> pushedPredicates = new HashSet<>();
      if (index != null) {
        for (TiIndexColumn indexColumn : index.getIndexColumns()) {
          List<Expression> predicates = pointPredicates.get(indexColumn);
          if (predicates == null) {
            break;
          }
          pushedPredicates.addAll(predicates);
          TiColumnInfo tiColumnInfo = table.getColumn(indexColumn.getOffset());
          DataType type = tiColumnInfo.getType();
          points.add(mergeCNFExpressions(predicates));
          pointTypes.add(type);
        }
      }
      Optional<Expression> newRangePred =
          rangePredicates.isEmpty()
              ? Optional.empty()
              : Optional.of(mergeCNFExpressions(rangePredicates));
      pushedPredicates.addAll(rangePredicates);

      Set<Expression> newResidualPredicates = new HashSet<>(residualPredicates);
      for (Expression pred : residualCandidates) {
        if (!pushedPredicates.contains(pred)) {
          newResidualPredicates.add(pred);
        }
      }

      Optional<DataType> rangeType;
      if (rangeColumn == null) {
        rangeType = Optional.empty();
      } else {
        TiColumnInfo col = table.getColumn(rangeColumn.getOffset());
        rangeType = Optional.of(col.getType());
      }

      return new ScanSpec(ImmutableList.copyOf(points), newRangePred, newResidualPredicates);
    }
  }

  private final List<Expression> pointPredicates;
  private final Optional<Expression> rangePredicate;
  private final Set<Expression> residualPredicates;

  private ScanSpec(
      List<Expression> pointPredicates,
      Optional<Expression> rangePredicate,
      Set<Expression> residualPredicates) {
    this.pointPredicates = pointPredicates;
    this.rangePredicate = rangePredicate;
    this.residualPredicates = residualPredicates;
  }

  public List<Expression> getPointPredicates() {
    return pointPredicates;
  }

  public Optional<Expression> getRangePredicate() {
    return rangePredicate;
  }

  public Set<Expression> getResidualPredicates() {
    return residualPredicates;
  }
}
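To make the builder's intended call sequence concrete, here is a minimal sketch of how extractConditions drives it. The variables table, index, eqOnCol0, rangeOnCol1, and allConditions are placeholders for metadata and parsed expressions obtained elsewhere; only the builder calls themselves come from this file:

// point predicate on the first index column (e.g. a = 1)
ScanSpec.Builder builder = new ScanSpec.Builder(table, index);
builder.addPointPredicate(index.getIndexColumns().get(0), eqOnCol0);
// range predicate on the next column (e.g. b > 10); a second, different
// range column would throw TiExpressionException
builder.addRangePredicate(index.getIndexColumns().get(1), rangeOnCol1);
builder.addAllPredicates(allConditions); // anything not pushed becomes residual
ScanSpec spec = builder.build();
spec.getResidualPredicates(); // filters to re-apply after the scan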
@@ -1,42 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.predicates;

import java.util.Optional;
import org.tikv.expression.Expression;
import org.tikv.expression.visitor.DefaultVisitor;
import org.tikv.expression.visitor.PseudoCostCalculator;

public class SelectivityCalculator extends DefaultVisitor<Double, Void> {
  public static double calcPseudoSelectivity(ScanSpec spec) {
    Optional<Expression> rangePred = spec.getRangePredicate();
    double cost = 100.0;
    if (spec.getPointPredicates() != null) {
      for (Expression expr : spec.getPointPredicates()) {
        cost *= PseudoCostCalculator.calculateCost(expr);
      }
    }
    if (rangePred.isPresent()) {
      cost *= PseudoCostCalculator.calculateCost(rangePred.get());
    }
    return cost;
  }

  @Override
  protected Double process(Expression node, Void context) {
    return 1.0;
  }
}
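The model above is purely multiplicative: each pushed predicate scales the base cost of 100 by its own selectivity factor. A worked example, with hypothetical per-predicate factors (PseudoCostCalculator's real values are not shown in this diff):

double cost = 100.0; // base cost of a bare scan
cost *= 0.1 * 0.1;   // two point predicates, assumed factor 0.1 each
cost *= 0.3;         // one range predicate, assumed factor 0.3
// cost == 0.3, i.e. the plan is estimated ~333x cheaper than the bare scan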
@@ -177,6 +177,9 @@ public class RegionManager {
      if (store.getState().equals(StoreState.Tombstone)) {
        return null;
      }
      if (logger.isDebugEnabled()) {
        logger.debug(String.format("getStoreById ID[%s] -> Store[%s]", id, store));
      }
      storeCache.put(id, store);
      return store;
    } catch (Exception e) {
@@ -19,26 +19,17 @@ package org.tikv.region;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.tikv.region.RegionStoreClient.RequestTypes.REQ_TYPE_DAG;
import static org.tikv.util.BackOffFunction.BackOffFuncType.BoRegionMiss;
import static org.tikv.util.BackOffFunction.BackOffFuncType.BoTxnLockFast;

import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import com.pingcap.tidb.tipb.DAGRequest;
import com.pingcap.tidb.tipb.SelectResponse;
import io.grpc.ManagedChannel;
import java.util.*;
import java.util.function.Supplier;
import jline.internal.TestAccessible;
import org.apache.hadoop.hive.ql.lockmgr.LockException;
import org.apache.log4j.Logger;
import org.tikv.AbstractGRPCClient;
import org.tikv.TiSession;
import org.tikv.exception.*;
import org.tikv.kvproto.Coprocessor;
import org.tikv.kvproto.Coprocessor.KeyRange;
import org.tikv.kvproto.Errorpb;
import org.tikv.kvproto.Kvrpcpb.BatchGetRequest;
import org.tikv.kvproto.Kvrpcpb.BatchGetResponse;
import org.tikv.kvproto.Kvrpcpb.Context;

@@ -60,34 +51,13 @@ import org.tikv.kvproto.TikvGrpc;
import org.tikv.kvproto.TikvGrpc.TikvBlockingStub;
import org.tikv.kvproto.TikvGrpc.TikvStub;
import org.tikv.operation.KVErrorHandler;
import org.tikv.streaming.StreamingResponse;
import org.tikv.txn.Lock;
import org.tikv.txn.LockResolverClient;
import org.tikv.util.BackOffFunction;
import org.tikv.util.BackOffer;
import org.tikv.util.ConcreteBackOffer;
import org.tikv.util.RangeSplitter;

// RegionStoreClient itself is not thread-safe
public class RegionStoreClient extends AbstractGRPCClient<TikvBlockingStub, TikvStub>
    implements RegionErrorReceiver {
  public enum RequestTypes {
    REQ_TYPE_SELECT(101),
    REQ_TYPE_INDEX(102),
    REQ_TYPE_DAG(103),
    REQ_TYPE_ANALYZE(104),
    BATCH_ROW_COUNT(64);

    private final int value;

    RequestTypes(int value) {
      this.value = value;
    }

    public int getValue() {
      return value;
    }
  }

  private static final Logger logger = Logger.getLogger(RegionStoreClient.class);
  private TiRegion region;

@@ -99,7 +69,7 @@ public class RegionStoreClient extends AbstractGRPCClient<TikvBlockingStub, Tikv
  // lockResolverClient is public for now; once the write path of
  // TiSpark is implemented, it can be made private
  @TestAccessible public final LockResolverClient lockResolverClient;
  public final LockResolverClient lockResolverClient;
  private TikvBlockingStub blockingStub;
  private TikvStub asyncStub;

@@ -384,145 +354,6 @@ public class RegionStoreClient extends AbstractGRPCClient<TikvBlockingStub, Tikv
    return resp.getKvsList();
  }

  /**
   * Execute and retrieve the response from the TiKV server.
   *
   * @param req select request to process
   * @param ranges key range list
   * @return remaining tasks of this request if a task split happens; null otherwise
   */
  public List<RangeSplitter.RegionTask> coprocess(
      BackOffer backOffer,
      DAGRequest req,
      List<KeyRange> ranges,
      Queue<SelectResponse> responseQueue) {
    if (req == null || ranges == null || req.getExecutorsCount() < 1) {
      throw new IllegalArgumentException("Invalid coprocess argument!");
    }

    Supplier<Coprocessor.Request> reqToSend =
        () ->
            Coprocessor.Request.newBuilder()
                .setContext(region.getContext())
                .setTp(REQ_TYPE_DAG.getValue())
                .setData(req.toByteString())
                .addAllRanges(ranges)
                .build();

    // we should handle the region error ourselves
    KVErrorHandler<Coprocessor.Response> handler =
        new KVErrorHandler<>(
            regionManager,
            this,
            region,
            resp -> resp.hasRegionError() ? resp.getRegionError() : null);
    Coprocessor.Response resp =
        callWithRetry(backOffer, TikvGrpc.METHOD_COPROCESSOR, reqToSend, handler);
    return handleCopResponse(backOffer, resp, ranges, responseQueue);
  }

  // handleCopResponse checks the coprocessor Response for region splits and locks,
  // returns more tasks when either happens, and otherwise handles the response.
  // When handling a streaming coprocessor response, lastRange is the range of the
  // last successful response; otherwise it is nil.
  private List<RangeSplitter.RegionTask> handleCopResponse(
      BackOffer backOffer,
      Coprocessor.Response response,
      List<KeyRange> ranges,
      Queue<SelectResponse> responseQueue) {
    if (response.hasRegionError()) {
      Errorpb.Error regionError = response.getRegionError();
      backOffer.doBackOff(
          BackOffFunction.BackOffFuncType.BoRegionMiss, new GrpcException(regionError.toString()));
      logger.warn("Re-splitting region task due to region error: " + regionError.getMessage());
      // Split ranges
      return RangeSplitter.newSplitter(session.getRegionManager()).splitRangeByRegion(ranges);
    }

    if (response.hasLocked()) {
      logger.debug(String.format("coprocessor encounters locks: %s", response.getLocked()));
      Lock lock = new Lock(response.getLocked());
      boolean ok = lockResolverClient.resolveLocks(backOffer, new ArrayList<>(Arrays.asList(lock)));
      if (!ok) {
        backOffer.doBackOff(BoTxnLockFast, new LockException());
      }
      // Split ranges
      return RangeSplitter.newSplitter(session.getRegionManager()).splitRangeByRegion(ranges);
    }

    String otherError = response.getOtherError();
    if (otherError != null && !otherError.isEmpty()) {
      logger.warn(String.format("Other error occurred, message: %s", otherError));
      throw new GrpcException(otherError);
    }

    responseQueue.offer(coprocessorHelper(response));
    return null;
  }

  // TODO: wait for future fix
  // coprocessStreaming doesn't handle split errors;
  // future work should handle them and resolve
  // locks correspondingly
  public Iterator<SelectResponse> coprocessStreaming(DAGRequest req, List<KeyRange> ranges) {
    Supplier<Coprocessor.Request> reqToSend =
        () ->
            Coprocessor.Request.newBuilder()
                .setContext(region.getContext())
                // TODO: If no executors...?
                .setTp(REQ_TYPE_DAG.getValue())
                .setData(req.toByteString())
                .addAllRanges(ranges)
                .build();

    KVErrorHandler<StreamingResponse> handler =
        new KVErrorHandler<>(
            regionManager,
            this,
            region,
            StreamingResponse::getFirstRegionError // TODO: handle all errors in streaming response
            );

    StreamingResponse responseIterator =
        this.callServerStreamingWithRetry(
            ConcreteBackOffer.newCopNextMaxBackOff(),
            TikvGrpc.METHOD_COPROCESSOR_STREAM,
            reqToSend,
            handler);
    return coprocessorHelper(responseIterator);
  }

  private Iterator<SelectResponse> coprocessorHelper(StreamingResponse response) {
    Iterator<Coprocessor.Response> responseIterator = response.iterator();
    // If we got nothing to handle, return null
    if (!responseIterator.hasNext()) {
      return null;
    }

    // Simply wrap it
    return new Iterator<SelectResponse>() {
      @Override
      public boolean hasNext() {
        return responseIterator.hasNext();
      }

      @Override
      public SelectResponse next() {
        return coprocessorHelper(responseIterator.next());
      }
    };
  }

  private SelectResponse coprocessorHelper(Coprocessor.Response resp) {
    try {
      SelectResponse selectResp = SelectResponse.parseFrom(resp.getData());
      if (selectResp.hasError()) {
        throw new SelectException(selectResp.getError(), selectResp.getError().getMsg());
      }
      return selectResp;
    } catch (InvalidProtocolBufferException e) {
      throw new TiClientInternalException("Error parsing protobuf for coprocessor response.", e);
    }
  }

  public TiSession getSession() {
    return session;
  }
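Since coprocess() returns null on success and re-split tasks on a region miss or lock, callers are expected to run a drain-and-retry loop. A minimal sketch of such a driver, assuming client, backOffer, dagRequest, and the initial tasks (from RangeSplitter) already exist; the queueing structure is illustrative, not this library's API:

Queue<SelectResponse> responses = new LinkedList<>();
Deque<RangeSplitter.RegionTask> pending = new ArrayDeque<>(tasks);
while (!pending.isEmpty()) {
  RangeSplitter.RegionTask task = pending.poll();
  // On success the parsed SelectResponse lands in `responses` and null is
  // returned; on region error or lock, coprocess backs off internally and
  // hands back the re-split tasks, which we push back and retry.
  List<RangeSplitter.RegionTask> remaining =
      client.coprocess(backOffer, dagRequest, task.getRanges(), responses);
  if (remaining != null) {
    pending.addAll(remaining);
  }
}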
@@ -1,48 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.row;

import org.tikv.codec.CodecDataInput;
import org.tikv.types.DataType;

public class DefaultRowReader implements RowReader {
  private final CodecDataInput cdi;

  public static DefaultRowReader create(CodecDataInput cdi) {
    return new DefaultRowReader(cdi);
  }

  DefaultRowReader(CodecDataInput cdi) {
    this.cdi = cdi;
  }

  @Override
  public Row readRow(DataType[] dataTypes) {
    int length = dataTypes.length;
    Row row = ObjectRowImpl.create(length);
    for (int i = 0; i < length; i++) {
      DataType type = dataTypes[i];
      if (type.isNextNull(cdi)) {
        cdi.readUnsignedByte();
        row.setNull(i);
      } else {
        row.set(i, type, type.decode(cdi));
      }
    }
    return row;
  }
}
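A sketch of decoding one encoded row with this reader, assuming rowBytes holds a value-encoded row and types matches the schema's column order (both placeholders obtained elsewhere):

CodecDataInput cdi = new CodecDataInput(rowBytes);
RowReader reader = RowReaderFactory.createRowReader(cdi);
Row row = reader.readRow(types);
if (!row.isNull(0)) {
  long id = row.getLong(0); // callers must check isNull before typed getters
}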
@@ -1,174 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.row;

import java.math.BigDecimal;
import java.sql.Date;
import java.sql.Time;
import java.sql.Timestamp;
import org.tikv.types.DataType;

// A dummy implementation of the Row interface
// using a non-memory-compact format
public class ObjectRowImpl implements Row {
  private final Object[] values;

  public static Row create(int fieldCount) {
    return new ObjectRowImpl(fieldCount);
  }

  private ObjectRowImpl(int fieldCount) {
    values = new Object[fieldCount];
  }

  @Override
  public void setNull(int pos) {
    values[pos] = null;
  }

  @Override
  public boolean isNull(int pos) {
    return values[pos] == null;
  }

  @Override
  public void setFloat(int pos, float v) {
    values[pos] = v;
  }

  @Override
  public float getFloat(int pos) {
    return (float) values[pos];
  }

  @Override
  public void setInteger(int pos, int v) {
    values[pos] = v;
  }

  @Override
  public int getInteger(int pos) {
    return (int) values[pos];
  }

  @Override
  public void setShort(int pos, short v) {
    values[pos] = v;
  }

  @Override
  public short getShort(int pos) {
    return (short) values[pos];
  }

  @Override
  public void setDouble(int pos, double v) {
    values[pos] = v;
  }

  @Override
  public double getDouble(int pos) {
    // Null must be handled by client code via isNull;
    // all getters below behave the same way
    return (double) values[pos];
  }

  @Override
  public void setLong(int pos, long v) {
    values[pos] = v;
  }

  @Override
  public long getLong(int pos) {
    // Null must be handled by client code via isNull;
    // all getters below behave the same way
    return (long) values[pos];
  }

  @Override
  public long getUnsignedLong(int pos) {
    return ((BigDecimal) values[pos]).longValue();
  }

  @Override
  public void setString(int pos, String v) {
    values[pos] = v;
  }

  @Override
  public String getString(int pos) {
    return (String) values[pos];
  }

  @Override
  public void setTime(int pos, Time v) {
    values[pos] = v;
  }

  @Override
  public Date getTime(int pos) {
    return (Date) values[pos];
  }

  @Override
  public void setTimestamp(int pos, Timestamp v) {
    values[pos] = v;
  }

  @Override
  public Timestamp getTimestamp(int pos) {
    return (Timestamp) values[pos];
  }

  @Override
  public void setDate(int pos, Date v) {
    values[pos] = v;
  }

  @Override
  public Date getDate(int pos) {
    return (Date) values[pos];
  }

  @Override
  public void setBytes(int pos, byte[] v) {
    values[pos] = v;
  }

  @Override
  public byte[] getBytes(int pos) {
    return (byte[]) values[pos];
  }

  @Override
  public void set(int pos, DataType type, Object v) {
    // Ignore type for this implementation since no serialization happens
    values[pos] = v;
  }

  @Override
  public Object get(int pos, DataType type) {
    // Ignore type for this implementation since no serialization happens
    return values[pos];
  }

  @Override
  public int fieldCount() {
    return values.length;
  }
}
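A tiny round-trip sketch of the Object-backed row's contract: getters are plain unboxing casts with no conversion, so the type used to set a slot must be the type used to get it, and a null slot must be guarded with isNull:

Row row = ObjectRowImpl.create(2);
row.setLong(0, 42L);
row.setNull(1);
assert row.getLong(0) == 42L; // plain unboxing cast, no conversion
assert row.isNull(1);         // getLong(1) here would throw a NullPointerException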
@@ -1,81 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.row;

import java.sql.Date;
import java.sql.Time;
import java.sql.Timestamp;
import org.tikv.types.DataType;

/**
 * Even for a mem-buffer-based row we can ignore field types when encoding and decoding, provided
 * we add padding bits for fixed-length fields and a fixed-length index for variable-length ones.
 */
public interface Row {
  void setNull(int pos);

  boolean isNull(int pos);

  void setFloat(int pos, float v);

  float getFloat(int pos);

  void setDouble(int pos, double v);

  double getDouble(int pos);

  void setInteger(int pos, int v);

  int getInteger(int pos);

  void setShort(int pos, short v);

  short getShort(int pos);

  void setLong(int pos, long v);

  long getLong(int pos);

  long getUnsignedLong(int pos);

  void setString(int pos, String v);

  String getString(int pos);

  void setTime(int pos, Time v);

  Date getTime(int pos);

  void setTimestamp(int pos, Timestamp v);

  Timestamp getTimestamp(int pos);

  void setDate(int pos, Date v);

  Date getDate(int pos);

  void setBytes(int pos, byte[] v);

  byte[] getBytes(int pos);

  void set(int pos, DataType type, Object v);

  Object get(int pos, DataType type);

  int fieldCount();
}
@@ -1,24 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.row;

import org.tikv.types.DataType;

public interface RowReader {
  Row readRow(DataType[] dataTypes);
}
@@ -1,26 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.row;

import org.tikv.codec.CodecDataInput;

public class RowReaderFactory {
  public static RowReader createRowReader(CodecDataInput cdi) {
    return new DefaultRowReader(cdi);
  }
}
@@ -1,116 +0,0 @@
/*
 *
 * Copyright 2018 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.statistics;

import com.google.protobuf.ByteString;
import org.tikv.key.Key;

/**
 * Bucket is a histogram element.
 *
 * <p>The bucket bounds are the smallest and greatest values stored in the bucket. They are stored
 * in the bucket as lowerBound and upperBound.
 *
 * <p>A bucket count is the number of items stored in all previous buckets plus the current bucket.
 * Bucket counts are always in increasing order.
 *
 * <p>A bucket repeat is the number of repeats of the greatest bucket value; it can be used to find
 * popular values.
 *
 * <p>Note that lowerBound and upperBound keys should be comparable objects, and these bounds are
 * encoded as the `binary` type in TiDB. Intuitively, you should also use keys encoded in binary
 * format when comparing during row count estimation.
 */
public class Bucket implements Comparable<Bucket> {
  public long count;
  private long repeats;
  private Key lowerBound;
  private Key upperBound;

  public Bucket(long count, long repeats, ByteString lowerBound, ByteString upperBound) {
    this.count = count;
    this.repeats = repeats;
    this.lowerBound = Key.toRawKey(lowerBound);
    this.upperBound = Key.toRawKey(upperBound);
  }

  public Bucket(long count, long repeats, Key lowerBound, Key upperBound) {
    this.count = count;
    this.repeats = repeats;
    this.lowerBound = lowerBound;
    this.upperBound = upperBound;
    assert upperBound != null;
  }

  /** used for binary search only */
  public Bucket(Key upperBound) {
    this.upperBound = upperBound;
    assert upperBound != null;
  }

  @Override
  @SuppressWarnings("unchecked")
  public int compareTo(Bucket b) {
    return upperBound.compareTo(b.upperBound);
  }

  public long getCount() {
    return count;
  }

  public void setCount(long count) {
    this.count = count;
  }

  public long getRepeats() {
    return repeats;
  }

  public void setRepeats(long repeats) {
    this.repeats = repeats;
  }

  public Key getLowerBound() {
    return lowerBound;
  }

  public void setLowerBound(Key lowerBound) {
    this.lowerBound = lowerBound;
  }

  public Key getUpperBound() {
    return upperBound;
  }

  public void setUpperBound(Key upperBound) {
    this.upperBound = upperBound;
  }

  @Override
  public String toString() {
    return "{count="
        + count
        + ", repeats="
        + repeats
        + ", range=["
        + lowerBound
        + ", "
        + upperBound.toString()
        + "]}";
  }
}
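A worked sketch of the cumulative-count convention described in the javadoc. The keys below are placeholders built from made-up bytes, purely to make the bounds concrete:

Key k1 = Key.toRawKey(ByteString.copyFromUtf8("a"));
Key k2 = Key.toRawKey(ByteString.copyFromUtf8("f"));
Key k3 = Key.toRawKey(ByteString.copyFromUtf8("g"));
Key k4 = Key.toRawKey(ByteString.copyFromUtf8("z"));
Bucket b1 = new Bucket(30, 5, k1, k2); // rows with key <= k2: 30; k2 repeats 5 times
Bucket b2 = new Bucket(50, 2, k3, k4); // rows with key <= k4: 50 (cumulative, not per-bucket)
long rowsInSecondBucket = b2.getCount() - b1.getCount(); // 50 - 30 = 20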
@@ -1,96 +0,0 @@
/*
 * Copyright 2018 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.statistics;

import com.sangupta.murmur.Murmur3;
import java.util.Arrays;

public class CMSketch {
  private int depth;
  private int width;
  private long count;
  private long[][] table;

  public int getDepth() {
    return depth;
  }

  public void setDepth(int depth) {
    this.depth = depth;
  }

  public int getWidth() {
    return width;
  }

  public void setWidth(int width) {
    this.width = width;
  }

  public long getCount() {
    return count;
  }

  public void setCount(long count) {
    this.count = count;
  }

  public long[][] getTable() {
    return table;
  }

  public void setTable(long[][] table) {
    this.table = table;
  }

  // Hide constructor
  private CMSketch() {}

  public static CMSketch newCMSketch(int d, int w) {
    CMSketch sketch = new CMSketch();
    sketch.setTable(new long[d][w]);
    sketch.setDepth(d);
    sketch.setWidth(w);
    return sketch;
  }

  public long queryBytes(byte[] bytes) {
    long[] randNums = Murmur3.hash_x64_128(bytes, bytes.length, 0);
    long h1 = randNums[0];
    long h2 = randNums[1];
    long min = Long.MAX_VALUE;
    long[] vals = new long[depth];
    for (int i = 0; i < table.length; i++) {
      // Use the unsigned remainder: h1 + h2 * i can overflow into a negative
      // long, and a plain % would then yield a negative column index.
      int j = (int) Long.remainderUnsigned(h1 + h2 * i, width);
      if (min > table[i][j]) {
        min = table[i][j];
      }
      long noise = (count - table[i][j]) / (width - 1);
      if (table[i][j] < noise) {
        vals[i] = 0;
      } else {
        vals[i] = table[i][j] - noise;
      }
    }
    Arrays.sort(vals);
    long res = vals[(depth - 1) / 2] + (vals[depth / 2] - vals[(depth - 1) / 2]) / 2;
    if (res > min) {
      return min;
    }
    return res;
  }
}
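A sketch of the query path. This file only ships the read side (the sketch is populated server-side by TiDB's ANALYZE), so a fresh sketch simply returns 0; the dimensions and the input bytes below are arbitrary placeholders:

CMSketch sketch = CMSketch.newCMSketch(5, 2048);     // depth 5, width 2048
byte[] encoded = "some-encoded-value".getBytes();    // stand-in for an encoded key
long estimate = sketch.queryBytes(encoded);          // 0 for an empty sketch
// queryBytes takes the median of the noise-corrected counters across rows
// (count-mean-min), capped by the plain count-min minimum.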
@@ -1,71 +0,0 @@
/*
 *
 * Copyright 2018 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.statistics;

import org.tikv.meta.TiColumnInfo;

/**
 * Each column has a single {@link ColumnStatistics} storing its {@link Histogram} info and
 * {@link CMSketch} info, if any.
 */
public class ColumnStatistics {
  private Histogram histogram;
  private CMSketch cmSketch;
  private long count;
  private TiColumnInfo columnInfo;

  public ColumnStatistics(
      Histogram histogram, CMSketch cmSketch, long count, TiColumnInfo columnInfo) {
    this.histogram = histogram;
    this.cmSketch = cmSketch;
    this.count = count;
    this.columnInfo = columnInfo;
  }

  public Histogram getHistogram() {
    return histogram;
  }

  public void setHistogram(Histogram histogram) {
    this.histogram = histogram;
  }

  public CMSketch getCmSketch() {
    return cmSketch;
  }

  public void setCmSketch(CMSketch cmSketch) {
    this.cmSketch = cmSketch;
  }

  public long getCount() {
    return count;
  }

  public void setCount(long count) {
    this.count = count;
  }

  public TiColumnInfo getColumnInfo() {
    return columnInfo;
  }

  public void setColumnInfo(TiColumnInfo columnInfo) {
    this.columnInfo = columnInfo;
  }
}
@@ -1,264 +0,0 @@
/*
 * Copyright 2018 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.statistics;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import org.tikv.key.Key;

/**
 * Histogram represents core statistics for a column or index.
 *
 * <p>Each Histogram has at most 256 buckets; each bucket contains a lower bound, an upper bound,
 * and the number of rows contained within those bounds. With this information, the SQL layer can
 * estimate how many rows an index scan will select and determine which index to use for the
 * lowest cost.
 *
 * @see Bucket for more information on the core data structure.
 */
public class Histogram {

  private final long id; // Column ID
  private final long numberOfDistinctValue; // Number of distinct values.
  private List<Bucket> buckets; // Histogram bucket list.
  private final long nullCount;
  private final long lastUpdateVersion;

  public static class Builder {
    private long id;
    private long NDV;
    private List<Bucket> buckets = new ArrayList<>();
    private long nullCount;
    private long lastUpdateVersion;

    public Builder setId(long id) {
      this.id = id;
      return this;
    }

    public Builder setNDV(long NDV) {
      this.NDV = NDV;
      return this;
    }

    public Builder setBuckets(List<Bucket> buckets) {
      this.buckets = new ArrayList<>(buckets);
      return this;
    }

    public Builder setNullCount(long nullCount) {
      this.nullCount = nullCount;
      return this;
    }

    public Builder setLastUpdateVersion(long lastUpdateVersion) {
      this.lastUpdateVersion = lastUpdateVersion;
      return this;
    }

    public Histogram build() {
      return new Histogram(id, NDV, buckets, nullCount, lastUpdateVersion);
    }
  }

  public static Builder newBuilder() {
    return new Builder();
  }

  private Histogram(
      long id,
      long numberOfDistinctValue,
      List<Bucket> buckets,
      long nullCount,
      long lastUpdateVersion) {
    this.id = id;
    this.numberOfDistinctValue = numberOfDistinctValue;
    this.buckets = buckets;
    this.nullCount = nullCount;
    this.lastUpdateVersion = lastUpdateVersion;
  }

  public long getNumberOfDistinctValue() {
    return numberOfDistinctValue;
  }

  public List<Bucket> getBuckets() {
    return buckets;
  }

  public long getNullCount() {
    return nullCount;
  }

  public long getLastUpdateVersion() {
    return lastUpdateVersion;
  }

  public long getId() {
    return id;
  }

  /** equalRowCount estimates the row count where the column equals the given value. */
  double equalRowCount(Key values) {
    int index = lowerBound(values);
    // index not in range
    if (index == -buckets.size() - 1) {
      return 0;
    }
    // index found
    if (index >= 0) {
      return buckets.get(index).getRepeats();
    }
    // index not found
    index = -index - 1;
    int cmp;
    if (buckets.get(index).getLowerBound() == null) {
      cmp = 1;
    } else {
      Objects.requireNonNull(buckets.get(index).getLowerBound());
      cmp = values.compareTo(buckets.get(index).getLowerBound());
    }
    if (cmp < 0) {
      return 0;
    }
    return totalRowCount() / numberOfDistinctValue;
  }

  /** greaterRowCount estimates the row count where the column is greater than the given value. */
  double greaterRowCount(Key values) {
    double lessCount = lessRowCount(values);
    double equalCount = equalRowCount(values);
    double greaterCount;
    greaterCount = totalRowCount() - lessCount - equalCount;
    if (greaterCount < 0) {
      greaterCount = 0;
    }
    return greaterCount;
  }

  /**
   * greaterAndEqRowCount estimates the row count where the column is greater than or equal to the
   * given value.
   */
  private double greaterAndEqRowCount(Key values) {
    double greaterCount = greaterRowCount(values);
    double equalCount = equalRowCount(values);
    return greaterCount + equalCount;
  }

  /** lessRowCount estimates the row count where the column is less than the given value. */
  double lessRowCount(Key values) {
    int index = lowerBound(values);
    // index not in range
    if (index == -buckets.size() - 1) {
      return totalRowCount();
    }
    if (index < 0) {
      index = -index - 1;
    } else {
      return buckets.get(index).count - buckets.get(index).getRepeats();
    }
    double curCount = buckets.get(index).count;
    double preCount = 0;
    if (index > 0) {
      preCount = buckets.get(index - 1).count;
    }
    double lessThanBucketValueCount = curCount - buckets.get(index).getRepeats();
    Key lowerBound = buckets.get(index).getLowerBound();
    int c;
    if (lowerBound != null) {
      c = values.compareTo(lowerBound);
    } else {
      c = 1;
    }
    if (c < 0) {
      return preCount;
    }
    return (preCount + lessThanBucketValueCount) / 2;
  }

  /**
   * lessAndEqRowCount estimates the row count where the column is less than or equal to the given
   * value.
   */
  public double lessAndEqRowCount(Key values) {
    double lessCount = lessRowCount(values);
    double equalCount = equalRowCount(values);
    return lessCount + equalCount;
  }

  /**
   * betweenRowCount estimates the row count where the column is greater than or equal to a and
   * less than b.
   */
  double betweenRowCount(Key a, Key b) {
    double lessCountA = lessRowCount(a);
    double lessCountB = lessRowCount(b);
    // If lessCountA is not less than lessCountB, the two keys may fall into the same bucket and we
    // cannot estimate the fraction, so we use `totalCount / NDV` to estimate the row count, but
    // the result should not be greater than lessCountB.
    if (lessCountA >= lessCountB) {
      return Math.min(lessCountB, totalRowCount() / numberOfDistinctValue);
    }
    return lessCountB - lessCountA;
  }

  public double totalRowCount() {
    if (buckets.isEmpty()) {
      return 0;
    }
    return buckets.get(buckets.size() - 1).count + nullCount;
  }

  /**
   * lowerBound returns the smallest index of the searched key and returns (-[insertion point] - 1)
   * if the key is not found in the buckets, where [insertion point] denotes the index of the first
   * element greater than the key.
   */
  public int lowerBound(Key key) {
    assert key.getClass() == buckets.get(0).getUpperBound().getClass();
    return Arrays.binarySearch(buckets.toArray(), new Bucket(key));
  }

  /**
   * mergeBlock merges every two neighboring buckets.
   *
   * @param bucketIdx index of the last bucket.
   */
  public void mergeBlock(int bucketIdx) {
    int curBuck = 0;
    for (int i = 0; i + 1 <= bucketIdx; i += 2) {
      buckets.set(
          curBuck++,
          new Bucket(
              buckets.get(i + 1).count,
              buckets.get(i + 1).getRepeats(),
              buckets.get(i + 1).getLowerBound(),
              buckets.get(i).getUpperBound()));
    }
    if (bucketIdx % 2 == 0) {
      buckets.set(curBuck++, buckets.get(bucketIdx));
    }
    buckets = buckets.subList(0, curBuck);
  }

  /** getIncreaseFactor returns the factor by which the data has grown since the last analysis. */
  double getIncreaseFactor(long totalCount) {
    long columnCount = buckets.get(buckets.size() - 1).count + nullCount;
    if (columnCount == 0) {
      return 1.0;
    }
    return (double) totalCount / (double) columnCount;
  }
}
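A worked sketch of the estimators above, reusing the placeholder keys k1 < k2 < k3 < k4 from the Bucket sketch earlier (cumulative counts 30 and 50, NDV 25; all numbers hypothetical):

Histogram hist =
    Histogram.newBuilder()
        .setId(1)
        .setNDV(25)
        .setBuckets(Arrays.asList(new Bucket(30, 5, k1, k2), new Bucket(50, 2, k3, k4)))
        .setNullCount(0)
        .build();
hist.totalRowCount(); // 50.0: last cumulative count plus null count
// A key that lands exactly on a bucket's upper bound returns that bucket's
// repeat count from equalRowCount; any other in-range key falls back to
// totalRowCount() / NDV = 50 / 25 = 2 rows.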
@@ -1,133 +0,0 @@
/*
 * Copyright 2018 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.statistics;

import com.google.common.collect.BoundType;
import com.google.common.collect.Range;
import java.util.List;
import org.tikv.codec.CodecDataOutput;
import org.tikv.key.Key;
import org.tikv.key.TypedKey;
import org.tikv.meta.TiIndexInfo;
import org.tikv.predicates.IndexRange;
import org.tikv.types.DataTypeFactory;
import org.tikv.types.MySQLType;

/**
 * Each index has a single {@link IndexStatistics} storing its {@link Histogram} info and {@link
 * CMSketch} info, if any.
 */
public class IndexStatistics {
  private Histogram histogram;
  private CMSketch cmSketch;
  private TiIndexInfo indexInfo;

  public IndexStatistics(Histogram histogram, CMSketch cmSketch, TiIndexInfo indexInfo) {
    this.histogram = histogram;
    this.cmSketch = cmSketch;
    this.indexInfo = indexInfo;
  }

  public Histogram getHistogram() {
    return histogram;
  }

  public void setHistogram(Histogram histogram) {
    this.histogram = histogram;
  }

  public CMSketch getCmSketch() {
    return cmSketch;
  }

  public void setCmSketch(CMSketch cmSketch) {
    this.cmSketch = cmSketch;
  }

  public TiIndexInfo getIndexInfo() {
    return indexInfo;
  }

  public void setIndexInfo(TiIndexInfo indexInfo) {
    this.indexInfo = indexInfo;
  }

  public double getRowCount(List<IndexRange> indexRanges) {
    double rowCount = 0.0;
    for (IndexRange ir : indexRanges) {
      double cnt = 0.0;
      Key pointKey = ir.hasAccessKey() ? ir.getAccessKey() : Key.EMPTY;
      Range<TypedKey> range = ir.getRange();
      Key lPointKey;
      Key uPointKey;

      Key lKey;
      Key uKey;
      if (pointKey != Key.EMPTY) {
        Key convertedKey =
            TypedKey.toTypedKey(pointKey.getBytes(), DataTypeFactory.of(MySQLType.TypeBlob));
        Key convertedNext =
            TypedKey.toTypedKey(pointKey.next().getBytes(), DataTypeFactory.of(MySQLType.TypeBlob));
        // TODO: Implement CMSketch point query
        // if (cmSketch != null) {
        //   rowCount += cmSketch.queryBytes(convertedKey.getBytes());
        // } else {
        rowCount += histogram.betweenRowCount(convertedKey, convertedNext);
        // }
      }
      if (range != null) {
        lPointKey = pointKey;
        uPointKey = pointKey;

        if (!range.hasLowerBound()) {
          // -INF
          lKey = Key.MIN;
        } else {
          lKey = range.lowerEndpoint();
          if (range.lowerBoundType().equals(BoundType.OPEN)) {
            lKey = lKey.next();
          }
        }
        if (!range.hasUpperBound()) {
          // INF
          uKey = Key.MAX;
        } else {
          uKey = range.upperEndpoint();
          if (range.upperBoundType().equals(BoundType.CLOSED)) {
            uKey = uKey.next();
          }
        }
        CodecDataOutput cdo = new CodecDataOutput();
        cdo.write(lPointKey.getBytes());
        cdo.write(lKey.getBytes());
        Key lowerBound = TypedKey.toTypedKey(cdo.toBytes(), DataTypeFactory.of(MySQLType.TypeBlob));
        cdo.reset();
        cdo.write(uPointKey.getBytes());
        cdo.write(uKey.getBytes());
        Key upperBound = TypedKey.toTypedKey(cdo.toBytes(), DataTypeFactory.of(MySQLType.TypeBlob));
        cnt += histogram.betweenRowCount(lowerBound, upperBound);
      }
      rowCount += cnt;
    }
    if (rowCount > histogram.totalRowCount()) {
      rowCount = histogram.totalRowCount();
    } else if (rowCount < 0) {
      rowCount = 0;
    }
    return rowCount;
  }
}
@@ -1,78 +0,0 @@
/*
 *
 * Copyright 2018 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.statistics;

import java.util.HashMap;
import java.util.Map;

/**
 * A TableStatistics plain Java object.
 *
 * <p>Usually each table has two kinds of statistics information: 1. Meta info (tableId, count,
 * modifyCount, version) 2. Column/Index histogram info (columnsHistMap, indexHistMap)
 */
public class TableStatistics {
  private final long tableId; // Which table it belongs to
  private final Map<Long, ColumnStatistics> columnsHistMap =
      new HashMap<>(); // ColumnId -> ColumnStatistics map
  private final Map<Long, IndexStatistics> indexHistMap =
      new HashMap<>(); // IndexId -> IndexStatistics map
  private long count; // Total row count in a table.
  private long modifyCount; // Total modify count in a table.
  private long version; // Version of this statistics info

  public TableStatistics(long tableId) {
    this.tableId = tableId;
  }

  public long getTableId() {
    return tableId;
  }

  public Map<Long, ColumnStatistics> getColumnsHistMap() {
    return columnsHistMap;
  }

  public Map<Long, IndexStatistics> getIndexHistMap() {
    return indexHistMap;
  }

  public long getCount() {
    return count;
  }

  public void setCount(long count) {
    this.count = count;
  }

  public long getModifyCount() {
    return modifyCount;
  }

  public void setModifyCount(long modifyCount) {
    this.modifyCount = modifyCount;
  }

  public long getVersion() {
    return version;
  }

  public void setVersion(long version) {
    this.version = version;
  }
}
@@ -1,68 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.tools;

import static java.util.Objects.requireNonNull;

import com.google.common.collect.ImmutableList;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.tikv.TiSession;
import org.tikv.meta.TiTableInfo;
import org.tikv.predicates.ScanAnalyzer;
import org.tikv.util.RangeSplitter;
import org.tikv.util.RangeSplitter.RegionTask;

public class RegionUtils {
  public static Map<String, Integer> getRegionDistribution(
      TiSession session, String databaseName, String tableName) {
    List<RegionTask> tasks = getRegionTasks(session, databaseName, tableName);
    Map<String, Integer> regionMap = new HashMap<>();
    for (RegionTask task : tasks) {
      regionMap.merge(task.getHost() + "_" + task.getStore().getId(), 1, Integer::sum);
    }
    return regionMap;
  }

  public static Map<Long, List<Long>> getStoreRegionIdDistribution(
      TiSession session, String databaseName, String tableName) {
    List<RegionTask> tasks = getRegionTasks(session, databaseName, tableName);
    Map<Long, List<Long>> storeMap = new HashMap<>();
    for (RegionTask task : tasks) {
      long regionId = task.getRegion().getId();
      long storeId = task.getStore().getId();
      storeMap.putIfAbsent(storeId, new ArrayList<>());
      storeMap.get(storeId).add(regionId);
    }
    return storeMap;
  }

  private static List<RegionTask> getRegionTasks(
      TiSession session, String databaseName, String tableName) {
    requireNonNull(session, "session is null");
    requireNonNull(databaseName, "databaseName is null");
    requireNonNull(tableName, "tableName is null");
    TiTableInfo table = session.getCatalog().getTable(databaseName, tableName);
    requireNonNull(table, String.format("Table not found %s.%s", databaseName, tableName));
    ScanAnalyzer builder = new ScanAnalyzer();
    ScanAnalyzer.ScanPlan scanPlan =
        builder.buildScan(ImmutableList.of(), ImmutableList.of(), table);
    return RangeSplitter.newSplitter(session.getRegionManager())
        .splitRangeByRegion(scanPlan.getKeyRanges());
  }
}
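A sketch of inspecting region balance for a table with this utility, assuming a TiSession obtained elsewhere (the exact session factory is not part of this diff, so treat it as a placeholder):

Map<String, Integer> dist = RegionUtils.getRegionDistribution(session, "test", "t1");
dist.forEach((storeKey, regions) ->
    System.out.println(storeKey + " hosts " + regions + " region(s)"));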
@@ -1,82 +0,0 @@
package org.tikv.types;

import com.pingcap.tidb.tipb.ExprType;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.LocalDate;
import org.tikv.codec.Codec;
import org.tikv.codec.Codec.DateCodec;
import org.tikv.codec.Codec.DateTimeCodec;
import org.tikv.codec.CodecDataInput;
import org.tikv.codec.CodecDataOutput;
import org.tikv.exception.InvalidCodecFormatException;
import org.tikv.meta.TiColumnInfo.InternalTypeHolder;

public abstract class AbstractDateTimeType extends DataType {
  AbstractDateTimeType(InternalTypeHolder holder) {
    super(holder);
  }

  AbstractDateTimeType(MySQLType tp) {
    super(tp);
  }

  /** Returns the timezone used for encoding and decoding. */
  protected abstract DateTimeZone getTimezone();

  /**
   * Decodes a DateTime from a packed long value. In TiDB / MySQL, the timestamp type is converted
   * to UTC before being stored.
   */
  DateTime decodeDateTime(int flag, CodecDataInput cdi) {
    DateTime dateTime;
    if (flag == Codec.UVARINT_FLAG) {
      dateTime = DateTimeCodec.readFromUVarInt(cdi, getTimezone());
    } else if (flag == Codec.UINT_FLAG) {
      dateTime = DateTimeCodec.readFromUInt(cdi, getTimezone());
    } else {
      throw new InvalidCodecFormatException(
          "Invalid Flag type for " + getClass().getSimpleName() + ": " + flag);
    }
    return dateTime;
  }

  /** Decodes a Date from a packed long value. */
  LocalDate decodeDate(int flag, CodecDataInput cdi) {
    LocalDate date;
    if (flag == Codec.UVARINT_FLAG) {
      date = DateCodec.readFromUVarInt(cdi);
    } else if (flag == Codec.UINT_FLAG) {
      date = DateCodec.readFromUInt(cdi);
    } else {
      throw new InvalidCodecFormatException(
          "Invalid Flag type for " + getClass().getSimpleName() + ": " + flag);
    }
    return date;
  }

  /** {@inheritDoc} */
  @Override
  protected void encodeKey(CodecDataOutput cdo, Object value) {
    DateTime dt = Converter.convertToDateTime(value);
    DateTimeCodec.writeDateTimeFully(cdo, dt, getTimezone());
  }

  /** {@inheritDoc} */
  @Override
  protected void encodeValue(CodecDataOutput cdo, Object value) {
    encodeKey(cdo, value);
  }

  /** {@inheritDoc} */
  @Override
  protected void encodeProto(CodecDataOutput cdo, Object value) {
    DateTime dt = Converter.convertToDateTime(value);
    DateTimeCodec.writeDateTimeProto(cdo, dt, getTimezone());
  }

  @Override
  public ExprType getProtoExprType() {
    return ExprType.MysqlTime;
  }
}
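Editor's note: the UTC remark in decodeDateTime above is the key invariant. A stored timestamp is one absolute instant; the zone passed to the codec only changes how that instant is rendered. A self-contained joda-time sketch of that invariant (plain Java, no tikv classes involved):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class TimezoneRoundTrip {
  public static void main(String[] args) {
    DateTime shanghai = new DateTime(2017, 1, 1, 8, 0, DateTimeZone.forID("Asia/Shanghai"));
    DateTime utc = shanghai.withZone(DateTimeZone.UTC); // renders as 2017-01-01T00:00:00Z
    // Same instant, different rendering: the epoch millis are identical.
    System.out.println(shanghai.getMillis() == utc.getMillis()); // true
  }
}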
@@ -1,56 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.types;

import org.tikv.codec.Codec;
import org.tikv.codec.Codec.IntegerCodec;
import org.tikv.codec.CodecDataInput;
import org.tikv.exception.TypeException;
import org.tikv.meta.TiColumnInfo;

public class BitType extends IntegerType {
  public static final BitType BIT = new BitType(MySQLType.TypeBit);

  public static final MySQLType[] subTypes = new MySQLType[] {MySQLType.TypeBit};

  private BitType(MySQLType tp) {
    super(tp);
  }

  protected BitType(TiColumnInfo.InternalTypeHolder holder) {
    super(holder);
  }

  /** {@inheritDoc} */
  @Override
  protected Object decodeNotNull(int flag, CodecDataInput cdi) {
    switch (flag) {
      case Codec.UVARINT_FLAG:
        return IntegerCodec.readUVarLong(cdi);
      case Codec.UINT_FLAG:
        return IntegerCodec.readULong(cdi);
      default:
        throw new TypeException("Invalid IntegerType flag: " + flag);
    }
  }

  @Override
  public boolean isUnsigned() {
    return true;
  }
}
@@ -1,102 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.types;

import com.pingcap.tidb.tipb.ExprType;
import org.tikv.codec.Codec;
import org.tikv.codec.Codec.BytesCodec;
import org.tikv.codec.CodecDataInput;
import org.tikv.codec.CodecDataOutput;
import org.tikv.exception.InvalidCodecFormatException;
import org.tikv.meta.TiColumnInfo;

/**
 * TODO: decide whether string and binary types should be unified. They are encoded identically;
 * however, decoding to a string runs the bytes through whatever charset/encoding format we set,
 * which essentially changes the underlying data.
 */
public class BytesType extends DataType {
  public static final BytesType BLOB = new BytesType(MySQLType.TypeBlob);
  public static final BytesType LONG_TEXT = new BytesType(MySQLType.TypeLongBlob);
  public static final BytesType MEDIUM_TEXT = new BytesType(MySQLType.TypeMediumBlob);
  public static final BytesType TEXT = new BytesType(MySQLType.TypeBlob);
  public static final BytesType TINY_BLOB = new BytesType(MySQLType.TypeTinyBlob);

  public static final MySQLType[] subTypes =
      new MySQLType[] {
        MySQLType.TypeBlob, MySQLType.TypeLongBlob,
        MySQLType.TypeMediumBlob, MySQLType.TypeTinyBlob
      };

  protected BytesType(MySQLType tp) {
    super(tp);
  }

  protected BytesType(TiColumnInfo.InternalTypeHolder holder) {
    super(holder);
  }

  /** {@inheritDoc} */
  @Override
  protected Object decodeNotNull(int flag, CodecDataInput cdi) {
    if (flag == Codec.COMPACT_BYTES_FLAG) {
      return BytesCodec.readCompactBytes(cdi);
    } else if (flag == Codec.BYTES_FLAG) {
      return BytesCodec.readBytes(cdi);
    } else {
      throw new InvalidCodecFormatException(
          "Invalid Flag type for " + getClass().getSimpleName() + ": " + flag);
    }
  }

  @Override
  protected boolean isPrefixIndexSupported() {
    return true;
  }

  /** {@inheritDoc} */
  @Override
  protected void encodeKey(CodecDataOutput cdo, Object value) {
    byte[] bytes = Converter.convertToBytes(value);
    BytesCodec.writeBytesFully(cdo, bytes);
  }

  /** {@inheritDoc} */
  @Override
  protected void encodeValue(CodecDataOutput cdo, Object value) {
    byte[] bytes = Converter.convertToBytes(value);
    BytesCodec.writeCompactBytesFully(cdo, bytes);
  }

  /** {@inheritDoc} */
  @Override
  protected void encodeProto(CodecDataOutput cdo, Object value) {
    byte[] bytes = Converter.convertToBytes(value);
    BytesCodec.writeBytesRaw(cdo, bytes);
  }

  @Override
  public ExprType getProtoExprType() {
    return getCharset().equals(Charset.CharsetBin) ? ExprType.Bytes : ExprType.String;
  }

  /** {@inheritDoc} */
  @Override
  public Object getOriginDefaultValueNonNull(String value) {
    return value;
  }
}
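Editor's note: BytesType makes the key/value split concrete: keys use the order-preserving (memcomparable) bytes encoding, values use the shorter length-prefixed compact encoding, and proto payloads get the raw bytes. A hedged usage sketch; the no-arg CodecDataOutput constructor and its toBytes() accessor are assumptions, since neither appears in this diff:

import org.tikv.codec.CodecDataOutput;
import org.tikv.types.BytesType;
import org.tikv.types.DataType.EncodeType;

public class BytesEncodingExample {
  public static void main(String[] args) {
    // Assumption: CodecDataOutput() and toBytes() exist; they are not part of this diff.
    CodecDataOutput keyBuf = new CodecDataOutput();
    CodecDataOutput valBuf = new CodecDataOutput();

    // Key encoding must sort byte-wise like the original value, so it pads into
    // fixed-size groups; value encoding is compact and not order-preserving.
    BytesType.BLOB.encode(keyBuf, EncodeType.KEY, "hello");
    BytesType.BLOB.encode(valBuf, EncodeType.VALUE, "hello");

    System.out.println("key bytes:   " + keyBuf.toBytes().length); // longer, padded
    System.out.println("value bytes: " + valBuf.toBytes().length); // shorter, compact
  }
}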
@@ -1,39 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.types;

public class Charset {
  // CharsetBin is used for marking the binary charset.
  public static final String CharsetBin = "binary";
  // CollationBin is the default collation for CharsetBin.
  public static final String CollationBin = "binary";
  // CharsetUTF8 is the default charset for string types.
  public static final String CharsetUTF8 = "utf8";
  // CollationUTF8 is the default collation for CharsetUTF8.
  public static final String CollationUTF8 = "utf8_bin";
  // CharsetUTF8MB4 represents 4-byte UTF-8, which works the same way as utf8 in Go.
  public static final String CharsetUTF8MB4 = "utf8mb4";
  // CollationUTF8MB4 is the default collation for CharsetUTF8MB4.
  public static final String CollationUTF8MB4 = "utf8mb4_bin";
  // CharsetASCII is a subset of UTF8.
  public static final String CharsetASCII = "ascii";
  // CollationASCII is the default collation for CharsetASCII.
  public static final String CollationASCII = "ascii_bin";
  // CharsetLatin1 is a single-byte charset.
  public static final String CharsetLatin1 = "latin1";
  // CollationLatin1 is the default collation for CharsetLatin1.
  public static final String CollationLatin1 = "latin1_bin";
}
@@ -1,179 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.types;

import static java.util.Objects.requireNonNull;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.sql.Date;
import java.sql.Timestamp;
import java.util.Arrays;
import org.apache.spark.unsafe.types.UTF8String;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.tikv.exception.TypeException;

public class Converter {
  public static long convertToLong(Object val) {
    requireNonNull(val, "val is null");
    if (val instanceof Number) {
      return ((Number) val).longValue();
    } else if (val instanceof String) {
      return Long.parseLong(val.toString());
    }
    throw new TypeException(
        String.format("Cannot cast %s to long", val.getClass().getSimpleName()));
  }

  public static double convertToDouble(Object val) {
    requireNonNull(val, "val is null");
    if (val instanceof Number) {
      return ((Number) val).doubleValue();
    } else if (val instanceof String) {
      return Double.parseDouble(val.toString());
    }
    throw new TypeException(
        String.format("Cannot cast %s to double", val.getClass().getSimpleName()));
  }

  public static String convertToString(Object val) {
    requireNonNull(val, "val is null");
    return val.toString();
  }

  public static byte[] convertToBytes(Object val) {
    requireNonNull(val, "val is null");
    if (val instanceof byte[]) {
      return (byte[]) val;
    } else if (val instanceof String) {
      return ((String) val).getBytes();
    }
    throw new TypeException(
        String.format("Cannot cast %s to bytes", val.getClass().getSimpleName()));
  }

  static byte[] convertToBytes(Object val, int prefixLength) {
    requireNonNull(val, "val is null");
    if (val instanceof byte[]) {
      return Arrays.copyOf((byte[]) val, prefixLength);
    } else if (val instanceof String) {
      return Arrays.copyOf(((String) val).getBytes(), prefixLength);
    }
    throw new TypeException(
        String.format("Cannot cast %s to bytes", val.getClass().getSimpleName()));
  }

  static byte[] convertUtf8ToBytes(Object val, int prefixLength) {
    requireNonNull(val, "val is null");
    if (val instanceof byte[]) {
      return UTF8String.fromBytes((byte[]) val).substring(0, prefixLength).getBytes();
    } else if (val instanceof String) {
      return UTF8String.fromString((String) val).substring(0, prefixLength).getBytes();
    }
    throw new TypeException(
        String.format("Cannot cast %s to bytes", val.getClass().getSimpleName()));
  }

  private static final DateTimeZone localTimeZone = DateTimeZone.getDefault();
  private static final DateTimeFormatter localDateTimeFormatter =
      DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").withZone(localTimeZone);
  private static final DateTimeFormatter localDateFormatter =
      DateTimeFormat.forPattern("yyyy-MM-dd").withZone(localTimeZone);

  public static DateTimeZone getLocalTimezone() {
    return localTimeZone;
  }

  /**
   * Converts an object to a DateTime. A String is parsed in the local timezone; a Long is
   * interpreted as milliseconds since the UTC epoch.
   *
   * @param val value to be converted to DateTime
   * @return a joda.time.DateTime holding the local datetime
   */
  public static DateTime convertToDateTime(Object val) {
    requireNonNull(val, "val is null");
    if (val instanceof DateTime) {
      return (DateTime) val;
    } else if (val instanceof String) {
      // interpret the string in the local timezone
      try {
        return DateTime.parse((String) val, localDateTimeFormatter);
      } catch (Exception e) {
        throw new TypeException(
            String.format("Error parsing string %s to datetime", (String) val), e);
      }
    } else if (val instanceof Long) {
      return new DateTime((long) val);
    } else if (val instanceof Timestamp) {
      return new DateTime(((Timestamp) val).getTime());
    } else if (val instanceof Date) {
      return new DateTime(((Date) val).getTime());
    } else {
      throw new TypeException(
          String.format("Cannot cast %s to DateTime", val.getClass().getSimpleName()));
    }
  }

  /**
   * Converts an object to a Date. A String is parsed in the local timezone; a Long is interpreted
   * as milliseconds since the UTC epoch.
   *
   * @param val value to be converted to Date
   * @return a java.sql.Date
   */
  public static Date convertToDate(Object val) {
    requireNonNull(val, "val is null");
    if (val instanceof Date) {
      return (Date) val;
    } else if (val instanceof String) {
      try {
        return new Date(DateTime.parse((String) val, localDateFormatter).toDate().getTime());
      } catch (Exception e) {
        throw new TypeException(String.format("Error parsing string %s to date", (String) val), e);
      }
    } else if (val instanceof Long) {
      return new Date((long) val);
    } else if (val instanceof Timestamp) {
      return new Date(((Timestamp) val).getTime());
    } else if (val instanceof DateTime) {
      return new Date(((DateTime) val).getMillis());
    } else {
      throw new TypeException(
          String.format("Cannot cast %s to Date", val.getClass().getSimpleName()));
    }
  }

  public static BigDecimal convertToBigDecimal(Object val) {
    requireNonNull(val, "val is null");
    if (val instanceof BigDecimal) {
      return (BigDecimal) val;
    } else if (val instanceof Double || val instanceof Float) {
      // Go through Number.doubleValue() so a Float does not hit a ClassCastException.
      return BigDecimal.valueOf(((Number) val).doubleValue());
    } else if (val instanceof BigInteger) {
      return new BigDecimal((BigInteger) val);
    } else if (val instanceof Number) {
      return new BigDecimal(((Number) val).longValue());
    } else if (val instanceof String) {
      return new BigDecimal((String) val);
    } else {
      throw new TypeException(
          String.format("Cannot cast %s to BigDecimal", val.getClass().getSimpleName()));
    }
  }
}
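Editor's note: a short sketch of the conversion rules documented above (string parsed in the local zone, long taken as epoch millis). It uses only the public Converter methods shown in this diff; the printed values depend on the machine's default timezone.

import java.math.BigDecimal;
import org.joda.time.DateTime;
import org.tikv.types.Converter;

public class ConverterExample {
  public static void main(String[] args) {
    // Parsed with the "yyyy-MM-dd HH:mm:ss" pattern in the JVM's default zone.
    DateTime dt = Converter.convertToDateTime("2017-10-01 08:30:00");
    // A Long is taken as milliseconds since the UTC epoch.
    DateTime epoch = Converter.convertToDateTime(0L);
    // Float now survives the numeric path (see convertToBigDecimal above).
    BigDecimal d = Converter.convertToBigDecimal(1.5f);
    System.out.println(dt + " / " + epoch + " / " + d);
  }
}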
@@ -1,361 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.types;

import static java.util.Objects.requireNonNull;
import static org.tikv.codec.Codec.isNullFlag;

import com.google.common.collect.ImmutableList;
import com.pingcap.tidb.tipb.ExprType;
import java.io.Serializable;
import java.util.List;
import org.tikv.codec.Codec;
import org.tikv.codec.CodecDataInput;
import org.tikv.codec.CodecDataOutput;
import org.tikv.exception.TypeException;
import org.tikv.meta.Collation;
import org.tikv.meta.TiColumnInfo;
import org.tikv.meta.TiColumnInfo.InternalTypeHolder;

/** Base type for encoding and decoding TiDB row information. */
public abstract class DataType implements Serializable {

  // Flag information for the strict MySQL type
  public static final int NotNullFlag = 1; /* Field can't be NULL */
  public static final int PriKeyFlag = 2; /* Field is part of a primary key */
  public static final int UniqueKeyFlag = 4; /* Field is part of a unique key */
  public static final int MultipleKeyFlag = 8; /* Field is part of a key */
  public static final int BlobFlag = 16; /* Field is a blob */
  public static final int UnsignedFlag = 32; /* Field is unsigned */
  public static final int ZerofillFlag = 64; /* Field is zerofill */
  public static final int BinaryFlag = 128; /* Field is binary */
  public static final int EnumFlag = 256; /* Field is an enum */
  public static final int AutoIncrementFlag = 512; /* Field is an auto increment field */
  public static final int TimestampFlag = 1024; /* Field is a timestamp */
  public static final int SetFlag = 2048; /* Field is a set */
  public static final int NoDefaultValueFlag = 4096; /* Field doesn't have a default value */
  public static final int OnUpdateNowFlag = 8192; /* Field is set to NOW on UPDATE */
  public static final int NumFlag = 32768; /* Field is a num (for clients) */

  public enum EncodeType {
    KEY,
    VALUE,
    PROTO
  }

  public static final int UNSPECIFIED_LEN = -1;

  // MySQL type
  protected final MySQLType tp;
  // Not an encode/decode flag; carries strict MySQL type information
  // such as NOT NULL and timestamp
  protected final int flag;
  protected final int decimal;
  private final String charset;
  protected final int collation;
  protected final long length;
  private final List<String> elems;

  protected DataType(TiColumnInfo.InternalTypeHolder holder) {
    this.tp = MySQLType.fromTypeCode(holder.getTp());
    this.flag = holder.getFlag();
    this.length = holder.getFlen();
    this.decimal = holder.getDecimal();
    this.charset = holder.getCharset();
    this.collation = Collation.translate(holder.getCollate());
    this.elems = holder.getElems() == null ? ImmutableList.of() : holder.getElems();
  }

  protected DataType(MySQLType type) {
    this.tp = type;
    this.flag = 0;
    this.elems = ImmutableList.of();
    this.length = UNSPECIFIED_LEN;
    this.decimal = UNSPECIFIED_LEN;
    this.charset = "";
    this.collation = Collation.DEF_COLLATION_CODE;
  }

  protected DataType(
      MySQLType type, int flag, int len, int decimal, String charset, int collation) {
    this.tp = type;
    this.flag = flag;
    this.elems = ImmutableList.of();
    this.length = len;
    this.decimal = decimal;
    this.charset = charset;
    this.collation = collation;
  }

  protected abstract Object decodeNotNull(int flag, CodecDataInput cdi);

  /**
   * Decodes the next value from row data; returns null when the null flag is read.
   *
   * @param cdi source of data.
   */
  public Object decode(CodecDataInput cdi) {
    int flag = cdi.readUnsignedByte();
    if (isNullFlag(flag)) {
      return null;
    }
    return decodeNotNull(flag, cdi);
  }

  public boolean isNextNull(CodecDataInput cdi) {
    return isNullFlag(cdi.peekByte());
  }

  public static void encodeMaxValue(CodecDataOutput cdo) {
    cdo.writeByte(Codec.MAX_FLAG);
  }

  public static void encodeNull(CodecDataOutput cdo) {
    cdo.writeByte(Codec.NULL_FLAG);
  }

  public static void encodeIndex(CodecDataOutput cdo) {
    cdo.writeByte(Codec.BYTES_FLAG);
  }

  /**
   * Encodes a value to CodecDataOutput.
   *
   * @param cdo destination of data.
   * @param encodeType Key or Value.
   * @param value value to be encoded.
   */
  public void encode(CodecDataOutput cdo, EncodeType encodeType, Object value) {
    requireNonNull(cdo, "cdo is null");
    if (value == null) {
      if (encodeType != EncodeType.PROTO) {
        encodeNull(cdo);
      }
    } else {
      switch (encodeType) {
        case KEY:
          encodeKey(cdo, value);
          return;
        case VALUE:
          encodeValue(cdo, value);
          return;
        case PROTO:
          encodeProto(cdo, value);
          return;
        default:
          throw new TypeException("Unknown encoding type " + encodeType);
      }
    }
  }

  protected abstract void encodeKey(CodecDataOutput cdo, Object value);

  protected abstract void encodeValue(CodecDataOutput cdo, Object value);

  protected abstract void encodeProto(CodecDataOutput cdo, Object value);

  /**
   * Encodes a key's prefix to CodecDataOutput.
   *
   * @param cdo destination of data.
   * @param value value to be encoded.
   * @param type data value type.
   * @param prefixLength specifies the prefix length of the value to be encoded. When prefixLength
   *     is DataType.UNSPECIFIED_LEN, the full length of the value is encoded.
   */
  public void encodeKey(CodecDataOutput cdo, Object value, DataType type, int prefixLength) {
    requireNonNull(cdo, "cdo is null");
    if (value == null) {
      encodeNull(cdo);
    } else if (prefixLength == DataType.UNSPECIFIED_LEN) {
      encodeKey(cdo, value);
    } else if (isPrefixIndexSupported()) {
      byte[] bytes;
      // When the charset is utf8/utf8mb4, the prefix length is the number of UTF-8
      // characters rather than the length of the encoded byte value.
      if (type.getCharset().equalsIgnoreCase("utf8")
          || type.getCharset().equalsIgnoreCase("utf8mb4")) {
        bytes = Converter.convertUtf8ToBytes(value, prefixLength);
      } else {
        bytes = Converter.convertToBytes(value, prefixLength);
      }
      Codec.BytesCodec.writeBytesFully(cdo, bytes);
    } else {
      throw new TypeException("Data type can not encode with prefix");
    }
  }

  /**
   * Indicates whether a data type supports a prefix index.
   *
   * @return true iff the type is BytesType
   */
  protected boolean isPrefixIndexSupported() {
    return false;
  }

  public abstract ExprType getProtoExprType();

  /**
   * Gets the origin default value.
   *
   * @param value a default value represented as a string
   * @return the parsed default value object
   */
  public abstract Object getOriginDefaultValueNonNull(String value);

  public Object getOriginDefaultValue(String value) {
    if (value == null) return null;
    return getOriginDefaultValueNonNull(value);
  }

  public int getCollationCode() {
    return collation;
  }

  public long getLength() {
    return length;
  }

  public int getDecimal() {
    return decimal;
  }

  public int getFlag() {
    return flag;
  }

  public List<String> getElems() {
    return this.elems;
  }

  public int getTypeCode() {
    return tp.getTypeCode();
  }

  public MySQLType getType() {
    return tp;
  }

  public String getCharset() {
    return charset;
  }

  public boolean isPrimaryKey() {
    return (flag & PriKeyFlag) > 0;
  }

  public boolean isNotNull() {
    return (flag & NotNullFlag) > 0;
  }

  public boolean isNoDefault() {
    return (flag & NoDefaultValueFlag) > 0;
  }

  public boolean isAutoIncrement() {
    return (flag & AutoIncrementFlag) > 0;
  }

  public boolean isZeroFill() {
    return (flag & ZerofillFlag) > 0;
  }

  public boolean isBinary() {
    return (flag & BinaryFlag) > 0;
  }

  public boolean isUniqueKey() {
    return (flag & UniqueKeyFlag) > 0;
  }

  public boolean isMultiKey() {
    return (flag & MultipleKeyFlag) > 0;
  }

  public boolean isTimestamp() {
    return (flag & TimestampFlag) > 0;
  }

  public boolean isOnUpdateNow() {
    return (flag & OnUpdateNowFlag) > 0;
  }

  public boolean isBlob() {
    return (flag & BlobFlag) > 0;
  }

  public boolean isEnum() {
    return (flag & EnumFlag) > 0;
  }

  public boolean isSet() {
    return (flag & SetFlag) > 0;
  }

  public boolean isNum() {
    return (flag & NumFlag) > 0;
  }

  @Override
  public String toString() {
    return String.format("%s:%s", this.getClass().getSimpleName(), getType());
  }

  @Override
  public boolean equals(Object other) {
    if (other instanceof DataType) {
      DataType otherType = (DataType) other;
      // tp implies the Class is the same,
      // and that might not always hold
      // TODO: reconsider design here
      return tp == otherType.tp
          && flag == otherType.flag
          && decimal == otherType.decimal
          && (charset != null && charset.equals(otherType.charset))
          && collation == otherType.collation
          && length == otherType.length
          && elems.equals(otherType.elems);
    }
    return false;
  }

  @Override
  public int hashCode() {
    return (int)
        (31
            * (tp.getTypeCode() == 0 ? 1 : tp.getTypeCode())
            * (flag == 0 ? 1 : flag)
            * (decimal == 0 ? 1 : decimal)
            * (charset == null ? 1 : charset.hashCode())
            * (collation == 0 ? 1 : collation)
            * (length == 0 ? 1 : length)
            * (elems.hashCode()));
  }

  public InternalTypeHolder toTypeHolder() {
    return new InternalTypeHolder(
        getTypeCode(),
        flag,
        length,
        decimal,
        charset,
        "",
        "",
        Collation.translate(collation),
        elems);
  }
}
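Editor's note: every is* predicate above is a single-bit test against the MySQL column flag word. A self-contained illustration of the same bitmask arithmetic (constant values copied from DataType; no tikv classes needed):

public class FlagBitsExample {
  // Values copied from DataType above.
  static final int NotNullFlag = 1;
  static final int PriKeyFlag = 2;
  static final int UnsignedFlag = 32;
  static final int TimestampFlag = 1024;

  public static void main(String[] args) {
    int flag = NotNullFlag | PriKeyFlag | UnsignedFlag; // 35, as TiDB would set it
    System.out.println((flag & PriKeyFlag) > 0);    // true  -> isPrimaryKey()
    System.out.println((flag & UnsignedFlag) > 0);  // true  -> unsigned column
    System.out.println((flag & TimestampFlag) > 0); // false -> isTimestamp()
  }
}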
@@ -1,107 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.types;

import com.google.common.collect.ImmutableMap;
import java.lang.reflect.Constructor;
import java.util.Arrays;
import java.util.Map;
import org.tikv.exception.TypeException;
import org.tikv.meta.TiColumnInfo.InternalTypeHolder;

public class DataTypeFactory {
  private static final Map<MySQLType, Constructor<? extends DataType>> dataTypeCreatorMap;
  private static final Map<MySQLType, DataType> dataTypeInstanceMap;

  static {
    ImmutableMap.Builder<MySQLType, Constructor<? extends DataType>> builder =
        ImmutableMap.builder();
    ImmutableMap.Builder<MySQLType, DataType> instBuilder = ImmutableMap.builder();
    extractTypeMap(BitType.subTypes, BitType.class, builder, instBuilder);
    extractTypeMap(StringType.subTypes, StringType.class, builder, instBuilder);
    extractTypeMap(DateTimeType.subTypes, DateTimeType.class, builder, instBuilder);
    extractTypeMap(DateType.subTypes, DateType.class, builder, instBuilder);
    extractTypeMap(DecimalType.subTypes, DecimalType.class, builder, instBuilder);
    extractTypeMap(IntegerType.subTypes, IntegerType.class, builder, instBuilder);
    extractTypeMap(BytesType.subTypes, BytesType.class, builder, instBuilder);
    extractTypeMap(RealType.subTypes, RealType.class, builder, instBuilder);
    extractTypeMap(TimestampType.subTypes, TimestampType.class, builder, instBuilder);
    extractTypeMap(EnumType.subTypes, EnumType.class, builder, instBuilder);
    extractTypeMap(SetType.subTypes, SetType.class, builder, instBuilder);
    extractTypeMap(YearType.subTypes, YearType.class, builder, instBuilder);
    extractTypeMap(JsonType.subTypes, JsonType.class, builder, instBuilder);
    dataTypeCreatorMap = builder.build();
    dataTypeInstanceMap = instBuilder.build();
  }

  private static void extractTypeMap(
      MySQLType[] types,
      Class<? extends DataType> cls,
      ImmutableMap.Builder<MySQLType, Constructor<? extends DataType>> holderBuilder,
      ImmutableMap.Builder<MySQLType, DataType> instBuilder) {
    for (MySQLType type : types) {
      try {
        Constructor<? extends DataType> ctorByHolder =
            cls.getDeclaredConstructor(InternalTypeHolder.class);
        Constructor<? extends DataType> ctorByType = cls.getDeclaredConstructor(MySQLType.class);
        ctorByHolder.setAccessible(true);
        ctorByType.setAccessible(true);
        holderBuilder.put(type, ctorByHolder);
        instBuilder.put(type, ctorByType.newInstance(type));
      } catch (Exception e) {
        throw new TypeException(
            String.format("Type %s does not have a proper constructor", cls.getName()), e);
      }
    }
  }

  public static DataType of(MySQLType type) {
    DataType dataType = dataTypeInstanceMap.get(type);
    if (dataType == null) {
      throw new TypeException("Type not found for " + type);
    }
    return dataType;
  }

  // Blob types with a non-binary charset are really strings, and string types with
  // the binary charset are really blobs; normalize the MySQLType accordingly.
  private static MySQLType convertType(MySQLType type, InternalTypeHolder holder) {
    if (Arrays.asList(BytesType.subTypes).contains(type)
        && !Charset.CharsetBin.equals(holder.getCharset())) {
      return MySQLType.TypeVarchar;
    }
    if (Arrays.asList(StringType.subTypes).contains(type)
        && Charset.CharsetBin.equals(holder.getCharset())) {
      return MySQLType.TypeBlob;
    }
    return type;
  }

  public static DataType of(InternalTypeHolder holder) {
    MySQLType type = MySQLType.fromTypeCode(holder.getTp());
    type = convertType(type, holder);
    Constructor<? extends DataType> ctor = dataTypeCreatorMap.get(type);
    if (ctor == null) {
      throw new NullPointerException(
          "Cannot retrieve DataType info for tp " + holder.getTp());
    }
    try {
      return ctor.newInstance(holder);
    } catch (Exception e) {
      throw new TypeException("Cannot create type from " + holder.getTp(), e);
    }
  }
}
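Editor's note: the factory caches one shared, metadata-free instance per MySQLType, so of(MySQLType) lookups are cheap, while of(InternalTypeHolder) reflects a fresh instance because holders carry per-column metadata. A usage sketch based only on the APIs shown in this diff:

import org.tikv.types.DataType;
import org.tikv.types.DataTypeFactory;
import org.tikv.types.MySQLType;

public class DataTypeFactoryExample {
  public static void main(String[] args) {
    // Shared, metadata-free instance for a BIGINT column.
    DataType bigint = DataTypeFactory.of(MySQLType.TypeLonglong);
    System.out.println(bigint); // e.g. "IntegerType:TypeLonglong" (toString from DataType)
    // The same MySQLType always yields the same cached instance.
    System.out.println(bigint == DataTypeFactory.of(MySQLType.TypeLonglong)); // true
  }
}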
@@ -1,66 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.types;

import java.sql.Timestamp;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.tikv.codec.CodecDataInput;
import org.tikv.meta.TiColumnInfo;

/**
 * Datetime is a timezone-neutral version of timestamp. While most of the decoding logic is the
 * same, it is interpreted in the local timezone so it can take part in date/time computation.
 */
public class DateTimeType extends AbstractDateTimeType {
  public static final DateTimeType DATETIME = new DateTimeType(MySQLType.TypeDatetime);
  public static final MySQLType[] subTypes = new MySQLType[] {MySQLType.TypeDatetime};

  private DateTimeType(MySQLType tp) {
    super(tp);
  }

  DateTimeType(TiColumnInfo.InternalTypeHolder holder) {
    super(holder);
  }

  @Override
  protected DateTimeZone getTimezone() {
    return Converter.getLocalTimezone();
  }

  /**
   * Decodes a timestamp from a packed long value. In TiDB / MySQL, the timestamp type is converted
   * to UTC before being stored.
   */
  @Override
  protected Timestamp decodeNotNull(int flag, CodecDataInput cdi) {
    DateTime dateTime = decodeDateTime(flag, cdi);
    // Null flags are filtered out before this point, but zero dates like 0000-00-00 still
    // occur; following MySQL JDBC behavior, they are converted to null.
    if (dateTime == null) {
      return null;
    }
    return new Timestamp(dateTime.getMillis());
  }

  @Override
  public DateTime getOriginDefaultValueNonNull(String value) {
    return Converter.convertToDateTime(value);
  }
}
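Editor's note: column default values arrive from the TiDB schema as strings. A sketch of how DateTimeType parses them, using the public getOriginDefaultValue entry point inherited from DataType (parsing happens in the local timezone via Converter above):

import org.joda.time.DateTime;
import org.tikv.types.DateTimeType;

public class DefaultValueExample {
  public static void main(String[] args) {
    DateTime dt =
        (DateTime) DateTimeType.DATETIME.getOriginDefaultValue("1999-12-31 23:59:59");
    System.out.println(dt);
    // A null default short-circuits before the non-null parser runs.
    System.out.println(DateTimeType.DATETIME.getOriginDefaultValue(null)); // null
  }
}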
@@ -1,73 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.types;

import java.sql.Date;
import org.joda.time.DateTimeZone;
import org.joda.time.LocalDate;
import org.tikv.codec.Codec;
import org.tikv.codec.CodecDataInput;
import org.tikv.codec.CodecDataOutput;
import org.tikv.meta.TiColumnInfo;

public class DateType extends AbstractDateTimeType {
  public static final DateType DATE = new DateType(MySQLType.TypeDate);
  public static final MySQLType[] subTypes = new MySQLType[] {MySQLType.TypeDate};

  private DateType(MySQLType tp) {
    super(tp);
  }

  DateType(TiColumnInfo.InternalTypeHolder holder) {
    super(holder);
  }

  @Override
  protected DateTimeZone getTimezone() {
    return Converter.getLocalTimezone();
  }

  @Override
  public Date getOriginDefaultValueNonNull(String value) {
    return Converter.convertToDate(value);
  }

  /** {@inheritDoc} */
  @Override
  protected void encodeKey(CodecDataOutput cdo, Object value) {
    Date dt = Converter.convertToDate(value);
    Codec.DateCodec.writeDateFully(cdo, dt, getTimezone());
  }

  /** {@inheritDoc} */
  @Override
  protected void encodeProto(CodecDataOutput cdo, Object value) {
    Date dt = Converter.convertToDate(value);
    Codec.DateCodec.writeDateProto(cdo, dt, getTimezone());
  }

  /** {@inheritDoc} */
  @Override
  protected Date decodeNotNull(int flag, CodecDataInput cdi) {
    LocalDate date = decodeDate(flag, cdi);
    if (date == null) {
      return null;
    }
    return new Date(date.toDate().getTime());
  }
}
@@ -1,78 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.types;

import com.pingcap.tidb.tipb.ExprType;
import java.math.BigDecimal;
import org.tikv.codec.Codec;
import org.tikv.codec.Codec.DecimalCodec;
import org.tikv.codec.CodecDataInput;
import org.tikv.codec.CodecDataOutput;
import org.tikv.exception.InvalidCodecFormatException;
import org.tikv.meta.TiColumnInfo;

public class DecimalType extends DataType {
  public static final DecimalType DECIMAL = new DecimalType(MySQLType.TypeNewDecimal);
  public static final MySQLType[] subTypes = new MySQLType[] {MySQLType.TypeNewDecimal};

  private DecimalType(MySQLType tp) {
    super(tp);
  }

  DecimalType(TiColumnInfo.InternalTypeHolder holder) {
    super(holder);
  }

  /** {@inheritDoc} */
  @Override
  protected Object decodeNotNull(int flag, CodecDataInput cdi) {
    if (flag != Codec.DECIMAL_FLAG) {
      throw new InvalidCodecFormatException("Invalid Flag type for decimal type: " + flag);
    }
    return DecimalCodec.readDecimal(cdi);
  }

  @Override
  protected void encodeKey(CodecDataOutput cdo, Object value) {
    BigDecimal val = Converter.convertToBigDecimal(value);
    DecimalCodec.writeDecimalFully(cdo, val);
  }

  @Override
  protected void encodeValue(CodecDataOutput cdo, Object value) {
    BigDecimal val = Converter.convertToBigDecimal(value);
    DecimalCodec.writeDecimalFully(cdo, val);
  }

  @Override
  protected void encodeProto(CodecDataOutput cdo, Object value) {
    BigDecimal val = Converter.convertToBigDecimal(value);
    DecimalCodec.writeDecimal(cdo, val);
  }

  @Override
  public ExprType getProtoExprType() {
    return ExprType.MysqlDecimal;
  }

  /** {@inheritDoc} */
  @Override
  public Object getOriginDefaultValueNonNull(String value) {
    return new BigDecimal(value);
  }
}
@@ -1,75 +0,0 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.types;

import com.pingcap.tidb.tipb.ExprType;
import org.tikv.codec.CodecDataInput;
import org.tikv.codec.CodecDataOutput;
import org.tikv.exception.UnsupportedTypeException;
import org.tikv.meta.TiColumnInfo;

/**
 * TODO: support the Enum type. The EnumType class exists for now only to mark that this type is
 * known, so that an UnsupportedTypeException can be thrown when it is encountered; its logic is
 * not yet implemented.
 */
public class EnumType extends DataType {
  public static final EnumType ENUM = new EnumType(MySQLType.TypeEnum);

  public static final MySQLType[] subTypes = new MySQLType[] {MySQLType.TypeEnum};

  private EnumType(MySQLType tp) {
    super(tp);
  }

  protected EnumType(TiColumnInfo.InternalTypeHolder holder) {
    super(holder);
  }

  /** {@inheritDoc} */
  @Override
  protected Object decodeNotNull(int flag, CodecDataInput cdi) {
    throw new UnsupportedTypeException("Enum type not supported");
  }

  /** {@inheritDoc} Enum is encoded as unsigned int64 with its 0-based value. */
  @Override
  protected void encodeKey(CodecDataOutput cdo, Object value) {
    throw new UnsupportedTypeException("Enum type not supported");
  }

  /** {@inheritDoc} Enum is encoded as unsigned int64 with its 0-based value. */
  @Override
  protected void encodeValue(CodecDataOutput cdo, Object value) {
    throw new UnsupportedTypeException("Enum type not supported");
  }

  /** {@inheritDoc} */
  @Override
  protected void encodeProto(CodecDataOutput cdo, Object value) {
    throw new UnsupportedTypeException("Enum type not supported");
  }

  @Override
  public ExprType getProtoExprType() {
    return ExprType.MysqlEnum;
  }

  /** {@inheritDoc} */
  @Override
  public Object getOriginDefaultValueNonNull(String value) {
    return value;
  }
}
@@ -1,143 +0,0 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.types;

import com.google.common.primitives.UnsignedLong;
import com.pingcap.tidb.tipb.ExprType;
import java.math.BigDecimal;
import org.tikv.codec.Codec;
import org.tikv.codec.Codec.IntegerCodec;
import org.tikv.codec.CodecDataInput;
import org.tikv.codec.CodecDataOutput;
import org.tikv.exception.TypeException;
import org.tikv.meta.Collation;
import org.tikv.meta.TiColumnInfo;

public class IntegerType extends DataType {
  public static final IntegerType TINYINT = new IntegerType(MySQLType.TypeTiny);
  public static final IntegerType SMALLINT = new IntegerType(MySQLType.TypeShort);
  public static final IntegerType MEDIUMINT = new IntegerType(MySQLType.TypeInt24);
  public static final IntegerType INT = new IntegerType(MySQLType.TypeLong);
  public static final IntegerType BIGINT = new IntegerType(MySQLType.TypeLonglong);
  public static final IntegerType BOOLEAN = TINYINT;

  public static final IntegerType ROW_ID_TYPE =
      new IntegerType(MySQLType.TypeLonglong, PriKeyFlag, 20, 0);

  public static final MySQLType[] subTypes =
      new MySQLType[] {
        MySQLType.TypeTiny,
        MySQLType.TypeShort,
        MySQLType.TypeInt24,
        MySQLType.TypeLong,
        MySQLType.TypeLonglong
      };

  protected IntegerType(MySQLType type, int flag, int len, int decimal) {
    super(type, flag, len, decimal, "", Collation.DEF_COLLATION_CODE);
  }

  protected IntegerType(MySQLType tp) {
    super(tp);
  }

  protected IntegerType(TiColumnInfo.InternalTypeHolder holder) {
    super(holder);
  }

  private static BigDecimal unsignedValueOf(long x) {
    return new BigDecimal(UnsignedLong.fromLongBits(x).bigIntegerValue());
  }

  /** {@inheritDoc} */
  @Override
  protected Object decodeNotNull(int flag, CodecDataInput cdi) {
    long ret;
    switch (flag) {
      case Codec.UVARINT_FLAG:
        ret = IntegerCodec.readUVarLong(cdi);
        break;
      case Codec.UINT_FLAG:
        ret = IntegerCodec.readULong(cdi);
        break;
      case Codec.VARINT_FLAG:
        ret = IntegerCodec.readVarLong(cdi);
        break;
      case Codec.INT_FLAG:
        ret = IntegerCodec.readLong(cdi);
        break;
      default:
        throw new TypeException("Invalid IntegerType flag: " + flag);
    }
    if (isUnsignedLong()) {
      return unsignedValueOf(ret);
    }
    return ret;
  }

  /** {@inheritDoc} */
  @Override
  protected void encodeKey(CodecDataOutput cdo, Object value) {
    long longVal = Converter.convertToLong(value);
    if (isUnsigned()) {
      IntegerCodec.writeULongFully(cdo, longVal, true);
    } else {
      IntegerCodec.writeLongFully(cdo, longVal, true);
    }
  }

  /** {@inheritDoc} */
  @Override
  protected void encodeValue(CodecDataOutput cdo, Object value) {
    long longVal = Converter.convertToLong(value);
    if (isUnsigned()) {
      IntegerCodec.writeULongFully(cdo, longVal, false);
    } else {
      IntegerCodec.writeLongFully(cdo, longVal, false);
    }
  }

  /** {@inheritDoc} */
  @Override
  protected void encodeProto(CodecDataOutput cdo, Object value) {
    long longVal = Converter.convertToLong(value);
    if (isUnsigned()) {
      IntegerCodec.writeULong(cdo, longVal);
    } else {
      IntegerCodec.writeLong(cdo, longVal);
    }
  }

  @Override
  public ExprType getProtoExprType() {
    return isUnsigned() ? ExprType.Uint64 : ExprType.Int64;
  }

  public boolean isUnsignedLong() {
    return tp == MySQLType.TypeLonglong && isUnsigned();
  }

  public boolean isUnsigned() {
    return (flag & UnsignedFlag) > 0;
  }

  /** {@inheritDoc} */
  @Override
  public Object getOriginDefaultValueNonNull(String value) {
    // Parse as long so BIGINT defaults beyond Integer.MAX_VALUE do not overflow.
    return Long.parseLong(value);
  }
}
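Editor's note: decodeNotNull returns a BigDecimal for unsigned BIGINT because Java's long is signed: the raw bits of values above 2^63 - 1 come out negative and must be reinterpreted. A self-contained sketch with Guava, mirroring unsignedValueOf above:

import com.google.common.primitives.UnsignedLong;
import java.math.BigDecimal;

public class UnsignedLongExample {
  public static void main(String[] args) {
    long bits = -1L; // raw bits of the unsigned value 2^64 - 1
    BigDecimal unsigned = new BigDecimal(UnsignedLong.fromLongBits(bits).bigIntegerValue());
    System.out.println(unsigned); // 18446744073709551615
  }
}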
Some files were not shown because too many files have changed in this diff.