mirror of https://github.com/tikv/client-java.git
Reform java client to combine with TiSpark's java client (#91)
This commit is contained in:
parent 26121ee224
commit df03a622e0

@ -0,0 +1,21 @@
# TiSpark Dev Tools Guide

## Formatting

### Java Format

TiKV Java Client formats its code with the [Google-Java-Format Maven Plugin](https://github.com/coveooss/fmt-maven-plugin), which follows Google's Java style guide. Formatting is also checked on CI before the build.

1. In IntelliJ IDEA:

    1. Download the [Google-Java-format Plugin](https://plugins.jetbrains.com/plugin/8527-google-java-format) from the marketplace, restart the IDE, and enable google-java-format by checking the box in `Other Settings`.

    2. Alternatively, use the [Java-Google-style xml file](./intellij-java-google-style.xml) and import the scheme into IntelliJ:

        `Preferences` -> `Editor` -> `Code Style` -> `Import Scheme` -> `Intellij IDEA Code Style XML`.

2. You may also run the [Java format script](./javafmt) before you commit and push to the corresponding dev branch:

    ```shell
    ./dev/javafmt
    ```
|
@ -0,0 +1,598 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<code_scheme name="GoogleStyle">
|
||||
<option name="OTHER_INDENT_OPTIONS">
|
||||
<value>
|
||||
<option name="INDENT_SIZE" value="2" />
|
||||
<option name="CONTINUATION_INDENT_SIZE" value="4" />
|
||||
<option name="TAB_SIZE" value="2" />
|
||||
<option name="USE_TAB_CHARACTER" value="false" />
|
||||
<option name="SMART_TABS" value="false" />
|
||||
<option name="LABEL_INDENT_SIZE" value="0" />
|
||||
<option name="LABEL_INDENT_ABSOLUTE" value="false" />
|
||||
<option name="USE_RELATIVE_INDENTS" value="false" />
|
||||
</value>
|
||||
</option>
|
||||
<option name="INSERT_INNER_CLASS_IMPORTS" value="true" />
|
||||
<option name="CLASS_COUNT_TO_USE_IMPORT_ON_DEMAND" value="999" />
|
||||
<option name="NAMES_COUNT_TO_USE_IMPORT_ON_DEMAND" value="999" />
|
||||
<option name="PACKAGES_TO_USE_IMPORT_ON_DEMAND">
|
||||
<value />
|
||||
</option>
|
||||
<option name="IMPORT_LAYOUT_TABLE">
|
||||
<value>
|
||||
<package name="" withSubpackages="true" static="true" />
|
||||
<emptyLine />
|
||||
<package name="" withSubpackages="true" static="false" />
|
||||
</value>
|
||||
</option>
|
||||
<option name="RIGHT_MARGIN" value="100" />
|
||||
<option name="JD_ALIGN_PARAM_COMMENTS" value="false" />
|
||||
<option name="JD_ALIGN_EXCEPTION_COMMENTS" value="false" />
|
||||
<option name="JD_P_AT_EMPTY_LINES" value="false" />
|
||||
<option name="JD_KEEP_EMPTY_PARAMETER" value="false" />
|
||||
<option name="JD_KEEP_EMPTY_EXCEPTION" value="false" />
|
||||
<option name="JD_KEEP_EMPTY_RETURN" value="false" />
|
||||
<option name="KEEP_CONTROL_STATEMENT_IN_ONE_LINE" value="false" />
|
||||
<option name="KEEP_BLANK_LINES_BEFORE_RBRACE" value="0" />
|
||||
<option name="KEEP_BLANK_LINES_IN_CODE" value="1" />
|
||||
<option name="BLANK_LINES_AFTER_CLASS_HEADER" value="0" />
|
||||
<option name="ALIGN_MULTILINE_PARAMETERS" value="false" />
|
||||
<option name="ALIGN_MULTILINE_FOR" value="false" />
|
||||
<option name="CALL_PARAMETERS_WRAP" value="1" />
|
||||
<option name="METHOD_PARAMETERS_WRAP" value="1" />
|
||||
<option name="EXTENDS_LIST_WRAP" value="1" />
|
||||
<option name="THROWS_KEYWORD_WRAP" value="1" />
|
||||
<option name="METHOD_CALL_CHAIN_WRAP" value="1" />
|
||||
<option name="BINARY_OPERATION_WRAP" value="1" />
|
||||
<option name="BINARY_OPERATION_SIGN_ON_NEXT_LINE" value="true" />
|
||||
<option name="TERNARY_OPERATION_WRAP" value="1" />
|
||||
<option name="TERNARY_OPERATION_SIGNS_ON_NEXT_LINE" value="true" />
|
||||
<option name="FOR_STATEMENT_WRAP" value="1" />
|
||||
<option name="ARRAY_INITIALIZER_WRAP" value="1" />
|
||||
<option name="WRAP_COMMENTS" value="true" />
|
||||
<option name="IF_BRACE_FORCE" value="3" />
|
||||
<option name="DOWHILE_BRACE_FORCE" value="3" />
|
||||
<option name="WHILE_BRACE_FORCE" value="3" />
|
||||
<option name="FOR_BRACE_FORCE" value="3" />
|
||||
<option name="SPACE_BEFORE_ARRAY_INITIALIZER_LBRACE" value="true" />
|
||||
<AndroidXmlCodeStyleSettings>
|
||||
<option name="USE_CUSTOM_SETTINGS" value="true" />
|
||||
<option name="LAYOUT_SETTINGS">
|
||||
<value>
|
||||
<option name="INSERT_BLANK_LINE_BEFORE_TAG" value="false" />
|
||||
</value>
|
||||
</option>
|
||||
</AndroidXmlCodeStyleSettings>
|
||||
<JSCodeStyleSettings>
|
||||
<option name="INDENT_CHAINED_CALLS" value="false" />
|
||||
</JSCodeStyleSettings>
|
||||
<Python>
|
||||
<option name="USE_CONTINUATION_INDENT_FOR_ARGUMENTS" value="true" />
|
||||
</Python>
|
||||
<TypeScriptCodeStyleSettings>
|
||||
<option name="INDENT_CHAINED_CALLS" value="false" />
|
||||
</TypeScriptCodeStyleSettings>
|
||||
<XML>
|
||||
<option name="XML_ALIGN_ATTRIBUTES" value="false" />
|
||||
<option name="XML_LEGACY_SETTINGS_IMPORTED" value="true" />
|
||||
</XML>
|
||||
<codeStyleSettings language="CSS">
|
||||
<indentOptions>
|
||||
<option name="INDENT_SIZE" value="2" />
|
||||
<option name="CONTINUATION_INDENT_SIZE" value="4" />
|
||||
<option name="TAB_SIZE" value="2" />
|
||||
</indentOptions>
|
||||
</codeStyleSettings>
|
||||
<codeStyleSettings language="ECMA Script Level 4">
|
||||
<option name="KEEP_BLANK_LINES_IN_CODE" value="1" />
|
||||
<option name="ALIGN_MULTILINE_PARAMETERS" value="false" />
|
||||
<option name="ALIGN_MULTILINE_FOR" value="false" />
|
||||
<option name="CALL_PARAMETERS_WRAP" value="1" />
|
||||
<option name="METHOD_PARAMETERS_WRAP" value="1" />
|
||||
<option name="EXTENDS_LIST_WRAP" value="1" />
|
||||
<option name="BINARY_OPERATION_WRAP" value="1" />
|
||||
<option name="BINARY_OPERATION_SIGN_ON_NEXT_LINE" value="true" />
|
||||
<option name="TERNARY_OPERATION_WRAP" value="1" />
|
||||
<option name="TERNARY_OPERATION_SIGNS_ON_NEXT_LINE" value="true" />
|
||||
<option name="FOR_STATEMENT_WRAP" value="1" />
|
||||
<option name="ARRAY_INITIALIZER_WRAP" value="1" />
|
||||
<option name="IF_BRACE_FORCE" value="3" />
|
||||
<option name="DOWHILE_BRACE_FORCE" value="3" />
|
||||
<option name="WHILE_BRACE_FORCE" value="3" />
|
||||
<option name="FOR_BRACE_FORCE" value="3" />
|
||||
<option name="PARENT_SETTINGS_INSTALLED" value="true" />
|
||||
</codeStyleSettings>
|
||||
<codeStyleSettings language="HTML">
|
||||
<indentOptions>
|
||||
<option name="INDENT_SIZE" value="2" />
|
||||
<option name="CONTINUATION_INDENT_SIZE" value="4" />
|
||||
<option name="TAB_SIZE" value="2" />
|
||||
</indentOptions>
|
||||
</codeStyleSettings>
|
||||
<codeStyleSettings language="JAVA">
|
||||
<option name="KEEP_CONTROL_STATEMENT_IN_ONE_LINE" value="false" />
|
||||
<option name="KEEP_BLANK_LINES_IN_CODE" value="1" />
|
||||
<option name="BLANK_LINES_AFTER_CLASS_HEADER" value="1" />
|
||||
<option name="ALIGN_MULTILINE_PARAMETERS" value="false" />
|
||||
<option name="ALIGN_MULTILINE_RESOURCES" value="false" />
|
||||
<option name="ALIGN_MULTILINE_FOR" value="false" />
|
||||
<option name="CALL_PARAMETERS_WRAP" value="1" />
|
||||
<option name="METHOD_PARAMETERS_WRAP" value="1" />
|
||||
<option name="EXTENDS_LIST_WRAP" value="1" />
|
||||
<option name="THROWS_KEYWORD_WRAP" value="1" />
|
||||
<option name="METHOD_CALL_CHAIN_WRAP" value="1" />
|
||||
<option name="BINARY_OPERATION_WRAP" value="1" />
|
||||
<option name="BINARY_OPERATION_SIGN_ON_NEXT_LINE" value="true" />
|
||||
<option name="TERNARY_OPERATION_WRAP" value="1" />
|
||||
<option name="TERNARY_OPERATION_SIGNS_ON_NEXT_LINE" value="true" />
|
||||
<option name="FOR_STATEMENT_WRAP" value="1" />
|
||||
<option name="ARRAY_INITIALIZER_WRAP" value="1" />
|
||||
<option name="WRAP_COMMENTS" value="true" />
|
||||
<option name="IF_BRACE_FORCE" value="3" />
|
||||
<option name="DOWHILE_BRACE_FORCE" value="3" />
|
||||
<option name="WHILE_BRACE_FORCE" value="3" />
|
||||
<option name="FOR_BRACE_FORCE" value="3" />
|
||||
<option name="PARENT_SETTINGS_INSTALLED" value="true" />
|
||||
<indentOptions>
|
||||
<option name="INDENT_SIZE" value="2" />
|
||||
<option name="CONTINUATION_INDENT_SIZE" value="4" />
|
||||
<option name="TAB_SIZE" value="2" />
|
||||
</indentOptions>
|
||||
</codeStyleSettings>
|
||||
<codeStyleSettings language="JSON">
|
||||
<indentOptions>
|
||||
<option name="CONTINUATION_INDENT_SIZE" value="4" />
|
||||
<option name="TAB_SIZE" value="2" />
|
||||
</indentOptions>
|
||||
</codeStyleSettings>
|
||||
<codeStyleSettings language="JavaScript">
|
||||
<option name="RIGHT_MARGIN" value="80" />
|
||||
<option name="KEEP_BLANK_LINES_IN_CODE" value="1" />
|
||||
<option name="ALIGN_MULTILINE_PARAMETERS" value="false" />
|
||||
<option name="ALIGN_MULTILINE_FOR" value="false" />
|
||||
<option name="CALL_PARAMETERS_WRAP" value="1" />
|
||||
<option name="METHOD_PARAMETERS_WRAP" value="1" />
|
||||
<option name="BINARY_OPERATION_WRAP" value="1" />
|
||||
<option name="BINARY_OPERATION_SIGN_ON_NEXT_LINE" value="true" />
|
||||
<option name="TERNARY_OPERATION_WRAP" value="1" />
|
||||
<option name="TERNARY_OPERATION_SIGNS_ON_NEXT_LINE" value="true" />
|
||||
<option name="FOR_STATEMENT_WRAP" value="1" />
|
||||
<option name="ARRAY_INITIALIZER_WRAP" value="1" />
|
||||
<option name="IF_BRACE_FORCE" value="3" />
|
||||
<option name="DOWHILE_BRACE_FORCE" value="3" />
|
||||
<option name="WHILE_BRACE_FORCE" value="3" />
|
||||
<option name="FOR_BRACE_FORCE" value="3" />
|
||||
<option name="PARENT_SETTINGS_INSTALLED" value="true" />
|
||||
<indentOptions>
|
||||
<option name="INDENT_SIZE" value="2" />
|
||||
<option name="TAB_SIZE" value="2" />
|
||||
</indentOptions>
|
||||
</codeStyleSettings>
|
||||
<codeStyleSettings language="PROTO">
|
||||
<option name="RIGHT_MARGIN" value="80" />
|
||||
<indentOptions>
|
||||
<option name="INDENT_SIZE" value="2" />
|
||||
<option name="CONTINUATION_INDENT_SIZE" value="2" />
|
||||
<option name="TAB_SIZE" value="2" />
|
||||
</indentOptions>
|
||||
</codeStyleSettings>
|
||||
<codeStyleSettings language="protobuf">
|
||||
<option name="RIGHT_MARGIN" value="80" />
|
||||
<indentOptions>
|
||||
<option name="INDENT_SIZE" value="2" />
|
||||
<option name="CONTINUATION_INDENT_SIZE" value="2" />
|
||||
<option name="TAB_SIZE" value="2" />
|
||||
</indentOptions>
|
||||
</codeStyleSettings>
|
||||
<codeStyleSettings language="Python">
|
||||
<option name="KEEP_BLANK_LINES_IN_CODE" value="1" />
|
||||
<option name="RIGHT_MARGIN" value="80" />
|
||||
<option name="ALIGN_MULTILINE_PARAMETERS" value="false" />
|
||||
<option name="PARENT_SETTINGS_INSTALLED" value="true" />
|
||||
<indentOptions>
|
||||
<option name="INDENT_SIZE" value="2" />
|
||||
<option name="CONTINUATION_INDENT_SIZE" value="4" />
|
||||
<option name="TAB_SIZE" value="2" />
|
||||
</indentOptions>
|
||||
</codeStyleSettings>
|
||||
<codeStyleSettings language="SASS">
|
||||
<indentOptions>
|
||||
<option name="CONTINUATION_INDENT_SIZE" value="4" />
|
||||
<option name="TAB_SIZE" value="2" />
|
||||
</indentOptions>
|
||||
</codeStyleSettings>
|
||||
<codeStyleSettings language="SCSS">
|
||||
<indentOptions>
|
||||
<option name="CONTINUATION_INDENT_SIZE" value="4" />
|
||||
<option name="TAB_SIZE" value="2" />
|
||||
</indentOptions>
|
||||
</codeStyleSettings>
|
||||
<codeStyleSettings language="TypeScript">
|
||||
<indentOptions>
|
||||
<option name="INDENT_SIZE" value="2" />
|
||||
<option name="TAB_SIZE" value="2" />
|
||||
</indentOptions>
|
||||
</codeStyleSettings>
|
||||
<codeStyleSettings language="XML">
|
||||
<indentOptions>
|
||||
<option name="INDENT_SIZE" value="2" />
|
||||
<option name="CONTINUATION_INDENT_SIZE" value="2" />
|
||||
<option name="TAB_SIZE" value="2" />
|
||||
</indentOptions>
|
||||
<arrangement>
|
||||
<rules>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>xmlns:android</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>^$</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>xmlns:.*</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>^$</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
<order>BY_NAME</order>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:id</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>style</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>^$</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>^$</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
<order>BY_NAME</order>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:.*Style</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
<order>BY_NAME</order>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:layout_width</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:layout_height</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:layout_weight</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:layout_margin</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:layout_marginTop</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:layout_marginBottom</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:layout_marginStart</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:layout_marginEnd</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:layout_marginLeft</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:layout_marginRight</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:layout_.*</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
<order>BY_NAME</order>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:padding</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:paddingTop</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:paddingBottom</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:paddingStart</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:paddingEnd</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:paddingLeft</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*:paddingRight</NAME>
|
||||
<XML_ATTRIBUTE />
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*</NAME>
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
<order>BY_NAME</order>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*</NAME>
|
||||
<XML_NAMESPACE>http://schemas.android.com/apk/res-auto</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
<order>BY_NAME</order>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*</NAME>
|
||||
<XML_NAMESPACE>http://schemas.android.com/tools</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
<order>BY_NAME</order>
|
||||
</rule>
|
||||
</section>
|
||||
<section>
|
||||
<rule>
|
||||
<match>
|
||||
<AND>
|
||||
<NAME>.*</NAME>
|
||||
<XML_NAMESPACE>.*</XML_NAMESPACE>
|
||||
</AND>
|
||||
</match>
|
||||
<order>BY_NAME</order>
|
||||
</rule>
|
||||
</section>
|
||||
</rules>
|
||||
</arrangement>
|
||||
</codeStyleSettings>
|
||||
<Objective-C>
|
||||
<option name="INDENT_NAMESPACE_MEMBERS" value="0" />
|
||||
<option name="INDENT_C_STRUCT_MEMBERS" value="2" />
|
||||
<option name="INDENT_CLASS_MEMBERS" value="2" />
|
||||
<option name="INDENT_VISIBILITY_KEYWORDS" value="1" />
|
||||
<option name="INDENT_INSIDE_CODE_BLOCK" value="2" />
|
||||
<option name="KEEP_STRUCTURES_IN_ONE_LINE" value="true" />
|
||||
<option name="FUNCTION_PARAMETERS_WRAP" value="5" />
|
||||
<option name="FUNCTION_CALL_ARGUMENTS_WRAP" value="5" />
|
||||
<option name="TEMPLATE_CALL_ARGUMENTS_WRAP" value="5" />
|
||||
<option name="TEMPLATE_CALL_ARGUMENTS_ALIGN_MULTILINE" value="true" />
|
||||
<option name="ALIGN_INIT_LIST_IN_COLUMNS" value="false" />
|
||||
<option name="SPACE_BEFORE_SUPERCLASS_COLON" value="false" />
|
||||
</Objective-C>
|
||||
<Objective-C-extensions>
|
||||
<option name="GENERATE_INSTANCE_VARIABLES_FOR_PROPERTIES" value="ASK" />
|
||||
<option name="RELEASE_STYLE" value="IVAR" />
|
||||
<option name="TYPE_QUALIFIERS_PLACEMENT" value="BEFORE" />
|
||||
<file>
|
||||
<option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Import" />
|
||||
<option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Macro" />
|
||||
<option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Typedef" />
|
||||
<option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Enum" />
|
||||
<option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Constant" />
|
||||
<option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Global" />
|
||||
<option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Struct" />
|
||||
<option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="FunctionPredecl" />
|
||||
<option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Function" />
|
||||
</file>
|
||||
<class>
|
||||
<option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Property" />
|
||||
<option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Synthesize" />
|
||||
<option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="InitMethod" />
|
||||
<option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="StaticMethod" />
|
||||
<option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="InstanceMethod" />
|
||||
<option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="DeallocMethod" />
|
||||
</class>
|
||||
<extensions>
|
||||
<pair source="cc" header="h" />
|
||||
<pair source="c" header="h" />
|
||||
</extensions>
|
||||
</Objective-C-extensions>
|
||||
<codeStyleSettings language="ObjectiveC">
|
||||
<option name="RIGHT_MARGIN" value="80" />
|
||||
<option name="KEEP_BLANK_LINES_BEFORE_RBRACE" value="1" />
|
||||
<option name="BLANK_LINES_BEFORE_IMPORTS" value="0" />
|
||||
<option name="BLANK_LINES_AFTER_IMPORTS" value="0" />
|
||||
<option name="BLANK_LINES_AROUND_CLASS" value="0" />
|
||||
<option name="BLANK_LINES_AROUND_METHOD" value="0" />
|
||||
<option name="BLANK_LINES_AROUND_METHOD_IN_INTERFACE" value="0" />
|
||||
<option name="ALIGN_MULTILINE_BINARY_OPERATION" value="false" />
|
||||
<option name="BINARY_OPERATION_SIGN_ON_NEXT_LINE" value="true" />
|
||||
<option name="FOR_STATEMENT_WRAP" value="1" />
|
||||
<option name="ASSIGNMENT_WRAP" value="1" />
|
||||
<indentOptions>
|
||||
<option name="INDENT_SIZE" value="2" />
|
||||
<option name="CONTINUATION_INDENT_SIZE" value="4" />
|
||||
</indentOptions>
|
||||
</codeStyleSettings>
|
||||
</code_scheme>
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash

echo $MAVEN_HOME

mvn com.coveo:fmt-maven-plugin:format
69 pom.xml
@ -5,7 +5,7 @@
|
|||
|
||||
<groupId>org.tikv</groupId>
|
||||
<artifactId>tikv-client-java</artifactId>
|
||||
<version>2.0-SNAPSHOT</version>
|
||||
<version>3.0.0</version>
|
||||
<packaging>jar</packaging>
|
||||
<name>TiSpark Project TiKV Java Client</name>
|
||||
|
||||
|
|
@ -19,6 +19,7 @@
|
|||
<powermock.version>1.6.6</powermock.version>
|
||||
<jackson.version>2.10.0</jackson.version>
|
||||
<trove4j.version>3.0.1</trove4j.version>
|
||||
<jetcd.version>0.4.1</jetcd.version>
|
||||
<joda-time.version>2.9.9</joda-time.version>
|
||||
<joda-convert.version>1.9.2</joda-convert.version>
|
||||
<proto.folder>${basedir}/proto</proto.folder>
|
||||
|
|
@ -27,6 +28,11 @@
|
|||
</properties>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.antlr</groupId>
|
||||
<artifactId>antlr4-runtime</artifactId>
|
||||
<version>4.7.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
|
|
@ -51,6 +57,11 @@
|
|||
<version>${slf4j.version}</version>
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sangupta</groupId>
|
||||
<artifactId>murmur</artifactId>
|
||||
<version>1.0.0</version>
|
||||
</dependency>
|
||||
<!-- grpc dependencies -->
|
||||
<dependency>
|
||||
<groupId>io.grpc</groupId>
|
||||
|
|
@ -73,6 +84,35 @@
|
|||
<version>${grpc.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-annotations</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.etcd</groupId>
|
||||
<artifactId>jetcd-core</artifactId>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>io.etcd</groupId>
|
||||
<artifactId>jetcd-resolver</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>io.etcd</groupId>
|
||||
<artifactId>jetcd-common</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>io.grpc</groupId>
|
||||
<artifactId>grpc-grpclb</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
<version>${jetcd.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
|
|
@ -95,6 +135,12 @@
|
|||
<version>3.9</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-lang3</artifactId>
|
||||
<version>3.9</version>
|
||||
<scope>compile</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
<build>
|
||||
<resources>
|
||||
|
|
@ -110,6 +156,27 @@
|
|||
</extension>
|
||||
</extensions>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.antlr</groupId>
|
||||
<artifactId>antlr4-maven-plugin</artifactId>
|
||||
<version>4.7.1</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<goals>
|
||||
<goal>antlr4</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
<configuration>
|
||||
<arguments>
|
||||
<argument>-package</argument>
|
||||
<argument>org.tikv.common.parser</argument>
|
||||
</arguments>
|
||||
<visitor>true</visitor>
|
||||
<sourceDirectory>./src/main/java/org/tikv/common/parser</sourceDirectory>
|
||||
<outputDirectory>./target/generated-sources/antlr4/java/org/tikv/common/parser</outputDirectory>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>exec-maven-plugin</artifactId>
|
||||
|
|
|
|||
|
|
@ -0,0 +1,34 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common;
|
||||
|
||||
public class BytePairWrapper {
|
||||
private final byte[] key;
|
||||
private final byte[] value;
|
||||
|
||||
public BytePairWrapper(byte[] key, byte[] value) {
|
||||
this.key = key;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public byte[] getKey() {
|
||||
return key;
|
||||
}
|
||||
|
||||
public byte[] getValue() {
|
||||
return value;
|
||||
}
|
||||
}
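
For illustration only (not part of the commit): a minimal sketch of how this pair wrapper is typically consumed, for example from `Snapshot.batchGet` later in this change, which returns a `List<BytePairWrapper>`. The helper name and UTF-8 decoding are assumptions made for the example.

```java
import java.nio.charset.StandardCharsets;
import java.util.List;
import org.tikv.common.BytePairWrapper;

class BytePairWrapperExample {
  // Hypothetical helper: print the key/value pairs returned by a batch get.
  static void printPairs(List<BytePairWrapper> pairs) {
    for (BytePairWrapper pair : pairs) {
      System.out.println(
          new String(pair.getKey(), StandardCharsets.UTF_8)
              + " -> "
              + new String(pair.getValue(), StandardCharsets.UTF_8));
    }
  }
}
```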
|
||||
|
|
@ -0,0 +1,49 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
public class ByteWrapper {
|
||||
private final byte[] bytes;
|
||||
|
||||
public ByteWrapper(byte[] bytes) {
|
||||
this.bytes = bytes;
|
||||
}
|
||||
|
||||
public byte[] getBytes() {
|
||||
return this.bytes;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
ByteWrapper that = (ByteWrapper) o;
|
||||
|
||||
return Arrays.equals(bytes, that.bytes);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Arrays.hashCode(bytes);
|
||||
}
|
||||
}
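
A short usage sketch (illustrative, not part of the commit): because `ByteWrapper` implements value-based `equals`/`hashCode` over the wrapped array, it can serve as a map or set key, whereas a raw `byte[]` only has identity equality.

```java
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import org.tikv.common.ByteWrapper;

class ByteWrapperExample {
  public static void main(String[] args) {
    Map<ByteWrapper, String> index = new HashMap<>();
    byte[] key = "region-1".getBytes(StandardCharsets.UTF_8);
    index.put(new ByteWrapper(key), "value");

    // A different array with the same contents still finds the entry,
    // which would not work with byte[] keys directly.
    byte[] sameBytes = "region-1".getBytes(StandardCharsets.UTF_8);
    System.out.println(index.get(new ByteWrapper(sameBytes))); // prints "value"
  }
}
```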
|
||||
|
|
@ -0,0 +1,67 @@
|
|||
/*
|
||||
*
|
||||
* Copyright 2019 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package org.tikv.common;
|
||||
|
||||
import java.sql.Timestamp;
|
||||
import org.joda.time.DateTime;
|
||||
|
||||
/** Extend joda DateTime to support micro second */
|
||||
public class ExtendedDateTime {
|
||||
|
||||
private final DateTime dateTime;
|
||||
private final int microsOfMillis;
|
||||
|
||||
/**
 * If timestamp = 2019-11-11 11:11:11.123456, then dateTime = 2019-11-11 11:11:11.123 and
 * microsOfMillis = 456.
 *
 * @param dateTime the millisecond-precision part of the time
 * @param microsOfMillis the extra microseconds within the current millisecond (0-999)
 */
|
||||
public ExtendedDateTime(DateTime dateTime, int microsOfMillis) {
|
||||
this.dateTime = dateTime;
|
||||
this.microsOfMillis = microsOfMillis;
|
||||
}
|
||||
|
||||
public ExtendedDateTime(DateTime dateTime) {
|
||||
this.dateTime = dateTime;
|
||||
this.microsOfMillis = 0;
|
||||
}
|
||||
|
||||
public DateTime getDateTime() {
|
||||
return dateTime;
|
||||
}
|
||||
|
||||
public int getMicrosOfSeconds() {
|
||||
return dateTime.getMillisOfSecond() * 1000 + microsOfMillis;
|
||||
}
|
||||
|
||||
public int getMicrosOfMillis() {
|
||||
return microsOfMillis;
|
||||
}
|
||||
|
||||
public Timestamp toTimeStamp() {
|
||||
Timestamp timestamp = new Timestamp(dateTime.getMillis() / 1000 * 1000);
|
||||
timestamp.setNanos(dateTime.getMillisOfSecond() * 1000000 + microsOfMillis * 1000);
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
public long toEpochMicro() {
|
||||
return toTimeStamp().getTime() * 1000 + getMicrosOfMillis();
|
||||
}
|
||||
}
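
A worked example (illustrative only) of the constructor contract described in the Javadoc above: for 2019-11-11 11:11:11.123456, the joda `DateTime` carries the milliseconds and `microsOfMillis` carries the remaining 456 microseconds.

```java
import org.joda.time.DateTime;
import org.tikv.common.ExtendedDateTime;

class ExtendedDateTimeExample {
  public static void main(String[] args) {
    // 2019-11-11 11:11:11.123 plus 456 microseconds within that millisecond.
    DateTime millisPart = new DateTime(2019, 11, 11, 11, 11, 11, 123);
    ExtendedDateTime edt = new ExtendedDateTime(millisPart, 456);

    System.out.println(edt.getMicrosOfSeconds()); // 123456
    System.out.println(edt.toTimeStamp().getNanos()); // 123456000
  }
}
```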
|
||||
|
|
@ -0,0 +1,272 @@
|
|||
/*
|
||||
*
|
||||
* Copyright 2019 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package org.tikv.common;
|
||||
|
||||
import com.google.common.util.concurrent.ThreadFactoryBuilder;
|
||||
import com.google.protobuf.ByteString;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.ExecutorCompletionService;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.stream.Collectors;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.tikv.common.exception.GrpcException;
|
||||
import org.tikv.common.exception.TiKVException;
|
||||
import org.tikv.common.operation.iterator.ConcreteScanIterator;
|
||||
import org.tikv.common.region.RegionStoreClient;
|
||||
import org.tikv.common.region.RegionStoreClient.RegionStoreClientBuilder;
|
||||
import org.tikv.common.region.TiRegion;
|
||||
import org.tikv.common.util.BackOffFunction;
|
||||
import org.tikv.common.util.BackOffer;
|
||||
import org.tikv.common.util.ConcreteBackOffer;
|
||||
import org.tikv.kvproto.Kvrpcpb.KvPair;
|
||||
|
||||
public class KVClient implements AutoCloseable {
|
||||
private static final Logger logger = LoggerFactory.getLogger(KVClient.class);
|
||||
private static final int BATCH_GET_SIZE = 16 * 1024;
|
||||
private final RegionStoreClientBuilder clientBuilder;
|
||||
private final TiConfiguration conf;
|
||||
private final ExecutorService executorService;
|
||||
|
||||
public KVClient(TiConfiguration conf, RegionStoreClientBuilder clientBuilder) {
|
||||
Objects.requireNonNull(conf, "conf is null");
|
||||
Objects.requireNonNull(clientBuilder, "clientBuilder is null");
|
||||
this.conf = conf;
|
||||
this.clientBuilder = clientBuilder;
|
||||
executorService =
|
||||
Executors.newFixedThreadPool(
|
||||
conf.getKvClientConcurrency(),
|
||||
new ThreadFactoryBuilder().setNameFormat("kvclient-pool-%d").setDaemon(true).build());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
if (executorService != null) {
|
||||
executorService.shutdownNow();
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Get the value of a key from TiKV if the key exists.
 *
 * @param key key to look up
 * @param version the snapshot version (timestamp) to read at
 * @return the value if the key exists, ByteString.EMPTY if it does not
 */
|
||||
public ByteString get(ByteString key, long version) throws GrpcException {
|
||||
BackOffer backOffer = ConcreteBackOffer.newGetBackOff();
|
||||
while (true) {
|
||||
RegionStoreClient client = clientBuilder.build(key);
|
||||
try {
|
||||
return client.get(backOffer, key, version);
|
||||
} catch (final TiKVException e) {
|
||||
backOffer.doBackOff(BackOffFunction.BackOffFuncType.BoRegionMiss, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Get key-value pairs for a set of keys from TiKV.
 *
 * @param backOffer back-off policy used for retries
 * @param keys keys to look up
 * @param version the snapshot version (timestamp) to read at
 * @return key-value pairs for the keys that exist
 * @throws GrpcException if the underlying RPC fails
 */
|
||||
public List<KvPair> batchGet(BackOffer backOffer, List<ByteString> keys, long version)
|
||||
throws GrpcException {
|
||||
return doSendBatchGet(backOffer, keys, version);
|
||||
}
|
||||
|
||||
/**
 * Scan key-value pairs from TiKV in range [startKey, endKey).
 *
 * @param startKey start key, inclusive
 * @param endKey end key, exclusive
 * @param version the snapshot version (timestamp) to read at
 * @return list of key-value pairs in range
 */
|
||||
public List<KvPair> scan(ByteString startKey, ByteString endKey, long version)
|
||||
throws GrpcException {
|
||||
Iterator<KvPair> iterator = scanIterator(conf, clientBuilder, startKey, endKey, version);
|
||||
List<KvPair> result = new ArrayList<>();
|
||||
iterator.forEachRemaining(result::add);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
 * Scan key-value pairs from TiKV starting at startKey (inclusive, with no upper bound), returning
 * at most `limit` pairs.
 *
 * @param startKey start key, inclusive
 * @param version the snapshot version (timestamp) to read at
 * @param limit maximum number of key-value pairs to return
 * @return list of key-value pairs in range
 */
|
||||
public List<KvPair> scan(ByteString startKey, long version, int limit) throws GrpcException {
|
||||
Iterator<KvPair> iterator = scanIterator(conf, clientBuilder, startKey, version, limit);
|
||||
List<KvPair> result = new ArrayList<>();
|
||||
iterator.forEachRemaining(result::add);
|
||||
return result;
|
||||
}
|
||||
|
||||
public List<KvPair> scan(ByteString startKey, long version) throws GrpcException {
|
||||
return scan(startKey, version, Integer.MAX_VALUE);
|
||||
}
|
||||
|
||||
private List<KvPair> doSendBatchGet(BackOffer backOffer, List<ByteString> keys, long version) {
|
||||
ExecutorCompletionService<List<KvPair>> completionService =
|
||||
new ExecutorCompletionService<>(executorService);
|
||||
|
||||
Map<TiRegion, List<ByteString>> groupKeys = groupKeysByRegion(keys);
|
||||
List<Batch> batches = new ArrayList<>();
|
||||
|
||||
for (Map.Entry<TiRegion, List<ByteString>> entry : groupKeys.entrySet()) {
|
||||
appendBatches(batches, entry.getKey(), entry.getValue(), BATCH_GET_SIZE);
|
||||
}
|
||||
|
||||
for (Batch batch : batches) {
|
||||
BackOffer singleBatchBackOffer = ConcreteBackOffer.create(backOffer);
|
||||
completionService.submit(
|
||||
() -> doSendBatchGetInBatchesWithRetry(singleBatchBackOffer, batch, version));
|
||||
}
|
||||
|
||||
try {
|
||||
List<KvPair> result = new ArrayList<>();
|
||||
for (int i = 0; i < batches.size(); i++) {
|
||||
result.addAll(completionService.take().get());
|
||||
}
|
||||
return result;
|
||||
} catch (InterruptedException e) {
|
||||
Thread.currentThread().interrupt();
|
||||
throw new TiKVException("Current thread interrupted.", e);
|
||||
} catch (ExecutionException e) {
|
||||
throw new TiKVException("Execution exception met.", e);
|
||||
}
|
||||
}
|
||||
|
||||
private List<KvPair> doSendBatchGetInBatchesWithRetry(
|
||||
BackOffer backOffer, Batch batch, long version) {
|
||||
TiRegion oldRegion = batch.region;
|
||||
TiRegion currentRegion =
|
||||
clientBuilder.getRegionManager().getRegionByKey(oldRegion.getStartKey());
|
||||
|
||||
if (oldRegion.equals(currentRegion)) {
|
||||
RegionStoreClient client = clientBuilder.build(batch.region);
|
||||
try {
|
||||
return client.batchGet(backOffer, batch.keys, version);
|
||||
} catch (final TiKVException e) {
|
||||
backOffer.doBackOff(BackOffFunction.BackOffFuncType.BoRegionMiss, e);
|
||||
clientBuilder.getRegionManager().invalidateRegion(batch.region.getId());
|
||||
logger.warn("ReSplitting ranges for BatchGetRequest", e);
|
||||
|
||||
// retry
|
||||
return doSendBatchGetWithRefetchRegion(backOffer, batch, version);
|
||||
}
|
||||
} else {
|
||||
return doSendBatchGetWithRefetchRegion(backOffer, batch, version);
|
||||
}
|
||||
}
|
||||
|
||||
private List<KvPair> doSendBatchGetWithRefetchRegion(
|
||||
BackOffer backOffer, Batch batch, long version) {
|
||||
Map<TiRegion, List<ByteString>> groupKeys = groupKeysByRegion(batch.keys);
|
||||
List<Batch> retryBatches = new ArrayList<>();
|
||||
|
||||
for (Map.Entry<TiRegion, List<ByteString>> entry : groupKeys.entrySet()) {
|
||||
appendBatches(retryBatches, entry.getKey(), entry.getValue(), BATCH_GET_SIZE);
|
||||
}
|
||||
|
||||
ArrayList<KvPair> results = new ArrayList<>();
|
||||
for (Batch retryBatch : retryBatches) {
|
||||
// recursive calls
|
||||
List<KvPair> batchResult = doSendBatchGetInBatchesWithRetry(backOffer, retryBatch, version);
|
||||
results.addAll(batchResult);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
 * Append batches to the list, splitting a region's keys into chunks that respect the batch size
 * limit.
 *
 * @param batches the list of batches to append to
 * @param region the region that owns the keys
 * @param keys keys belonging to the region
 * @param batchGetMaxSizeInByte maximum total key size of a single batch, in bytes
 */
|
||||
private void appendBatches(
|
||||
List<Batch> batches, TiRegion region, List<ByteString> keys, int batchGetMaxSizeInByte) {
|
||||
int start;
|
||||
int end;
|
||||
if (keys == null) {
|
||||
return;
|
||||
}
|
||||
int len = keys.size();
|
||||
for (start = 0; start < len; start = end) {
|
||||
int size = 0;
|
||||
for (end = start; end < len && size < batchGetMaxSizeInByte; end++) {
|
||||
size += keys.get(end).size();
|
||||
}
|
||||
Batch batch = new Batch(region, keys.subList(start, end));
|
||||
batches.add(batch);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Group a list of keys by the region that owns them.
 *
 * @param keys keys to group
 * @return a mapping from region to the keys it contains
 */
|
||||
private Map<TiRegion, List<ByteString>> groupKeysByRegion(List<ByteString> keys) {
|
||||
return keys.stream()
|
||||
.collect(Collectors.groupingBy(clientBuilder.getRegionManager()::getRegionByKey));
|
||||
}
|
||||
|
||||
private Iterator<KvPair> scanIterator(
|
||||
TiConfiguration conf,
|
||||
RegionStoreClientBuilder builder,
|
||||
ByteString startKey,
|
||||
ByteString endKey,
|
||||
long version) {
|
||||
return new ConcreteScanIterator(conf, builder, startKey, endKey, version);
|
||||
}
|
||||
|
||||
private Iterator<KvPair> scanIterator(
|
||||
TiConfiguration conf,
|
||||
RegionStoreClientBuilder builder,
|
||||
ByteString startKey,
|
||||
long version,
|
||||
int limit) {
|
||||
return new ConcreteScanIterator(conf, builder, startKey, version, limit);
|
||||
}
|
||||
|
||||
/** A Batch containing the region and a list of keys to send */
|
||||
private static final class Batch {
|
||||
private final TiRegion region;
|
||||
private final List<ByteString> keys;
|
||||
|
||||
Batch(TiRegion region, List<ByteString> keys) {
|
||||
this.region = region;
|
||||
this.keys = keys;
|
||||
}
|
||||
}
|
||||
}
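
A usage sketch (illustrative only): it assumes a `TiSession` is already available and relies on the `getConf()`/`getRegionStoreClientBuilder()` accessors that the `Snapshot` class later in this change also uses.

```java
import com.google.protobuf.ByteString;
import java.util.List;
import org.tikv.common.KVClient;
import org.tikv.common.TiSession;
import org.tikv.kvproto.Kvrpcpb.KvPair;

class KVClientExample {
  static void readAtVersion(TiSession session, long version) {
    // KVClient is AutoCloseable, so try-with-resources shuts down its thread pool.
    try (KVClient client =
        new KVClient(session.getConf(), session.getRegionStoreClientBuilder())) {
      ByteString value = client.get(ByteString.copyFromUtf8("k1"), version);
      List<KvPair> range =
          client.scan(ByteString.copyFromUtf8("a"), ByteString.copyFromUtf8("z"), version);
      System.out.println(value.toStringUtf8() + ", " + range.size());
    }
  }
}
```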
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common;
|
||||
|
||||
public class Main {
|
||||
public static void main(String[] args) throws Exception {}
|
||||
}
|
||||
|
|
@ -22,40 +22,78 @@ import static org.tikv.common.pd.PDError.buildFromPdpbError;
|
|||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.util.concurrent.ThreadFactoryBuilder;
|
||||
import com.google.protobuf.ByteString;
|
||||
import io.etcd.jetcd.ByteSequence;
|
||||
import io.etcd.jetcd.Client;
|
||||
import io.etcd.jetcd.KeyValue;
|
||||
import io.etcd.jetcd.kv.GetResponse;
|
||||
import io.etcd.jetcd.options.GetOption;
|
||||
import io.grpc.ManagedChannel;
|
||||
import java.net.URI;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.*;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.ScheduledExecutorService;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Supplier;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.tikv.common.TiConfiguration.KVMode;
|
||||
import org.tikv.common.codec.Codec.BytesCodec;
|
||||
import org.tikv.common.codec.CodecDataOutput;
|
||||
import org.tikv.common.codec.KeyUtils;
|
||||
import org.tikv.common.exception.GrpcException;
|
||||
import org.tikv.common.exception.TiClientInternalException;
|
||||
import org.tikv.common.meta.TiTimestamp;
|
||||
import org.tikv.common.operation.NoopHandler;
|
||||
import org.tikv.common.operation.PDErrorHandler;
|
||||
import org.tikv.common.pd.PDUtils;
|
||||
import org.tikv.common.region.TiRegion;
|
||||
import org.tikv.common.util.BackOffFunction.BackOffFuncType;
|
||||
import org.tikv.common.util.BackOffer;
|
||||
import org.tikv.common.util.ChannelFactory;
|
||||
import org.tikv.common.util.ConcreteBackOffer;
|
||||
import org.tikv.common.util.FutureObserver;
|
||||
import org.tikv.kvproto.Metapb.Store;
|
||||
import org.tikv.kvproto.PDGrpc;
|
||||
import org.tikv.kvproto.PDGrpc.PDBlockingStub;
|
||||
import org.tikv.kvproto.PDGrpc.PDStub;
|
||||
import org.tikv.kvproto.Pdpb.*;
|
||||
import org.tikv.kvproto.Pdpb.Error;
|
||||
import org.tikv.kvproto.Pdpb.ErrorType;
|
||||
import org.tikv.kvproto.Pdpb.GetAllStoresRequest;
|
||||
import org.tikv.kvproto.Pdpb.GetMembersRequest;
|
||||
import org.tikv.kvproto.Pdpb.GetMembersResponse;
|
||||
import org.tikv.kvproto.Pdpb.GetOperatorRequest;
|
||||
import org.tikv.kvproto.Pdpb.GetOperatorResponse;
|
||||
import org.tikv.kvproto.Pdpb.GetRegionByIDRequest;
|
||||
import org.tikv.kvproto.Pdpb.GetRegionRequest;
|
||||
import org.tikv.kvproto.Pdpb.GetRegionResponse;
|
||||
import org.tikv.kvproto.Pdpb.GetStoreRequest;
|
||||
import org.tikv.kvproto.Pdpb.GetStoreResponse;
|
||||
import org.tikv.kvproto.Pdpb.OperatorStatus;
|
||||
import org.tikv.kvproto.Pdpb.RequestHeader;
|
||||
import org.tikv.kvproto.Pdpb.ResponseHeader;
|
||||
import org.tikv.kvproto.Pdpb.ScatterRegionRequest;
|
||||
import org.tikv.kvproto.Pdpb.ScatterRegionResponse;
|
||||
import org.tikv.kvproto.Pdpb.Timestamp;
|
||||
import org.tikv.kvproto.Pdpb.TsoRequest;
|
||||
import org.tikv.kvproto.Pdpb.TsoResponse;
|
||||
|
||||
/** PDClient is thread-safe and is suggested to be shared among threads. */
|
||||
public class PDClient extends AbstractGRPCClient<PDBlockingStub, PDStub>
|
||||
implements ReadOnlyPDClient {
|
||||
private static final String TIFLASH_TABLE_SYNC_PROGRESS_PATH = "/tiflash/table/sync";
|
||||
private final Logger logger = LoggerFactory.getLogger(PDClient.class);
|
||||
private RequestHeader header;
|
||||
private TsoRequest tsoReq;
|
||||
private volatile LeaderWrapper leaderWrapper;
|
||||
private ScheduledExecutorService service;
|
||||
private ScheduledExecutorService tiflashReplicaService;
|
||||
private List<URI> pdAddrs;
|
||||
private Client etcdClient;
|
||||
private ConcurrentMap<Long, Double> tiflashReplicaMap;
|
||||
|
||||
private PDClient(TiConfiguration conf, ChannelFactory channelFactory) {
|
||||
super(conf, channelFactory);
|
||||
|
|
@ -110,18 +148,71 @@ public class PDClient extends AbstractGRPCClient<PDBlockingStub, PDStub>
|
|||
}
|
||||
}
|
||||
|
||||
/**
 * Wait until the scatter-region operator on the given region has finished.
 *
 * @param region the region being scattered
 * @param backOffer back-off policy used while polling the operator status
 */
|
||||
void waitScatterRegionFinish(TiRegion region, BackOffer backOffer) {
|
||||
for (; ; ) {
|
||||
GetOperatorResponse resp = getOperator(region.getId());
|
||||
if (resp != null) {
|
||||
if (isScatterRegionFinish(resp)) {
|
||||
logger.info(String.format("wait scatter region on %d is finished", region.getId()));
|
||||
return;
|
||||
} else {
|
||||
backOffer.doBackOff(
|
||||
BackOffFuncType.BoRegionMiss, new GrpcException("waiting scatter region"));
|
||||
logger.info(
|
||||
String.format(
|
||||
"wait scatter region %d at key %s is %s",
|
||||
region.getId(),
|
||||
KeyUtils.formatBytes(resp.getDesc().toByteArray()),
|
||||
resp.getStatus().toString()));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private GetOperatorResponse getOperator(long regionId) {
|
||||
Supplier<GetOperatorRequest> request =
|
||||
() -> GetOperatorRequest.newBuilder().setHeader(header).setRegionId(regionId).build();
|
||||
// get operator no need to handle error and no need back offer.
|
||||
return callWithRetry(
|
||||
ConcreteBackOffer.newCustomBackOff(0),
|
||||
PDGrpc.getGetOperatorMethod(),
|
||||
request,
|
||||
new NoopHandler<>());
|
||||
}
|
||||
|
||||
private boolean isScatterRegionFinish(GetOperatorResponse resp) {
|
||||
// If the current operator of the region is not `scatter-region`, we can assume that the
// scatter operation has finished or timed out.
|
||||
boolean finished =
|
||||
!resp.getDesc().equals(ByteString.copyFromUtf8("scatter-region"))
|
||||
|| resp.getStatus() != OperatorStatus.RUNNING;
|
||||
|
||||
if (resp.hasHeader()) {
|
||||
ResponseHeader header = resp.getHeader();
|
||||
if (header.hasError()) {
|
||||
Error error = header.getError();
|
||||
// the region heartbeat may not have been sent to PD yet
|
||||
if (error.getType() == ErrorType.REGION_NOT_FOUND) {
|
||||
finished = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return finished;
|
||||
}
|
||||
|
||||
@Override
|
||||
public TiRegion getRegionByKey(BackOffer backOffer, ByteString key) {
|
||||
Supplier<GetRegionRequest> request;
|
||||
if (conf.getKvMode() == KVMode.RAW) {
|
||||
request = () -> GetRegionRequest.newBuilder().setHeader(header).setRegionKey(key).build();
|
||||
} else {
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
BytesCodec.writeBytes(cdo, key.toByteArray());
|
||||
ByteString encodedKey = cdo.toByteString();
|
||||
request =
|
||||
() -> GetRegionRequest.newBuilder().setHeader(header).setRegionKey(encodedKey).build();
|
||||
}
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
BytesCodec.writeBytes(cdo, key.toByteArray());
|
||||
ByteString encodedKey = cdo.toByteString();
|
||||
|
||||
Supplier<GetRegionRequest> request =
|
||||
() -> GetRegionRequest.newBuilder().setHeader(header).setRegionKey(encodedKey).build();
|
||||
|
||||
PDErrorHandler<GetRegionResponse> handler =
|
||||
new PDErrorHandler<>(getRegionResponseErrorExtractor, this);
|
||||
|
|
@ -232,10 +323,26 @@ public class PDClient extends AbstractGRPCClient<PDBlockingStub, PDStub>
|
|||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
public List<Store> getAllStores(BackOffer backOffer) {
|
||||
return callWithRetry(
|
||||
backOffer,
|
||||
PDGrpc.getGetAllStoresMethod(),
|
||||
buildGetAllStoresReq(),
|
||||
new PDErrorHandler<>(
|
||||
r -> r.getHeader().hasError() ? buildFromPdpbError(r.getHeader().getError()) : null,
|
||||
this))
|
||||
.getStoresList();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws InterruptedException {
|
||||
etcdClient.close();
|
||||
if (service != null) {
|
||||
service.shutdownNow();
|
||||
}
|
||||
if (tiflashReplicaService != null) {
|
||||
tiflashReplicaService.shutdownNow();
|
||||
}
|
||||
if (channelFactory != null) {
|
||||
channelFactory.close();
|
||||
}
|
||||
|
|
@ -320,6 +427,59 @@ public class PDClient extends AbstractGRPCClient<PDBlockingStub, PDStub>
|
|||
"already tried all address on file, but not leader found yet.");
|
||||
}
|
||||
|
||||
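/**
 * Refreshes the cached TiFlash replica sync progress from PD's embedded etcd. Note (inferred from
 * the parsing below rather than from any external spec): each key is expected to be
 * TIFLASH_TABLE_SYNC_PROGRESS_PATH followed by a numeric table ID, and each value a decimal
 * progress.
 */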
public void updateTiFlashReplicaStatus() {
|
||||
ByteSequence prefix =
|
||||
ByteSequence.from(TIFLASH_TABLE_SYNC_PROGRESS_PATH, StandardCharsets.UTF_8);
|
||||
for (int i = 0; i < 5; i++) {
|
||||
CompletableFuture<GetResponse> resp;
|
||||
try {
|
||||
resp =
|
||||
etcdClient.getKVClient().get(prefix, GetOption.newBuilder().withPrefix(prefix).build());
|
||||
} catch (Exception e) {
|
||||
logger.info("get tiflash table replica sync progress failed, continue checking.", e);
|
||||
continue;
|
||||
}
|
||||
GetResponse getResp;
|
||||
try {
|
||||
getResp = resp.get();
|
||||
} catch (InterruptedException e) {
|
||||
Thread.currentThread().interrupt();
|
||||
continue;
|
||||
} catch (ExecutionException e) {
|
||||
throw new GrpcException("failed to update tiflash replica", e);
|
||||
}
|
||||
ConcurrentMap<Long, Double> progressMap = new ConcurrentHashMap<>();
|
||||
for (KeyValue kv : getResp.getKvs()) {
|
||||
long tableId;
|
||||
try {
|
||||
tableId =
|
||||
Long.parseLong(
|
||||
kv.getKey().toString().substring(TIFLASH_TABLE_SYNC_PROGRESS_PATH.length()));
|
||||
} catch (Exception e) {
|
||||
logger.info(
|
||||
"invalid tiflash table replica sync progress key. key = " + kv.getKey().toString());
|
||||
continue;
|
||||
}
|
||||
double progress;
|
||||
try {
|
||||
progress = Double.parseDouble(kv.getValue().toString());
|
||||
} catch (Exception e) {
|
||||
logger.info(
|
||||
"invalid tiflash table replica sync progress value. value = "
|
||||
+ kv.getValue().toString());
|
||||
continue;
|
||||
}
|
||||
progressMap.put(tableId, progress);
|
||||
}
|
||||
tiflashReplicaMap = progressMap;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
public double getTiFlashReplicaProgress(long tableId) {
|
||||
return tiflashReplicaMap.getOrDefault(tableId, 0.0);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected PDBlockingStub getBlockingStub() {
|
||||
if (leaderWrapper == null) {
|
||||
|
|
@ -354,6 +514,8 @@ public class PDClient extends AbstractGRPCClient<PDBlockingStub, PDStub>
|
|||
header = RequestHeader.newBuilder().setClusterId(clusterId).build();
|
||||
tsoReq = TsoRequest.newBuilder().setHeader(header).setCount(1).build();
|
||||
this.pdAddrs = pdAddrs;
|
||||
this.etcdClient = Client.builder().endpoints(pdAddrs).build();
|
||||
this.tiflashReplicaMap = new ConcurrentHashMap<>();
|
||||
createLeaderWrapper(resp.getLeader().getClientUrls(0));
|
||||
service =
|
||||
Executors.newSingleThreadScheduledExecutor(
|
||||
|
|
@ -370,6 +532,11 @@ public class PDClient extends AbstractGRPCClient<PDBlockingStub, PDStub>
|
|||
1,
|
||||
1,
|
||||
TimeUnit.MINUTES);
|
||||
tiflashReplicaService =
|
||||
Executors.newSingleThreadScheduledExecutor(
|
||||
new ThreadFactoryBuilder().setDaemon(true).build());
|
||||
tiflashReplicaService.scheduleAtFixedRate(
|
||||
this::updateTiFlashReplicaStatus, 10, 10, TimeUnit.SECONDS);
|
||||
}
|
||||
|
||||
static class LeaderWrapper {
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@
|
|||
package org.tikv.common;
|
||||
|
||||
import com.google.protobuf.ByteString;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.Future;
|
||||
import org.tikv.common.meta.TiTimestamp;
|
||||
import org.tikv.common.region.TiRegion;
|
||||
|
|
@ -60,4 +61,6 @@ public interface ReadOnlyPDClient {
|
|||
Store getStore(BackOffer backOffer, long storeId);
|
||||
|
||||
Future<Store> getStoreAsync(BackOffer backOffer, long storeId);
|
||||
|
||||
List<Store> getAllStores(BackOffer backOffer);
|
||||
}
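
An illustrative call site for the new interface method (a sketch only; the back-off budget of 10000 is an arbitrary example value passed to `ConcreteBackOffer.newCustomBackOff`):

```java
import java.util.List;
import org.tikv.common.ReadOnlyPDClient;
import org.tikv.common.util.ConcreteBackOffer;
import org.tikv.kvproto.Metapb.Store;

class AllStoresExample {
  static void logStores(ReadOnlyPDClient pdClient) {
    List<Store> stores = pdClient.getAllStores(ConcreteBackOffer.newCustomBackOff(10000));
    for (Store store : stores) {
      System.out.println(store.getId() + " -> " + store.getAddress());
    }
  }
}
```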
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,180 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common;

import static org.tikv.common.operation.iterator.CoprocessorIterator.getHandleIterator;
import static org.tikv.common.operation.iterator.CoprocessorIterator.getRowIterator;
import static org.tikv.common.operation.iterator.CoprocessorIterator.getTiChunkIterator;

import com.google.protobuf.ByteString;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import org.tikv.common.columnar.TiChunk;
import org.tikv.common.key.Key;
import org.tikv.common.meta.TiDAGRequest;
import org.tikv.common.meta.TiTimestamp;
import org.tikv.common.operation.iterator.ConcreteScanIterator;
import org.tikv.common.operation.iterator.IndexScanIterator;
import org.tikv.common.row.Row;
import org.tikv.common.util.ConcreteBackOffer;
import org.tikv.common.util.RangeSplitter;
import org.tikv.common.util.RangeSplitter.RegionTask;
import org.tikv.kvproto.Kvrpcpb.KvPair;
import org.tikv.txn.KVClient;

public class Snapshot {
  private final TiTimestamp timestamp;
  private final TiSession session;
  private final TiConfiguration conf;

  public Snapshot(@Nonnull TiTimestamp timestamp, TiConfiguration conf) {
    this.timestamp = timestamp;
    this.conf = conf;
    this.session = TiSession.getInstance(conf);
  }

  public TiSession getSession() {
    return session;
  }

  public long getVersion() {
    return timestamp.getVersion();
  }

  public TiTimestamp getTimestamp() {
    return timestamp;
  }

  public byte[] get(byte[] key) {
    ByteString keyString = ByteString.copyFrom(key);
    ByteString value = get(keyString);
    return value.toByteArray();
  }

  public ByteString get(ByteString key) {
    try (KVClient client = new KVClient(session.getConf(), session.getRegionStoreClientBuilder())) {
      return client.get(key, timestamp.getVersion());
    }
  }

  public List<org.tikv.common.BytePairWrapper> batchGet(int backOffer, List<byte[]> keys) {
    List<ByteString> list = new ArrayList<>();
    for (byte[] key : keys) {
      list.add(ByteString.copyFrom(key));
    }
    try (KVClient client = new KVClient(session.getConf(), session.getRegionStoreClientBuilder())) {
      List<KvPair> kvPairList =
          client.batchGet(
              ConcreteBackOffer.newCustomBackOff(backOffer), list, timestamp.getVersion());
      return kvPairList
          .stream()
          .map(
              kvPair ->
                  new org.tikv.common.BytePairWrapper(
                      kvPair.getKey().toByteArray(), kvPair.getValue().toByteArray()))
          .collect(Collectors.toList());
    }
  }

  public Iterator<TiChunk> tableReadChunk(
      TiDAGRequest dagRequest, List<RegionTask> tasks, int numOfRows) {
    if (dagRequest.isDoubleRead()) {
      throw new UnsupportedOperationException(
          "double read case should first read handle in row-wise fashion");
    } else {
      return getTiChunkIterator(dagRequest, tasks, getSession(), numOfRows);
    }
  }

  /**
   * Issue a table read request
   *
   * @param dagRequest DAG request for coprocessor
   * @return an Iterator that contains all results from this select request.
   */
  public Iterator<Row> tableReadRow(TiDAGRequest dagRequest, long physicalId) {
    return tableReadRow(
        dagRequest,
        RangeSplitter.newSplitter(session.getRegionManager())
            .splitRangeByRegion(
                dagRequest.getRangesByPhysicalId(physicalId), dagRequest.getStoreType()));
  }

  /**
   * Lower level API for environments like Spark which have already done the key range split.
   * Performs a table scan.
   *
   * @param dagRequest DAGRequest for coprocessor
   * @param tasks RegionTasks of the coprocessor request to send
   * @return Row iterator to iterate over resulting rows
   */
  private Iterator<Row> tableReadRow(TiDAGRequest dagRequest, List<RegionTask> tasks) {
    if (dagRequest.isDoubleRead()) {
      Iterator<Long> iter = getHandleIterator(dagRequest, tasks, getSession());
      return new IndexScanIterator(this, dagRequest, iter);
    } else {
      return getRowIterator(dagRequest, tasks, getSession());
    }
  }

  /**
   * Lower level API for environments like Spark which have already done the key range split.
   * Performs a handle scan.
   *
   * @param dagRequest DAGRequest for coprocessor
   * @param tasks RegionTask of the coprocessor request to send
   * @return iterator over the resulting row handles
   */
  public Iterator<Long> indexHandleRead(TiDAGRequest dagRequest, List<RegionTask> tasks) {
    return getHandleIterator(dagRequest, tasks, session);
  }

  /**
   * scan all keys after startKey, inclusive
   *
   * @param startKey start of keys
   * @return iterator of kvPair
   */
  public Iterator<KvPair> scan(ByteString startKey) {
    return new ConcreteScanIterator(
        session.getConf(),
        session.getRegionStoreClientBuilder(),
        startKey,
        timestamp.getVersion(),
        Integer.MAX_VALUE);
  }

  /**
   * scan all keys with prefix
   *
   * @param prefix prefix of keys
   * @return iterator of kvPair
   */
  public Iterator<KvPair> scanPrefix(ByteString prefix) {
    ByteString nextPrefix = Key.toRawKey(prefix).nextPrefix().toByteString();
    return new ConcreteScanIterator(
        session.getConf(),
        session.getRegionStoreClientBuilder(),
        prefix,
        nextPrefix,
        timestamp.getVersion());
  }

  public TiConfiguration getConf() {
    return conf;
  }
}
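
The Snapshot class above is the entry point for reads pinned to a single timestamp: point get, batch get, coprocessor table reads, and raw scans. A minimal usage sketch follows; the PD address and the `TiConfiguration.createDefault` factory are assumptions, not part of this diff.

```java
import com.google.protobuf.ByteString;
import java.util.Arrays;
import java.util.Iterator;
import org.tikv.common.Snapshot;
import org.tikv.common.TiConfiguration;
import org.tikv.common.TiSession;
import org.tikv.kvproto.Kvrpcpb.KvPair;

public class SnapshotReadSketch {
  public static void main(String[] args) throws Exception {
    // Assumed: a PD endpoint at 127.0.0.1:2379 and a createDefault(...) factory on TiConfiguration.
    TiConfiguration conf = TiConfiguration.createDefault("127.0.0.1:2379");
    TiSession session = TiSession.getInstance(conf);

    // All reads below observe the same timestamp fetched from PD.
    Snapshot snapshot = session.createSnapshot();

    // Point get returns the raw value bytes at the snapshot version.
    byte[] value = snapshot.get("k1".getBytes());
    System.out.println("k1 -> " + Arrays.toString(value));

    // Batch get: the int argument is the backoff budget handed to ConcreteBackOffer.
    snapshot.batchGet(2000, Arrays.asList("k1".getBytes(), "k2".getBytes()));

    // Prefix scan walks every key-value pair whose key starts with "k".
    Iterator<KvPair> it = snapshot.scanPrefix(ByteString.copyFromUtf8("k"));
    while (it.hasNext()) {
      KvPair pair = it.next();
      System.out.println(pair.getKey().toStringUtf8() + " -> " + pair.getValue().toStringUtf8());
    }

    session.close();
  }
}
```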
@@ -15,8 +15,14 @@
package org.tikv.common;

import java.util.List;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tikv.common.region.TiStoreType;
import org.tikv.common.util.BackOffer;
import org.tikv.common.util.ConcreteBackOffer;
import org.tikv.kvproto.Metapb;

public class StoreVersion {

@@ -51,6 +57,39 @@ public class StoreVersion {
    return new StoreVersion(v0).toIntVersion() - new StoreVersion(v1).toIntVersion();
  }

  public static boolean minTiKVVersion(String version, PDClient pdClient) {
    StoreVersion storeVersion = new StoreVersion(version);

    BackOffer bo = ConcreteBackOffer.newCustomBackOff(BackOffer.PD_INFO_BACKOFF);
    List<Metapb.Store> storeList =
        pdClient
            .getAllStores(bo)
            .stream()
            .filter(
                store ->
                    !isTiFlash(store)
                        && (store.getState() == Metapb.StoreState.Up
                            || store.getState() == Metapb.StoreState.Offline))
            .collect(Collectors.toList());

    for (Metapb.Store store : storeList) {
      if (storeVersion.greatThan(new StoreVersion(store.getVersion()))) {
        return false;
      }
    }
    return true;
  }

  private static boolean isTiFlash(Metapb.Store store) {
    for (Metapb.StoreLabel label : store.getLabelsList()) {
      if (label.getKey().equals(TiStoreType.TiFlash.getLabelKey())
          && label.getValue().equals(TiStoreType.TiFlash.getLabelValue())) {
        return true;
      }
    }
    return false;
  }

  private int toIntVersion() {
    return v0 * SCALE * SCALE + v1 * SCALE + v2;
  }
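
minTiKVVersion is effectively a cluster-wide feature gate: it returns false as soon as any Up or Offline non-TiFlash store reports a version older than the one requested. A hedged sketch of how a caller might use it; the session setup is the same assumed placeholder as in the Snapshot example above.

```java
import org.tikv.common.StoreVersion;
import org.tikv.common.TiConfiguration;
import org.tikv.common.TiSession;

public class VersionGateSketch {
  public static void main(String[] args) throws Exception {
    TiSession session = TiSession.getInstance(TiConfiguration.createDefault("127.0.0.1:2379"));

    // True only if every relevant TiKV store runs 3.0.0 or newer.
    boolean atLeast300 = StoreVersion.minTiKVVersion("3.0.0", session.getPDClient());
    if (!atLeast300) {
      System.out.println("cluster contains TiKV stores older than 3.0.0; using fallback path");
    }

    session.close();
  }
}
```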
@@ -0,0 +1,75 @@
/*
 * Copyright 2019 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import org.tikv.common.key.IndexKey;
import org.tikv.common.key.Key;
import org.tikv.common.key.RowKey;
import org.tikv.common.meta.TiIndexInfo;
import org.tikv.common.meta.TiTableInfo;
import org.tikv.common.region.TiRegion;

public class TiBatchWriteUtils {

  private static final Comparator<TiIndexInfo> tiIndexInfoComparator =
      Comparator.comparing(TiIndexInfo::getId);

  public static List<TiRegion> getRegionByIndex(
      TiSession session, TiTableInfo table, TiIndexInfo index) {
    ArrayList<TiRegion> regionList = new ArrayList<>();
    Key min = IndexKey.toIndexKey(table.getId(), index.getId());
    Key max = min.nextPrefix();

    while (min.compareTo(max) < 0) {
      TiRegion region = session.getRegionManager().getRegionByKey(min.toByteString());
      regionList.add(region);
      min = Key.toRawKey(region.getEndKey());
    }
    return regionList;
  }

  public static List<TiRegion> getIndexRegions(TiSession session, TiTableInfo table) {
    return table
        .getIndices()
        .stream()
        .sorted(tiIndexInfoComparator)
        .flatMap(index -> getRegionByIndex(session, table, index).stream())
        .collect(Collectors.toList());
  }

  public static List<TiRegion> getRecordRegions(TiSession session, TiTableInfo table) {
    ArrayList<TiRegion> regionList = new ArrayList<>();
    Key key = RowKey.createMin(table.getId());
    RowKey endRowKey = RowKey.createBeyondMax(table.getId());

    while (key.compareTo(endRowKey) < 0) {
      TiRegion region = session.getRegionManager().getRegionByKey(key.toByteString());
      regionList.add(region);
      key = Key.toRawKey(region.getEndKey());
    }
    return regionList;
  }

  public static List<TiRegion> getRegionsByTable(TiSession session, TiTableInfo table) {
    List<TiRegion> recordRegions = getIndexRegions(session, table);
    recordRegions.addAll(getRecordRegions(session, table));
    return recordRegions;
  }
}
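
TiBatchWriteUtils walks region metadata key-range by key-range, which batch writers use to pre-split work per region. A short sketch that lists the regions backing a table; the database and table names are placeholders and the catalog accessor comes from the TiSession changes later in this diff.

```java
import java.util.List;
import org.tikv.common.TiBatchWriteUtils;
import org.tikv.common.TiConfiguration;
import org.tikv.common.TiSession;
import org.tikv.common.meta.TiTableInfo;
import org.tikv.common.region.TiRegion;

public class RegionListingSketch {
  public static void main(String[] args) throws Exception {
    TiSession session = TiSession.getInstance(TiConfiguration.createDefault("127.0.0.1:2379"));
    TiTableInfo table = session.getCatalog().getTable("test", "t1");

    // Regions holding the table's row data only.
    List<TiRegion> recordRegions = TiBatchWriteUtils.getRecordRegions(session, table);
    // Index regions first, then record regions, as getRegionsByTable concatenates them.
    List<TiRegion> allRegions = TiBatchWriteUtils.getRegionsByTable(session, table);
    System.out.println(recordRegions.size() + " record regions, " + allRegions.size() + " total");

    session.close();
  }
}
```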
@ -0,0 +1,154 @@
|
|||
/*
|
||||
* Copyright 2019 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common;
|
||||
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import java.io.IOException;
|
||||
import java.sql.Connection;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.ResultSetMetaData;
|
||||
import java.sql.SQLException;
|
||||
import java.sql.Statement;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
public class TiDBJDBCClient implements AutoCloseable {
|
||||
private static final String UNLOCK_TABLES_SQL = "unlock tables";
|
||||
private static final String SELECT_TIDB_CONFIG_SQL = "select @@tidb_config";
|
||||
private static final String ENABLE_TABLE_LOCK_KEY = "enable-table-lock";
|
||||
private static final Boolean ENABLE_TABLE_LOCK_DEFAULT = false;
|
||||
private static final String DELAY_CLEAN_TABLE_LOCK = "delay-clean-table-lock";
|
||||
private static final int DELAY_CLEAN_TABLE_LOCK_DEFAULT = 0;
|
||||
private static final String TIDB_ROW_FORMAT_VERSION_SQL = "select @@tidb_row_format_version";
|
||||
private static final int TIDB_ROW_FORMAT_VERSION_DEFAULT = 1;
|
||||
private final Logger logger = LoggerFactory.getLogger(getClass().getName());
|
||||
private final Connection connection;
|
||||
|
||||
public TiDBJDBCClient(Connection connection) {
|
||||
this.connection = connection;
|
||||
}
|
||||
|
||||
public boolean isEnableTableLock() throws IOException, SQLException {
|
||||
Map<String, Object> configMap = readConfMapFromTiDB();
|
||||
Object enableTableLock =
|
||||
configMap.getOrDefault(ENABLE_TABLE_LOCK_KEY, ENABLE_TABLE_LOCK_DEFAULT);
|
||||
return (Boolean) enableTableLock;
|
||||
}
|
||||
|
||||
/**
* get delay-clean-table-lock config from tidb
*
* @return the configured delay in milliseconds
* @throws IOException if the config JSON cannot be parsed
* @throws SQLException if the config query fails
*/
|
||||
public int getDelayCleanTableLock() throws IOException, SQLException {
|
||||
Map<String, Object> configMap = readConfMapFromTiDB();
|
||||
Object enableTableLock =
|
||||
configMap.getOrDefault(DELAY_CLEAN_TABLE_LOCK, DELAY_CLEAN_TABLE_LOCK_DEFAULT);
|
||||
return (int) enableTableLock;
|
||||
}
|
||||
|
||||
/**
|
||||
* get row format version from tidb
|
||||
*
|
||||
* @return 1 if should not encode and write with new row format.(default) 2 if encode and write
|
||||
* with new row format.(default on v4.0.0 cluster)
|
||||
*/
|
||||
public int getRowFormatVersion() {
|
||||
try {
|
||||
List<List<Object>> result = queryTiDBViaJDBC(TIDB_ROW_FORMAT_VERSION_SQL);
|
||||
if (result.isEmpty()) {
|
||||
// default set to 1
|
||||
return TIDB_ROW_FORMAT_VERSION_DEFAULT;
|
||||
} else {
|
||||
Object version = result.get(0).get(0);
|
||||
if (version instanceof String) {
|
||||
return Integer.parseInt((String) version);
|
||||
} else if (version instanceof Number) {
|
||||
return ((Number) version).intValue();
|
||||
} else {
|
||||
return TIDB_ROW_FORMAT_VERSION_DEFAULT;
|
||||
}
|
||||
}
|
||||
} catch (Exception ignored) {
|
||||
return TIDB_ROW_FORMAT_VERSION_DEFAULT;
|
||||
}
|
||||
}
|
||||
|
||||
public boolean lockTableWriteLocal(String databaseName, String tableName) throws SQLException {
|
||||
try (Statement tidbStmt = connection.createStatement()) {
|
||||
String sql = "lock tables `" + databaseName + "`.`" + tableName + "` write local";
|
||||
int result = tidbStmt.executeUpdate(sql);
|
||||
return result == 0;
|
||||
}
|
||||
}
|
||||
|
||||
public boolean unlockTables() throws SQLException {
|
||||
try (Statement tidbStmt = connection.createStatement()) {
|
||||
int result = tidbStmt.executeUpdate(UNLOCK_TABLES_SQL);
|
||||
return result == 0;
|
||||
}
|
||||
}
|
||||
|
||||
public boolean dropTable(String databaseName, String tableName) throws SQLException {
|
||||
try (Statement tidbStmt = connection.createStatement()) {
|
||||
String sql = "drop table if exists `" + databaseName + "`.`" + tableName + "`";
|
||||
return tidbStmt.execute(sql);
|
||||
}
|
||||
}
|
||||
|
||||
private Map<String, Object> readConfMapFromTiDB() throws SQLException, IOException {
|
||||
String configJSON = (String) queryTiDBViaJDBC(SELECT_TIDB_CONFIG_SQL).get(0).get(0);
|
||||
ObjectMapper objectMapper = new ObjectMapper();
|
||||
TypeReference<HashMap<String, Object>> typeRef =
|
||||
new TypeReference<HashMap<String, Object>>() {};
|
||||
return objectMapper.readValue(configJSON, typeRef);
|
||||
}
|
||||
|
||||
public boolean isClosed() throws SQLException {
|
||||
return connection.isClosed();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws Exception {
|
||||
connection.close();
|
||||
}
|
||||
|
||||
private List<List<Object>> queryTiDBViaJDBC(String query) throws SQLException {
|
||||
ArrayList<List<Object>> result = new ArrayList<>();
|
||||
|
||||
try (Statement tidbStmt = connection.createStatement()) {
|
||||
ResultSet resultSet = tidbStmt.executeQuery(query);
|
||||
ResultSetMetaData rsMetaData = resultSet.getMetaData();
|
||||
|
||||
while (resultSet.next()) {
|
||||
ArrayList<Object> row = new ArrayList<>();
|
||||
for (int i = 1; i <= rsMetaData.getColumnCount(); i++) {
|
||||
row.add(resultSet.getObject(i));
|
||||
}
|
||||
result.add(row);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
|
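
TiDBJDBCClient wraps an ordinary JDBC connection to TiDB for the lock/unlock statements and config lookups used around batch writes. A hedged sketch, assuming the MySQL JDBC driver and a TiDB endpoint at 127.0.0.1:4000; only methods shown in this diff are used.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import org.tikv.common.TiDBJDBCClient;

public class TableLockSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder connection settings; any JDBC connection to TiDB will do.
    Connection conn = DriverManager.getConnection("jdbc:mysql://127.0.0.1:4000/test", "root", "");
    try (TiDBJDBCClient client = new TiDBJDBCClient(conn)) {
      System.out.println("row format version: " + client.getRowFormatVersion());

      // Table locks only work when TiDB is started with enable-table-lock = true.
      if (client.isEnableTableLock()) {
        boolean locked = client.lockTableWriteLocal("test", "t1");
        System.out.println("write local lock acquired: " + locked);
        client.unlockTables();
      }
    }
  }
}
```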
@ -16,11 +16,32 @@
|
|||
package org.tikv.common;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.util.concurrent.ThreadFactoryBuilder;
|
||||
import com.google.protobuf.ByteString;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.tikv.common.catalog.Catalog;
|
||||
import org.tikv.common.event.CacheInvalidateEvent;
|
||||
import org.tikv.common.exception.TiKVException;
|
||||
import org.tikv.common.key.Key;
|
||||
import org.tikv.common.meta.TiTimestamp;
|
||||
import org.tikv.common.region.RegionManager;
|
||||
import org.tikv.common.region.RegionStoreClient;
|
||||
import org.tikv.common.region.RegionStoreClient.RegionStoreClientBuilder;
|
||||
import org.tikv.common.util.ChannelFactory;
|
||||
import org.tikv.common.region.TiRegion;
|
||||
import org.tikv.common.util.*;
|
||||
import org.tikv.kvproto.Metapb;
|
||||
import org.tikv.raw.RawKVClient;
|
||||
import org.tikv.txn.KVClient;
|
||||
import org.tikv.txn.TxnKVClient;
|
||||
|
||||
/**
|
||||
* TiSession is the holder for PD Client, Store pdClient and PD Cache All sessions share common
|
||||
|
|
@ -28,48 +49,166 @@ import org.tikv.txn.KVClient;
|
|||
* thread-safe but it's also recommended to have multiple session avoiding lock contention
|
||||
*/
|
||||
public class TiSession implements AutoCloseable {
|
||||
private static final Logger logger = LoggerFactory.getLogger(TiSession.class);
|
||||
private static final Map<String, TiSession> sessionCachedMap = new HashMap<>();
|
||||
private final TiConfiguration conf;
|
||||
private final PDClient pdClient;
|
||||
private final ChannelFactory channelFactory;
|
||||
private Function<CacheInvalidateEvent, Void> cacheInvalidateCallback;
|
||||
// below object creation is either heavy or making connection (pd), pending for lazy loading
|
||||
private volatile PDClient client;
|
||||
private volatile Catalog catalog;
|
||||
private volatile ExecutorService indexScanThreadPool;
|
||||
private volatile ExecutorService tableScanThreadPool;
|
||||
private volatile RegionManager regionManager;
|
||||
private volatile RegionStoreClient.RegionStoreClientBuilder clientBuilder;
|
||||
private boolean isClosed = false;
|
||||
|
||||
public TiSession(TiConfiguration conf) {
|
||||
this.conf = conf;
|
||||
this.channelFactory = new ChannelFactory(conf.getMaxFrameSize());
|
||||
this.pdClient = PDClient.createRaw(conf, channelFactory);
|
||||
}
|
||||
|
||||
public TiConfiguration getConf() {
|
||||
return conf;
|
||||
this.client = PDClient.createRaw(conf, channelFactory);
|
||||
}
|
||||
|
||||
public static TiSession create(TiConfiguration conf) {
|
||||
return new TiSession(conf);
|
||||
}
|
||||
|
||||
public static TiSession getInstance(TiConfiguration conf) {
|
||||
synchronized (sessionCachedMap) {
|
||||
String key = conf.getPdAddrsString();
|
||||
if (sessionCachedMap.containsKey(key)) {
|
||||
return sessionCachedMap.get(key);
|
||||
}
|
||||
|
||||
TiSession newSession = new TiSession(conf);
|
||||
sessionCachedMap.put(key, newSession);
|
||||
return newSession;
|
||||
}
|
||||
}
|
||||
|
||||
public RawKVClient createRawClient() {
|
||||
// Create new Region Manager avoiding thread contentions
|
||||
RegionManager regionMgr = new RegionManager(pdClient);
|
||||
RegionManager regionMgr = new RegionManager(client);
|
||||
RegionStoreClientBuilder builder =
|
||||
new RegionStoreClientBuilder(conf, channelFactory, regionMgr, pdClient);
|
||||
new RegionStoreClientBuilder(conf, channelFactory, regionMgr, client);
|
||||
return new RawKVClient(conf, builder);
|
||||
}
|
||||
|
||||
public KVClient createTxnKVClient() {
|
||||
public KVClient createKVClient() {
|
||||
// Create new Region Manager avoiding thread contentions
|
||||
RegionManager regionMgr = new RegionManager(pdClient);
|
||||
RegionManager regionMgr = new RegionManager(client);
|
||||
RegionStoreClientBuilder builder =
|
||||
new RegionStoreClientBuilder(conf, channelFactory, regionMgr, pdClient);
|
||||
new RegionStoreClientBuilder(conf, channelFactory, regionMgr, client);
|
||||
return new KVClient(conf, builder);
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
public RegionManager getRegionManager() {
|
||||
return new RegionManager(pdClient);
|
||||
public TxnKVClient createTxnClient() {
|
||||
return new TxnKVClient(conf, this.getRegionStoreClientBuilder(), this.getPDClient());
|
||||
}
|
||||
|
||||
public RegionStoreClient.RegionStoreClientBuilder getRegionStoreClientBuilder() {
|
||||
RegionStoreClient.RegionStoreClientBuilder res = clientBuilder;
|
||||
if (res == null) {
|
||||
synchronized (this) {
|
||||
if (clientBuilder == null) {
|
||||
clientBuilder =
|
||||
new RegionStoreClient.RegionStoreClientBuilder(
|
||||
conf, this.channelFactory, this.getRegionManager(), this.getPDClient());
|
||||
}
|
||||
res = clientBuilder;
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
public TiConfiguration getConf() {
|
||||
return conf;
|
||||
}
|
||||
|
||||
public TiTimestamp getTimestamp() {
|
||||
return getPDClient().getTimestamp(ConcreteBackOffer.newTsoBackOff());
|
||||
}
|
||||
|
||||
public Snapshot createSnapshot() {
|
||||
return new Snapshot(getTimestamp(), this.conf);
|
||||
}
|
||||
|
||||
public Snapshot createSnapshot(TiTimestamp ts) {
|
||||
return new Snapshot(ts, conf);
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
public PDClient getPDClient() {
|
||||
return pdClient;
|
||||
PDClient res = client;
|
||||
if (res == null) {
|
||||
synchronized (this) {
|
||||
if (client == null) {
|
||||
client = PDClient.createRaw(this.getConf(), channelFactory);
|
||||
}
|
||||
res = client;
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
public Catalog getCatalog() {
|
||||
Catalog res = catalog;
|
||||
if (res == null) {
|
||||
synchronized (this) {
|
||||
if (catalog == null) {
|
||||
catalog = new Catalog(this::createSnapshot, conf.ifShowRowId(), conf.getDBPrefix());
|
||||
}
|
||||
res = catalog;
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
public synchronized RegionManager getRegionManager() {
|
||||
RegionManager res = regionManager;
|
||||
if (res == null) {
|
||||
synchronized (this) {
|
||||
if (regionManager == null) {
|
||||
regionManager = new RegionManager(getPDClient(), this.cacheInvalidateCallback);
|
||||
}
|
||||
res = regionManager;
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
public ExecutorService getThreadPoolForIndexScan() {
|
||||
ExecutorService res = indexScanThreadPool;
|
||||
if (res == null) {
|
||||
synchronized (this) {
|
||||
if (indexScanThreadPool == null) {
|
||||
indexScanThreadPool =
|
||||
Executors.newFixedThreadPool(
|
||||
conf.getIndexScanConcurrency(),
|
||||
new ThreadFactoryBuilder()
|
||||
.setNameFormat("index-scan-pool-%d")
|
||||
.setDaemon(true)
|
||||
.build());
|
||||
}
|
||||
res = indexScanThreadPool;
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
public ExecutorService getThreadPoolForTableScan() {
|
||||
ExecutorService res = tableScanThreadPool;
|
||||
if (res == null) {
|
||||
synchronized (this) {
|
||||
if (tableScanThreadPool == null) {
|
||||
tableScanThreadPool =
|
||||
Executors.newFixedThreadPool(
|
||||
conf.getTableScanConcurrency(),
|
||||
new ThreadFactoryBuilder().setDaemon(true).build());
|
||||
}
|
||||
res = tableScanThreadPool;
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
|
|
@ -77,9 +216,137 @@ public class TiSession implements AutoCloseable {
|
|||
return channelFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* This is used for setting call back function to invalidate cache information
|
||||
*
|
||||
* @param callBackFunc callback function
|
||||
*/
|
||||
public void injectCallBackFunc(Function<CacheInvalidateEvent, Void> callBackFunc) {
|
||||
this.cacheInvalidateCallback = callBackFunc;
|
||||
}
|
||||
|
||||
/**
|
||||
* split region and scatter
|
||||
*
|
||||
* @param splitKeys
|
||||
*/
|
||||
public void splitRegionAndScatter(
|
||||
List<byte[]> splitKeys,
|
||||
int splitRegionBackoffMS,
|
||||
int scatterRegionBackoffMS,
|
||||
int scatterWaitMS) {
|
||||
logger.info(String.format("split key's size is %d", splitKeys.size()));
|
||||
long startMS = System.currentTimeMillis();
|
||||
|
||||
// split region
|
||||
List<TiRegion> newRegions =
|
||||
splitRegion(
|
||||
splitKeys
|
||||
.stream()
|
||||
.map(k -> Key.toRawKey(k).next().toByteString())
|
||||
.collect(Collectors.toList()),
|
||||
ConcreteBackOffer.newCustomBackOff(splitRegionBackoffMS));
|
||||
|
||||
// scatter region
|
||||
for (TiRegion newRegion : newRegions) {
|
||||
try {
|
||||
getPDClient()
|
||||
.scatterRegion(newRegion, ConcreteBackOffer.newCustomBackOff(scatterRegionBackoffMS));
|
||||
} catch (Exception e) {
|
||||
logger.warn(String.format("failed to scatter region: %d", newRegion.getId()), e);
|
||||
}
|
||||
}
|
||||
|
||||
// wait scatter region finish
|
||||
if (scatterWaitMS > 0) {
|
||||
logger.info("start to wait scatter region finish");
|
||||
long scatterRegionStartMS = System.currentTimeMillis();
|
||||
for (TiRegion newRegion : newRegions) {
|
||||
long remainMS = (scatterRegionStartMS + scatterWaitMS) - System.currentTimeMillis();
|
||||
if (remainMS <= 0) {
|
||||
logger.warn("wait scatter region timeout");
|
||||
return;
|
||||
}
|
||||
getPDClient()
|
||||
.waitScatterRegionFinish(newRegion, ConcreteBackOffer.newCustomBackOff((int) remainMS));
|
||||
}
|
||||
} else {
|
||||
logger.info("skip to wait scatter region finish");
|
||||
}
|
||||
|
||||
long endMS = System.currentTimeMillis();
|
||||
logger.info("splitRegionAndScatter cost {} seconds", (endMS - startMS) / 1000);
|
||||
}
|
||||
|
||||
private List<TiRegion> splitRegion(List<ByteString> splitKeys, BackOffer backOffer) {
|
||||
List<TiRegion> regions = new ArrayList<>();
|
||||
|
||||
Map<TiRegion, List<ByteString>> groupKeys = groupKeysByRegion(splitKeys);
|
||||
for (Map.Entry<TiRegion, List<ByteString>> entry : groupKeys.entrySet()) {
|
||||
|
||||
Pair<TiRegion, Metapb.Store> pair =
|
||||
getRegionManager().getRegionStorePairByKey(entry.getKey().getStartKey());
|
||||
TiRegion region = pair.first;
|
||||
Metapb.Store store = pair.second;
|
||||
List<ByteString> splits =
|
||||
entry
|
||||
.getValue()
|
||||
.stream()
|
||||
.filter(k -> !k.equals(region.getStartKey()) && !k.equals(region.getEndKey()))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
if (splits.isEmpty()) {
|
||||
logger.warn(
|
||||
"split key equal to region start key or end key. Region splitting is not needed.");
|
||||
} else {
|
||||
logger.info("start to split region id={}, split size={}", region.getId(), splits.size());
|
||||
List<TiRegion> newRegions;
|
||||
try {
|
||||
newRegions = getRegionStoreClientBuilder().build(region, store).splitRegion(splits);
|
||||
} catch (final TiKVException e) {
|
||||
// retry
|
||||
logger.warn("ReSplitting ranges for splitRegion", e);
|
||||
clientBuilder.getRegionManager().invalidateRegion(region.getId());
|
||||
backOffer.doBackOff(BackOffFunction.BackOffFuncType.BoRegionMiss, e);
|
||||
newRegions = splitRegion(splits, backOffer);
|
||||
}
|
||||
logger.info("region id={}, new region size={}", region.getId(), newRegions.size());
|
||||
regions.addAll(newRegions);
|
||||
}
|
||||
}
|
||||
|
||||
logger.info("splitRegion: return region size={}", regions.size());
|
||||
return regions;
|
||||
}
|
||||
|
||||
private Map<TiRegion, List<ByteString>> groupKeysByRegion(List<ByteString> keys) {
|
||||
return keys.stream()
|
||||
.collect(Collectors.groupingBy(clientBuilder.getRegionManager()::getRegionByKey));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
pdClient.close();
|
||||
channelFactory.close();
|
||||
public synchronized void close() throws Exception {
|
||||
if (isClosed) {
|
||||
logger.warn("this TiSession is already closed!");
|
||||
return;
|
||||
}
|
||||
|
||||
isClosed = true;
|
||||
synchronized (sessionCachedMap) {
|
||||
sessionCachedMap.remove(conf.getPdAddrsString());
|
||||
}
|
||||
|
||||
if (tableScanThreadPool != null) {
|
||||
tableScanThreadPool.shutdownNow();
|
||||
}
|
||||
if (indexScanThreadPool != null) {
|
||||
indexScanThreadPool.shutdownNow();
|
||||
}
|
||||
if (client != null) {
|
||||
getPDClient().close();
|
||||
}
|
||||
if (catalog != null) {
|
||||
getCatalog().close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
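
The reworked TiSession above lazily builds its PD client, region manager, catalog and scan thread pools, and caches one session per PD address string, so getInstance with the same configuration returns the same object. A lifecycle sketch under the same assumed PD address as the earlier examples:

```java
import org.tikv.common.Snapshot;
import org.tikv.common.TiConfiguration;
import org.tikv.common.TiSession;
import org.tikv.raw.RawKVClient;
import org.tikv.txn.TxnKVClient;

public class SessionLifecycleSketch {
  public static void main(String[] args) throws Exception {
    TiConfiguration conf = TiConfiguration.createDefault("127.0.0.1:2379"); // assumed factory
    TiSession session = TiSession.getInstance(conf); // cached per PD address string
    try {
      Snapshot snapshot = session.createSnapshot();       // timestamped reads
      RawKVClient rawClient = session.createRawClient();  // raw KV API
      TxnKVClient txnClient = session.createTxnClient();  // transactional KV API
      // ... use the clients ...
    } finally {
      // close() removes the session from the cache and shuts the lazy components down;
      // calling it twice only logs a warning.
      session.close();
    }
  }
}
```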
@ -0,0 +1,265 @@
|
|||
/*
|
||||
* Copyright 2019 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.tikv.common.allocator;
|
||||
|
||||
import com.google.common.primitives.UnsignedLongs;
|
||||
import com.google.protobuf.ByteString;
|
||||
import java.io.Serializable;
|
||||
import java.util.Arrays;
|
||||
import java.util.function.Function;
|
||||
import org.tikv.common.Snapshot;
|
||||
import org.tikv.common.TiConfiguration;
|
||||
import org.tikv.common.TiSession;
|
||||
import org.tikv.common.codec.CodecDataInput;
|
||||
import org.tikv.common.codec.CodecDataOutput;
|
||||
import org.tikv.common.codec.MetaCodec;
|
||||
import org.tikv.common.exception.AllocateRowIDOverflowException;
|
||||
import org.tikv.common.exception.TiBatchWriteException;
|
||||
import org.tikv.common.meta.TiTableInfo;
|
||||
import org.tikv.common.util.BackOffer;
|
||||
import org.tikv.common.util.ConcreteBackOffer;
|
||||
import org.tikv.txn.TwoPhaseCommitter;
|
||||
|
||||
/**
|
||||
* RowIDAllocator reads the current start from TiKV and writes 'start + step' back to TiKV. It is
* designed to allocate all ids for the data to be written at once, hence it does not need to run
* inside a txn.
|
||||
*
|
||||
* <p>(start, end] is allocated
|
||||
*/
|
||||
public final class RowIDAllocator implements Serializable {
|
||||
private final long maxShardRowIDBits;
|
||||
private final long dbId;
|
||||
private final TiConfiguration conf;
|
||||
private final long step;
|
||||
private long end;
|
||||
|
||||
private RowIDAllocator(long maxShardRowIDBits, long dbId, long step, TiConfiguration conf) {
|
||||
this.maxShardRowIDBits = maxShardRowIDBits;
|
||||
this.dbId = dbId;
|
||||
this.step = step;
|
||||
this.conf = conf;
|
||||
}
|
||||
|
||||
/**
* @param index should be >= 1
* @return the row id with the shard bits mixed in for the given index
*/
|
||||
public long getShardRowId(long index) {
|
||||
return getShardRowId(maxShardRowIDBits, index, index + getStart());
|
||||
}
|
||||
|
||||
static long getShardRowId(long maxShardRowIDBits, long partitionIndex, long rowID) {
|
||||
if (maxShardRowIDBits <= 0 || maxShardRowIDBits >= 16) {
|
||||
return rowID;
|
||||
}
|
||||
|
||||
// assert rowID < Math.pow(2, 64 - maxShardRowIDBits)
|
||||
|
||||
long partition = partitionIndex & ((1L << maxShardRowIDBits) - 1);
|
||||
return rowID | (partition << (64 - maxShardRowIDBits - 1));
|
||||
}
|
||||
|
||||
public static RowIDAllocator create(
|
||||
long dbId, TiTableInfo table, TiConfiguration conf, boolean unsigned, long step) {
|
||||
RowIDAllocator allocator = new RowIDAllocator(table.getMaxShardRowIDBits(), dbId, step, conf);
|
||||
if (unsigned) {
|
||||
allocator.initUnsigned(
|
||||
TiSession.getInstance(conf).createSnapshot(),
|
||||
table.getId(),
|
||||
table.getMaxShardRowIDBits());
|
||||
} else {
|
||||
allocator.initSigned(
|
||||
TiSession.getInstance(conf).createSnapshot(),
|
||||
table.getId(),
|
||||
table.getMaxShardRowIDBits());
|
||||
}
|
||||
|
||||
return allocator;
|
||||
}
|
||||
|
||||
public long getStart() {
|
||||
return end - step;
|
||||
}
|
||||
|
||||
public long getEnd() {
|
||||
return end;
|
||||
}
|
||||
|
||||
// set key value pair to tikv via two phase committer protocol.
|
||||
private void set(ByteString key, byte[] value) {
|
||||
TiSession session = TiSession.getInstance(conf);
|
||||
TwoPhaseCommitter twoPhaseCommitter =
|
||||
new TwoPhaseCommitter(conf, session.getTimestamp().getVersion());
|
||||
|
||||
twoPhaseCommitter.prewritePrimaryKey(
|
||||
ConcreteBackOffer.newCustomBackOff(BackOffer.PREWRITE_MAX_BACKOFF),
|
||||
key.toByteArray(),
|
||||
value);
|
||||
|
||||
twoPhaseCommitter.commitPrimaryKey(
|
||||
ConcreteBackOffer.newCustomBackOff(BackOffer.BATCH_COMMIT_BACKOFF),
|
||||
key.toByteArray(),
|
||||
session.getTimestamp().getVersion());
|
||||
|
||||
try {
|
||||
twoPhaseCommitter.close();
|
||||
} catch (Throwable ignored) {
|
||||
}
|
||||
}
|
||||
|
||||
private void updateMeta(ByteString key, byte[] oldVal, Snapshot snapshot) {
|
||||
// 1. encode hash meta key
|
||||
// 2. load meta via hash meta key from TiKV
|
||||
// 3. update the meta's field count and set it back to TiKV
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
ByteString metaKey = MetaCodec.encodeHashMetaKey(cdo, key.toByteArray());
|
||||
long fieldCount;
|
||||
ByteString metaVal = snapshot.get(metaKey);
|
||||
|
||||
// decode long from bytes
|
||||
// big endian the 8 bytes
|
||||
fieldCount = new CodecDataInput(metaVal.toByteArray()).readLong();
|
||||
|
||||
// update the meta field count only if oldVal is null or empty
|
||||
if (oldVal == null || oldVal.length == 0) {
|
||||
fieldCount++;
|
||||
cdo.reset();
|
||||
cdo.writeLong(fieldCount);
|
||||
|
||||
set(metaKey, cdo.toBytes());
|
||||
}
|
||||
}
|
||||
|
||||
private long updateHash(
|
||||
ByteString key,
|
||||
ByteString field,
|
||||
Function<byte[], byte[]> calculateNewVal,
|
||||
Snapshot snapshot) {
|
||||
// 1. encode hash data key
|
||||
// 2. get value in byte from get operation
|
||||
// 3. calculate new value via calculateNewVal
|
||||
// 4. check old value equals to new value or not
|
||||
// 5. set the new value back to TiKV via 2pc
|
||||
// 6. encode a hash meta key
|
||||
// 7. update a hash meta field count if needed
|
||||
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
MetaCodec.encodeHashDataKey(cdo, key.toByteArray(), field.toByteArray());
|
||||
ByteString dataKey = cdo.toByteString();
|
||||
byte[] oldVal = snapshot.get(dataKey.toByteArray());
|
||||
|
||||
byte[] newVal = calculateNewVal.apply(oldVal);
|
||||
if (Arrays.equals(newVal, oldVal)) {
|
||||
// not need to update
|
||||
return 0L;
|
||||
}
|
||||
|
||||
set(dataKey, newVal);
|
||||
updateMeta(key, oldVal, snapshot);
|
||||
return Long.parseLong(new String(newVal));
|
||||
}
|
||||
|
||||
private static boolean isDBExisted(long dbId, Snapshot snapshot) {
|
||||
ByteString dbKey = MetaCodec.encodeDatabaseID(dbId);
|
||||
ByteString json = MetaCodec.hashGet(MetaCodec.KEY_DBs, dbKey, snapshot);
|
||||
return json != null && !json.isEmpty();
|
||||
}
|
||||
|
||||
private static boolean isTableExisted(long dbId, long tableId, Snapshot snapshot) {
|
||||
ByteString dbKey = MetaCodec.encodeDatabaseID(dbId);
|
||||
ByteString tableKey = MetaCodec.tableKey(tableId);
|
||||
return !MetaCodec.hashGet(dbKey, tableKey, snapshot).isEmpty();
|
||||
}
|
||||
|
||||
public static boolean shardRowBitsOverflow(
|
||||
long base, long step, long shardRowBits, boolean reservedSignBit) {
|
||||
long signBit = reservedSignBit ? 1 : 0;
|
||||
long mask = ((1L << shardRowBits) - 1) << (64 - shardRowBits - signBit);
|
||||
if (reservedSignBit) {
|
||||
return ((base + step) & mask) > 0;
|
||||
} else {
|
||||
return Long.compareUnsigned((base + step) & mask, 0) > 0;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* read current row id from TiKV and write the calculated value back to TiKV. The calculation rule
|
||||
* is start(read from TiKV) + step.
|
||||
*/
|
||||
public long udpateAllocateId(
|
||||
long dbId, long tableId, long step, Snapshot snapshot, long shard, boolean hasSignedBit) {
|
||||
if (isDBExisted(dbId, snapshot) && isTableExisted(dbId, tableId, snapshot)) {
|
||||
return updateHash(
|
||||
MetaCodec.encodeDatabaseID(dbId),
|
||||
MetaCodec.autoTableIDKey(tableId),
|
||||
(oldVal) -> {
|
||||
long base = 0;
|
||||
if (oldVal != null && oldVal.length != 0) {
|
||||
base = Long.parseLong(new String(oldVal));
|
||||
}
|
||||
if (shard >= 1 && shardRowBitsOverflow(base, step, shard, hasSignedBit)) {
|
||||
throw new AllocateRowIDOverflowException(base, step, shard);
|
||||
}
|
||||
base += step;
|
||||
return String.valueOf(base).getBytes();
|
||||
},
|
||||
snapshot);
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException("table or database does not exist");
|
||||
}
|
||||
|
||||
/** read current row id from TiKV according to database id and table id. */
|
||||
public static long getAllocateId(long dbId, long tableId, Snapshot snapshot) {
|
||||
if (isDBExisted(dbId, snapshot) && isTableExisted(dbId, tableId, snapshot)) {
|
||||
ByteString dbKey = MetaCodec.encodeDatabaseID(dbId);
|
||||
ByteString tblKey = MetaCodec.autoTableIDKey(tableId);
|
||||
ByteString val = MetaCodec.hashGet(dbKey, tblKey, snapshot);
|
||||
if (val.isEmpty()) return 0L;
|
||||
return Long.parseLong(val.toStringUtf8());
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException("table or database does not exist");
|
||||
}
|
||||
|
||||
private void initSigned(Snapshot snapshot, long tableId, long shard) {
|
||||
// get new start from TiKV, and calculate new end and set it back to TiKV.
|
||||
long newStart = getAllocateId(dbId, tableId, snapshot);
|
||||
long tmpStep = Math.min(Long.MAX_VALUE - newStart, step);
|
||||
if (tmpStep != step) {
|
||||
throw new TiBatchWriteException("cannot allocate ids for this write");
|
||||
}
|
||||
if (newStart == Long.MAX_VALUE) {
|
||||
throw new TiBatchWriteException("cannot allocate more ids since it ");
|
||||
}
|
||||
end = udpateAllocateId(dbId, tableId, tmpStep, snapshot, shard, true);
|
||||
}
|
||||
|
||||
private void initUnsigned(Snapshot snapshot, long tableId, long shard) {
|
||||
// get new start from TiKV, and calculate new end and set it back to TiKV.
|
||||
long newStart = getAllocateId(dbId, tableId, snapshot);
|
||||
// for unsigned long, -1L is max value.
|
||||
long tmpStep = UnsignedLongs.min(-1L - newStart, step);
|
||||
if (tmpStep != step) {
|
||||
throw new TiBatchWriteException("cannot allocate ids for this write");
|
||||
}
|
||||
// when compare unsigned long, the min value is largest value.
|
||||
if (UnsignedLongs.compare(newStart, -1L) == 0) {
|
||||
throw new TiBatchWriteException(
|
||||
"cannot allocate more ids since the start reaches " + "unsigned long's max value ");
|
||||
}
|
||||
end = udpateAllocateId(dbId, tableId, tmpStep, snapshot, shard, false);
|
||||
}
|
||||
}
|
||||
|
|
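
RowIDAllocator reserves a whole (start, end] block of row ids with one read-modify-write of the table's auto-id meta key, so a batch write can assign ids locally afterwards. A hedged sketch; the database and table names, and the step of 10000, are placeholders.

```java
import org.tikv.common.TiConfiguration;
import org.tikv.common.TiSession;
import org.tikv.common.allocator.RowIDAllocator;
import org.tikv.common.meta.TiDBInfo;
import org.tikv.common.meta.TiTableInfo;

public class RowIdAllocationSketch {
  public static void main(String[] args) throws Exception {
    TiConfiguration conf = TiConfiguration.createDefault("127.0.0.1:2379");
    TiSession session = TiSession.getInstance(conf);

    TiDBInfo db = session.getCatalog().getDatabase("test");
    TiTableInfo table = session.getCatalog().getTable(db, "t1");

    // Reserve 10000 ids in one shot; (getStart(), getEnd()] is now owned by this writer.
    RowIDAllocator allocator = RowIDAllocator.create(db.getId(), table, conf, false, 10_000);
    long firstId = allocator.getStart() + 1;
    long lastId = allocator.getEnd();

    // For tables with SHARD_ROW_ID_BITS set, mix the shard bits into the i-th allocated id.
    long shardedId = allocator.getShardRowId(1);
    System.out.println(firstId + ".." + lastId + ", sharded first id = " + shardedId);

    session.close();
  }
}
```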
@ -0,0 +1,205 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.catalog;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.stream.Collectors;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.tikv.common.Snapshot;
|
||||
import org.tikv.common.meta.TiDBInfo;
|
||||
import org.tikv.common.meta.TiTableInfo;
|
||||
|
||||
public class Catalog implements AutoCloseable {
|
||||
private final boolean showRowId;
|
||||
private final String dbPrefix;
|
||||
private final Logger logger = LoggerFactory.getLogger(this.getClass());
|
||||
private final Supplier<Snapshot> snapshotProvider;
|
||||
private CatalogCache metaCache;
|
||||
|
||||
public Catalog(Supplier<Snapshot> snapshotProvider, boolean showRowId, String dbPrefix) {
|
||||
this.snapshotProvider = Objects.requireNonNull(snapshotProvider, "Snapshot Provider is null");
|
||||
this.showRowId = showRowId;
|
||||
this.dbPrefix = dbPrefix;
|
||||
metaCache = new CatalogCache(new CatalogTransaction(snapshotProvider.get()), dbPrefix, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {}
|
||||
|
||||
private synchronized void reloadCache(boolean loadTables) {
|
||||
Snapshot snapshot = snapshotProvider.get();
|
||||
CatalogTransaction newTrx = new CatalogTransaction(snapshot);
|
||||
long latestVersion = newTrx.getLatestSchemaVersion();
|
||||
if (latestVersion > metaCache.getVersion()) {
|
||||
metaCache = new CatalogCache(newTrx, dbPrefix, loadTables);
|
||||
}
|
||||
}
|
||||
|
||||
private void reloadCache() {
|
||||
reloadCache(false);
|
||||
}
|
||||
|
||||
public List<TiDBInfo> listDatabases() {
|
||||
reloadCache();
|
||||
return metaCache.listDatabases();
|
||||
}
|
||||
|
||||
public List<TiTableInfo> listTables(TiDBInfo database) {
|
||||
Objects.requireNonNull(database, "database is null");
|
||||
reloadCache(true);
|
||||
if (showRowId) {
|
||||
return metaCache
|
||||
.listTables(database)
|
||||
.stream()
|
||||
.map(TiTableInfo::copyTableWithRowId)
|
||||
.collect(Collectors.toList());
|
||||
} else {
|
||||
return metaCache.listTables(database);
|
||||
}
|
||||
}
|
||||
|
||||
public TiDBInfo getDatabase(String dbName) {
|
||||
Objects.requireNonNull(dbName, "dbName is null");
|
||||
reloadCache();
|
||||
return metaCache.getDatabase(dbName);
|
||||
}
|
||||
|
||||
public TiTableInfo getTable(String dbName, String tableName) {
|
||||
TiDBInfo database = getDatabase(dbName);
|
||||
if (database == null) {
|
||||
return null;
|
||||
}
|
||||
return getTable(database, tableName);
|
||||
}
|
||||
|
||||
public TiTableInfo getTable(TiDBInfo database, String tableName) {
|
||||
Objects.requireNonNull(database, "database is null");
|
||||
Objects.requireNonNull(tableName, "tableName is null");
|
||||
reloadCache(true);
|
||||
TiTableInfo table = metaCache.getTable(database, tableName);
|
||||
if (showRowId && table != null) {
|
||||
return table.copyTableWithRowId();
|
||||
} else {
|
||||
return table;
|
||||
}
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
public TiTableInfo getTable(TiDBInfo database, long tableId) {
|
||||
Objects.requireNonNull(database, "database is null");
|
||||
Collection<TiTableInfo> tables = listTables(database);
|
||||
for (TiTableInfo table : tables) {
|
||||
if (table.getId() == tableId) {
|
||||
if (showRowId) {
|
||||
return table.copyTableWithRowId();
|
||||
} else {
|
||||
return table;
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private static class CatalogCache {
|
||||
|
||||
private final Map<String, TiDBInfo> dbCache;
|
||||
private final ConcurrentHashMap<TiDBInfo, Map<String, TiTableInfo>> tableCache;
|
||||
private final String dbPrefix;
|
||||
private final CatalogTransaction transaction;
|
||||
private final long currentVersion;
|
||||
|
||||
private CatalogCache(CatalogTransaction transaction, String dbPrefix, boolean loadTables) {
|
||||
this.transaction = transaction;
|
||||
this.dbPrefix = dbPrefix;
|
||||
this.tableCache = new ConcurrentHashMap<>();
|
||||
this.dbCache = loadDatabases(loadTables);
|
||||
this.currentVersion = transaction.getLatestSchemaVersion();
|
||||
}
|
||||
|
||||
public CatalogTransaction getTransaction() {
|
||||
return transaction;
|
||||
}
|
||||
|
||||
public long getVersion() {
|
||||
return currentVersion;
|
||||
}
|
||||
|
||||
public TiDBInfo getDatabase(String name) {
|
||||
Objects.requireNonNull(name, "name is null");
|
||||
return dbCache.get(name.toLowerCase());
|
||||
}
|
||||
|
||||
public List<TiDBInfo> listDatabases() {
|
||||
return ImmutableList.copyOf(dbCache.values());
|
||||
}
|
||||
|
||||
public List<TiTableInfo> listTables(TiDBInfo db) {
|
||||
Map<String, TiTableInfo> tableMap = tableCache.get(db);
|
||||
if (tableMap == null) {
|
||||
tableMap = loadTables(db);
|
||||
}
|
||||
Collection<TiTableInfo> tables = tableMap.values();
|
||||
return tables
|
||||
.stream()
|
||||
.filter(tbl -> !tbl.isView() && !tbl.isSequence())
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
public TiTableInfo getTable(TiDBInfo db, String tableName) {
|
||||
Map<String, TiTableInfo> tableMap = tableCache.get(db);
|
||||
if (tableMap == null) {
|
||||
tableMap = loadTables(db);
|
||||
}
|
||||
TiTableInfo tbl = tableMap.get(tableName.toLowerCase());
|
||||
// https://github.com/pingcap/tispark/issues/961
|
||||
// TODO: support reading from view table in the future.
|
||||
if (tbl != null && (tbl.isView() || tbl.isSequence())) return null;
|
||||
return tbl;
|
||||
}
|
||||
|
||||
private Map<String, TiTableInfo> loadTables(TiDBInfo db) {
|
||||
List<TiTableInfo> tables = transaction.getTables(db.getId());
|
||||
ImmutableMap.Builder<String, TiTableInfo> builder = ImmutableMap.builder();
|
||||
for (TiTableInfo table : tables) {
|
||||
builder.put(table.getName().toLowerCase(), table);
|
||||
}
|
||||
Map<String, TiTableInfo> tableMap = builder.build();
|
||||
tableCache.put(db, tableMap);
|
||||
return tableMap;
|
||||
}
|
||||
|
||||
private Map<String, TiDBInfo> loadDatabases(boolean loadTables) {
|
||||
HashMap<String, TiDBInfo> newDBCache = new HashMap<>();
|
||||
|
||||
List<TiDBInfo> databases = transaction.getDatabases();
|
||||
databases.forEach(
|
||||
db -> {
|
||||
TiDBInfo newDBInfo = db.rename(dbPrefix + db.getName());
|
||||
newDBCache.put(newDBInfo.getName().toLowerCase(), newDBInfo);
|
||||
if (loadTables) {
|
||||
loadTables(newDBInfo);
|
||||
}
|
||||
});
|
||||
return newDBCache;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
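
Catalog above keeps a cache of the TiDB schema keyed by the schema version stored in TiKV, reloading it whenever a newer version shows up. A short browsing sketch with placeholder database and table names:

```java
import org.tikv.common.TiConfiguration;
import org.tikv.common.TiSession;
import org.tikv.common.catalog.Catalog;
import org.tikv.common.meta.TiDBInfo;
import org.tikv.common.meta.TiTableInfo;

public class CatalogBrowseSketch {
  public static void main(String[] args) throws Exception {
    TiSession session = TiSession.getInstance(TiConfiguration.createDefault("127.0.0.1:2379"));
    Catalog catalog = session.getCatalog();

    for (TiDBInfo db : catalog.listDatabases()) {
      System.out.println("database " + db.getName());
      for (TiTableInfo table : catalog.listTables(db)) {
        System.out.println("  table " + table.getName() + " (id=" + table.getId() + ")");
      }
    }

    // Name lookups return null for missing objects and for views/sequences.
    TiTableInfo t1 = catalog.getTable("test", "t1");
    System.out.println("test.t1 present: " + (t1 != null));

    session.close();
  }
}
```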
@ -0,0 +1,108 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.catalog;
|
||||
|
||||
import static org.tikv.common.codec.MetaCodec.KEY_DBs;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonParseException;
|
||||
import com.fasterxml.jackson.databind.JsonMappingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.protobuf.ByteString;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.tikv.common.Snapshot;
|
||||
import org.tikv.common.codec.CodecDataInput;
|
||||
import org.tikv.common.codec.KeyUtils;
|
||||
import org.tikv.common.codec.MetaCodec;
|
||||
import org.tikv.common.exception.TiClientInternalException;
|
||||
import org.tikv.common.meta.TiDBInfo;
|
||||
import org.tikv.common.meta.TiTableInfo;
|
||||
import org.tikv.common.util.Pair;
|
||||
|
||||
public class CatalogTransaction {
|
||||
protected static final Logger logger = LoggerFactory.getLogger(CatalogTransaction.class);
|
||||
private final Snapshot snapshot;
|
||||
|
||||
CatalogTransaction(Snapshot snapshot) {
|
||||
this.snapshot = snapshot;
|
||||
}
|
||||
|
||||
public static <T> T parseFromJson(ByteString json, Class<T> cls) {
|
||||
Objects.requireNonNull(json, "json is null");
|
||||
Objects.requireNonNull(cls, "cls is null");
|
||||
|
||||
logger.debug(String.format("Parse Json %s : %s", cls.getSimpleName(), json.toStringUtf8()));
|
||||
ObjectMapper mapper = new ObjectMapper();
|
||||
try {
|
||||
return mapper.readValue(json.toStringUtf8(), cls);
|
||||
} catch (JsonParseException | JsonMappingException e) {
|
||||
String errMsg =
|
||||
String.format(
|
||||
"Invalid JSON value for Type %s: %s\n", cls.getSimpleName(), json.toStringUtf8());
|
||||
throw new TiClientInternalException(errMsg, e);
|
||||
} catch (Exception e1) {
|
||||
throw new TiClientInternalException("Error parsing Json", e1);
|
||||
}
|
||||
}
|
||||
|
||||
long getLatestSchemaVersion() {
|
||||
ByteString versionBytes = MetaCodec.bytesGet(MetaCodec.KEY_SCHEMA_VERSION, this.snapshot);
|
||||
CodecDataInput cdi = new CodecDataInput(versionBytes.toByteArray());
|
||||
return Long.parseLong(new String(cdi.toByteArray(), StandardCharsets.UTF_8));
|
||||
}
|
||||
|
||||
public List<TiDBInfo> getDatabases() {
|
||||
List<Pair<ByteString, ByteString>> fields =
|
||||
MetaCodec.hashGetFields(MetaCodec.KEY_DBs, this.snapshot);
|
||||
ImmutableList.Builder<TiDBInfo> builder = ImmutableList.builder();
|
||||
for (Pair<ByteString, ByteString> pair : fields) {
|
||||
builder.add(parseFromJson(pair.second, TiDBInfo.class));
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
TiDBInfo getDatabase(long id) {
|
||||
ByteString dbKey = MetaCodec.encodeDatabaseID(id);
|
||||
ByteString json = MetaCodec.hashGet(KEY_DBs, dbKey, this.snapshot);
|
||||
if (json == null || json.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
return parseFromJson(json, TiDBInfo.class);
|
||||
}
|
||||
|
||||
List<TiTableInfo> getTables(long dbId) {
|
||||
ByteString dbKey = MetaCodec.encodeDatabaseID(dbId);
|
||||
List<Pair<ByteString, ByteString>> fields = MetaCodec.hashGetFields(dbKey, this.snapshot);
|
||||
ImmutableList.Builder<TiTableInfo> builder = ImmutableList.builder();
|
||||
for (Pair<ByteString, ByteString> pair : fields) {
|
||||
if (KeyUtils.hasPrefix(pair.first, ByteString.copyFromUtf8(MetaCodec.KEY_TABLE))) {
|
||||
try {
|
||||
TiTableInfo tableInfo = parseFromJson(pair.second, TiTableInfo.class);
|
||||
if (!tableInfo.isSequence()) {
|
||||
builder.add(tableInfo);
|
||||
}
|
||||
} catch (TiClientInternalException e) {
|
||||
logger.warn("fail to parse table from json!", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
}
|
||||
|
|
@ -20,9 +20,18 @@ import static com.google.common.base.Preconditions.checkArgument;
|
|||
import gnu.trove.list.array.TIntArrayList;
|
||||
import java.math.BigDecimal;
|
||||
import java.sql.Date;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import org.joda.time.*;
|
||||
import java.util.List;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.joda.time.IllegalInstantException;
|
||||
import org.joda.time.LocalDate;
|
||||
import org.joda.time.LocalDateTime;
|
||||
import org.tikv.common.ExtendedDateTime;
|
||||
import org.tikv.common.exception.ConvertOverflowException;
|
||||
import org.tikv.common.exception.InvalidCodecFormatException;
|
||||
import org.tikv.common.exception.TypeException;
|
||||
|
||||
public class Codec {
|
||||
|
||||
|
|
@ -38,20 +47,21 @@ public class Codec {
|
|||
public static final int UVARINT_FLAG = 9;
|
||||
public static final int JSON_FLAG = 10;
|
||||
public static final int MAX_FLAG = 250;
|
||||
public static final long SIGN_MASK = ~Long.MAX_VALUE;
|
||||
|
||||
public static boolean isNullFlag(int flag) {
|
||||
return flag == NULL_FLAG;
|
||||
}
|
||||
|
||||
public static class IntegerCodec {
|
||||
private static final long SIGN_MASK = ~Long.MAX_VALUE;
|
||||
|
||||
private static long flipSignBit(long v) {
|
||||
return v ^ SIGN_MASK;
|
||||
}
|
||||
|
||||
/**
|
||||
* Encoding a long value to byte buffer with type flag at the beginning
|
||||
* Encoding a long value to byte buffer with type flag at the beginning. If we are encoding a
* key, comparable must be true; otherwise comparable is false.
|
||||
*
|
||||
* @param cdo For outputting data in bytes array
|
||||
* @param lVal The data to encode
|
||||
|
|
@ -86,6 +96,17 @@ public class Codec {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode Data as duration, the same as go's binary.PutUvarint
|
||||
*
|
||||
* @param cdo For outputting data in bytes array
|
||||
* @param value The data to encode
|
||||
*/
|
||||
public static void writeDuration(CodecDataOutput cdo, long value) {
|
||||
cdo.writeByte(DURATION_FLAG);
|
||||
writeLong(cdo, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode long value without type flag at the beginning The signed bit is flipped for memory
|
||||
* comparable purpose
|
||||
|
|
@ -128,7 +149,8 @@ public class Codec {
|
|||
* @param value The data to encode
|
||||
*/
|
||||
static void writeUVarLong(CodecDataOutput cdo, long value) {
|
||||
while ((value - 0x80) >= 0) {
|
||||
// value is assumed to be an unsigned value.
|
||||
while (Long.compareUnsigned(value, 0x80) >= 0) {
|
||||
cdo.writeByte((byte) value | 0x80);
|
||||
value >>>= 7;
|
||||
}
|
||||
|
|
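
The change from `(value - 0x80) >= 0` to `Long.compareUnsigned(value, 0x80) >= 0` is what makes writeUVarLong treat the long as unsigned: with the old signed check, any value with the top bit set looks smaller than 0x80 and the continuation loop never runs. A tiny self-contained demonstration of the difference:

```java
public class UnsignedCompareDemo {
  public static void main(String[] args) {
    long value = 0xFFFFFFFFFFFFFFFFL; // -1 as a signed long, 2^64 - 1 as an unsigned value

    // Old condition: signed arithmetic claims the value is already below 0x80.
    System.out.println((value - 0x80) >= 0);                    // false
    // New condition: unsigned comparison keeps emitting continuation bytes.
    System.out.println(Long.compareUnsigned(value, 0x80) >= 0); // true
  }
}
```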
@ -145,10 +167,6 @@ public class Codec {
|
|||
return flipSignBit(cdi.readLong());
|
||||
}
|
||||
|
||||
public static long readPartialLong(CodecDataInput cdi) {
|
||||
return flipSignBit(cdi.readPartialLong());
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode as unsigned long without any binary manipulation
|
||||
*
|
||||
|
|
@ -185,7 +203,7 @@ public class Codec {
|
|||
int s = 0;
|
||||
for (int i = 0; !cdi.eof(); i++) {
|
||||
long b = cdi.readUnsignedByte();
|
||||
if ((b - 0x80) < 0) {
|
||||
if (Long.compareUnsigned(b, 0x80) < 0) {
|
||||
if (i > 9 || i == 9 && b > 1) {
|
||||
throw new InvalidCodecFormatException("readUVarLong overflow");
|
||||
}
|
||||
|
|
@ -321,9 +339,6 @@ public class Codec {
|
|||
}
|
||||
|
||||
public static class RealCodec {
|
||||
|
||||
private static final long signMask = 0x8000000000000000L;
|
||||
|
||||
/**
|
||||
* Decode as float
|
||||
*
|
||||
|
|
@ -343,7 +358,7 @@ public class Codec {
|
|||
private static long encodeDoubleToCmpLong(double val) {
|
||||
long u = Double.doubleToRawLongBits(val);
|
||||
if (val >= 0) {
|
||||
u |= signMask;
|
||||
u |= SIGN_MASK;
|
||||
} else {
|
||||
u = ~u;
|
||||
}
|
||||
|
|
@ -395,7 +410,7 @@ public class Codec {
|
|||
int binSize = dec.fromBin(precision, frac, data.toArray());
|
||||
cdi.mark(curPos + binSize);
|
||||
cdi.reset();
|
||||
return dec.toDecimal();
|
||||
return dec.toBigDecimal();
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -404,7 +419,20 @@ public class Codec {
|
|||
* @param cdo cdo is destination data.
|
||||
* @param dec is decimal value that will be written into cdo.
|
||||
*/
|
||||
static void writeDecimal(CodecDataOutput cdo, MyDecimal dec) {
|
||||
public static void writeDecimal(
|
||||
CodecDataOutput cdo, MyDecimal dec, int precision, int fraction) {
|
||||
int[] data = dec.toBin(precision, fraction);
|
||||
cdo.writeByte(precision);
|
||||
cdo.writeByte(fraction);
|
||||
for (int aData : data) {
|
||||
cdo.writeByte(aData & 0xFF);
|
||||
}
|
||||
}
|
||||
|
||||
// TODO remove this once we refactor unit test CodecTest
|
||||
public static void writeDecimal(CodecDataOutput cdo, BigDecimal val) {
|
||||
MyDecimal dec = new MyDecimal();
|
||||
dec.fromString(val.toPlainString());
|
||||
int[] data = dec.toBin(dec.precision(), dec.frac());
|
||||
cdo.writeByte(dec.precision());
|
||||
cdo.writeByte(dec.frac());
|
||||
|
|
@ -413,34 +441,23 @@ public class Codec {
|
|||
}
|
||||
}
|
||||
|
||||
public static void writeDecimalFully(CodecDataOutput cdo, BigDecimal val) {
|
||||
public static void writeDecimalFully(
|
||||
CodecDataOutput cdo, MyDecimal val, int precision, int fraction) {
|
||||
cdo.writeByte(DECIMAL_FLAG);
|
||||
writeDecimal(cdo, val);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encoding a double value to byte buffer
|
||||
*
|
||||
* @param cdo For outputting data in bytes array
|
||||
* @param val The data to encode
|
||||
*/
|
||||
public static void writeDecimal(CodecDataOutput cdo, BigDecimal val) {
|
||||
MyDecimal dec = new MyDecimal();
|
||||
dec.fromString(val.toPlainString());
|
||||
writeDecimal(cdo, dec);
|
||||
writeDecimal(cdo, val, precision, fraction);
|
||||
}
|
||||
}
|
||||
|
||||
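
The decimal codec now takes an explicit precision and fraction (scale) when writing a MyDecimal, while the BigDecimal overload derives them from the value. A hedged round-trip sketch; the DecimalCodec class name and the readDecimal reader are assumptions inferred from the surrounding hunks, not shown verbatim in this diff.

```java
import java.math.BigDecimal;
import org.tikv.common.codec.Codec.DecimalCodec; // assumed nesting/location of the decimal codec
import org.tikv.common.codec.CodecDataInput;
import org.tikv.common.codec.CodecDataOutput;

public class DecimalRoundTripSketch {
  public static void main(String[] args) {
    CodecDataOutput cdo = new CodecDataOutput();
    // The BigDecimal overload derives precision and fraction from the value itself.
    DecimalCodec.writeDecimal(cdo, new BigDecimal("3.1415"));

    // readDecimal (assumed name) now yields a BigDecimal via MyDecimal.toBigDecimal().
    CodecDataInput cdi = new CodecDataInput(cdo.toBytes());
    BigDecimal decoded = DecimalCodec.readDecimal(cdi);
    System.out.println(decoded); // expected: 3.1415
  }
}
```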
public static class DateTimeCodec {
|
||||
|
||||
/**
|
||||
* Encode a DateTime to a packed long converting to specific timezone
|
||||
*
|
||||
* @param dateTime dateTime that need to be encoded.
|
||||
* @param extendedDateTime dateTime with nanos that need to be encoded.
|
||||
* @param tz timezone used for converting to localDateTime
|
||||
* @return a packed long.
|
||||
*/
|
||||
static long toPackedLong(DateTime dateTime, DateTimeZone tz) {
|
||||
public static long toPackedLong(ExtendedDateTime extendedDateTime, DateTimeZone tz) {
|
||||
DateTime dateTime = extendedDateTime.getDateTime();
|
||||
LocalDateTime localDateTime = dateTime.withZone(tz).toLocalDateTime();
|
||||
return toPackedLong(
|
||||
localDateTime.getYear(),
|
||||
|
|
@ -449,7 +466,7 @@ public class Codec {
|
|||
localDateTime.getHourOfDay(),
|
||||
localDateTime.getMinuteOfHour(),
|
||||
localDateTime.getSecondOfMinute(),
|
||||
localDateTime.getMillisOfSecond() * 1000);
|
||||
extendedDateTime.getMicrosOfSeconds());
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -474,7 +491,7 @@ public class Codec {
|
|||
* @param tz timezone to interpret datetime parts
|
||||
* @return decoded DateTime using provided timezone
|
||||
*/
|
||||
static DateTime fromPackedLong(long packed, DateTimeZone tz) {
|
||||
public static ExtendedDateTime fromPackedLong(long packed, DateTimeZone tz) {
|
||||
// TODO: As for JDBC behavior, it can be configured to "round" or "toNull"
|
||||
// for now we didn't pass in session so we do a toNull behavior
|
||||
if (packed == 0) {
|
||||
|
|
@ -493,14 +510,29 @@ public class Codec {
|
|||
int hour = hms >> 12;
|
||||
int microsec = (int) (packed % (1 << 24));
|
||||
|
||||
return createExtendedDateTime(tz, year, month, day, hour, minute, second, microsec);
|
||||
}
|
||||
|
||||
public static ExtendedDateTime createExtendedDateTime(
|
||||
DateTimeZone tz,
|
||||
int year,
|
||||
int month,
|
||||
int day,
|
||||
int hour,
|
||||
int minute,
|
||||
int second,
|
||||
int microsec) {
|
||||
try {
|
||||
return new DateTime(year, month, day, hour, minute, second, microsec / 1000, tz);
|
||||
DateTime dateTime =
|
||||
new DateTime(year, month, day, hour, minute, second, microsec / 1000, tz);
|
||||
return new ExtendedDateTime(dateTime, microsec % 1000);
|
||||
} catch (IllegalInstantException e) {
|
||||
LocalDateTime localDateTime =
|
||||
new LocalDateTime(year, month, day, hour, minute, second, microsec / 1000);
|
||||
DateTime dt = localDateTime.toLocalDate().toDateTimeAtStartOfDay(tz);
|
||||
long millis = dt.getMillis() + localDateTime.toLocalTime().getMillisOfDay();
|
||||
return new DateTime(millis, tz);
|
||||
DateTime dateTime = new DateTime(millis, tz);
|
||||
return new ExtendedDateTime(dateTime, microsec % 1000);
|
||||
}
|
||||
}
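For reference, the packed long these helpers work with keeps microseconds in the low 24 bits and the calendar fields above them, which is consistent with the decode shown here (hour = hms >> 12, microsec = packed % (1 << 24)). The exact bit widths below are an assumption about TiDB's packed time format rather than something this diff spells out; treat the sketch as illustrative:

```java
public class PackedTimeSketch {
  // Assumed layout: packed = ((ymd << 17 | hms) << 24) | microsec, with
  // ymd = (year * 13 + month) << 5 | day and hms = hour << 12 | minute << 6 | second.
  static long pack(int year, int month, int day, int hour, int minute, int second, int microsec) {
    long ymd = ((year * 13L + month) << 5) | day;
    long hms = ((long) hour << 12) | (minute << 6) | second;
    return ((ymd << 17) | hms) << 24 | microsec;
  }

  static int[] unpack(long packed) {
    int microsec = (int) (packed % (1 << 24));
    long ymdhms = packed >> 24;
    int hms = (int) (ymdhms & ((1 << 17) - 1));
    long ymd = ymdhms >> 17;
    int day = (int) (ymd & 31);
    long ym = ymd >> 5;
    return new int[] {
      (int) (ym / 13), (int) (ym % 13), day, hms >> 12, (hms >> 6) & 63, hms & 63, microsec
    };
  }

  public static void main(String[] args) {
    long packed = pack(2020, 6, 15, 13, 30, 45, 123456);
    System.out.println(java.util.Arrays.toString(unpack(packed)));
    // [2020, 6, 15, 13, 30, 45, 123456]
  }
}
```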
|
||||
|
||||
|
|
@ -509,11 +541,12 @@ public class Codec {
|
|||
* should be done beforehand
|
||||
*
|
||||
* @param cdo encoding output
|
||||
* @param dateTime value to encode
|
||||
* @param extendeddateTime value to encode
|
||||
* @param tz timezone used for converting to local time
|
||||
*/
|
||||
public static void writeDateTimeFully(CodecDataOutput cdo, DateTime dateTime, DateTimeZone tz) {
|
||||
long val = DateTimeCodec.toPackedLong(dateTime, tz);
|
||||
public static void writeDateTimeFully(
|
||||
CodecDataOutput cdo, ExtendedDateTime extendeddateTime, DateTimeZone tz) {
|
||||
long val = DateTimeCodec.toPackedLong(extendeddateTime, tz);
|
||||
IntegerCodec.writeULongFully(cdo, val, true);
|
||||
}
|
||||
|
||||
|
|
@ -522,11 +555,12 @@ public class Codec {
|
|||
* should be done beforehand The encoded value has no data type flag
|
||||
*
|
||||
* @param cdo encoding output
|
||||
* @param dateTime value to encode
|
||||
* @param extendedDateTime value to encode
|
||||
* @param tz timezone used for converting to local time
|
||||
*/
|
||||
public static void writeDateTimeProto(CodecDataOutput cdo, DateTime dateTime, DateTimeZone tz) {
|
||||
long val = DateTimeCodec.toPackedLong(dateTime, tz);
|
||||
public static void writeDateTimeProto(
|
||||
CodecDataOutput cdo, ExtendedDateTime extendedDateTime, DateTimeZone tz) {
|
||||
long val = DateTimeCodec.toPackedLong(extendedDateTime, tz);
|
||||
IntegerCodec.writeULong(cdo, val);
|
||||
}
|
||||
|
||||
|
|
@ -537,9 +571,9 @@ public class Codec {
|
|||
* @see DateTimeCodec#fromPackedLong(long, DateTimeZone)
|
||||
* @param cdi codec buffer input
|
||||
* @param tz timezone to interpret datetime parts
|
||||
* @return decoded DateTime using provided timezone
|
||||
* @return decoded ExtendedDateTime using provided timezone
|
||||
*/
|
||||
public static DateTime readFromUVarInt(CodecDataInput cdi, DateTimeZone tz) {
|
||||
public static ExtendedDateTime readFromUVarInt(CodecDataInput cdi, DateTimeZone tz) {
|
||||
return DateTimeCodec.fromPackedLong(IntegerCodec.readUVarLong(cdi), tz);
|
||||
}
|
||||
|
||||
|
|
@ -549,9 +583,9 @@ public class Codec {
|
|||
* @see DateTimeCodec#fromPackedLong(long, DateTimeZone)
|
||||
* @param cdi codec buffer input
|
||||
* @param tz timezone to interpret datetime parts
|
||||
* @return decoded DateTime using provided timezone
|
||||
* @return decoded ExtendedDateTime using provided timezone
|
||||
*/
|
||||
public static DateTime readFromUInt(CodecDataInput cdi, DateTimeZone tz) {
|
||||
public static ExtendedDateTime readFromUInt(CodecDataInput cdi, DateTimeZone tz) {
|
||||
return DateTimeCodec.fromPackedLong(IntegerCodec.readULong(cdi), tz);
|
||||
}
|
||||
}
|
||||
|
|
@ -575,8 +609,7 @@ public class Codec {
|
|||
}
|
||||
|
||||
static long toPackedLong(LocalDate date) {
|
||||
return Codec.DateCodec.toPackedLong(
|
||||
date.getYear(), date.getMonthOfYear(), date.getDayOfMonth());
|
||||
return DateCodec.toPackedLong(date.getYear(), date.getMonthOfYear(), date.getDayOfMonth());
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -653,4 +686,83 @@ public class Codec {
|
|||
return DateCodec.fromPackedLong(IntegerCodec.readULong(cdi));
|
||||
}
|
||||
}
|
||||
|
||||
public static class EnumCodec {
|
||||
|
||||
public static Integer parseEnumName(String name, List<String> elems)
|
||||
throws ConvertOverflowException {
|
||||
int i = 0;
|
||||
while (i < elems.size()) {
|
||||
if (elems.get(i).equals(name)) {
|
||||
return i + 1;
|
||||
}
|
||||
i = i + 1;
|
||||
}
|
||||
|
||||
// name doesn't exist, maybe an integer?
|
||||
int result;
|
||||
try {
|
||||
result = Integer.parseInt(name);
|
||||
} catch (Exception e) {
|
||||
throw ConvertOverflowException.newEnumException(name);
|
||||
}
|
||||
return parseEnumValue(result, elems);
|
||||
}
|
||||
|
||||
public static Integer parseEnumValue(Integer number, List<String> elems)
|
||||
throws ConvertOverflowException {
|
||||
if (number == 0) {
|
||||
throw ConvertOverflowException.newLowerBoundException(number, 0);
|
||||
}
|
||||
|
||||
if (number > elems.size()) {
|
||||
throw ConvertOverflowException.newUpperBoundException(number, elems.size());
|
||||
}
|
||||
|
||||
return number;
|
||||
}
|
||||
|
||||
public static String readEnumFromIndex(int idx, List<String> elems) {
|
||||
if (idx < 0 || idx >= elems.size()) throw new TypeException("Index is out of range");
|
||||
return elems.get(idx);
|
||||
}
|
||||
}
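A quick usage sketch of the enum helpers above: MySQL enum values are 1-based indexes into the column's element list, and a name that is not in the list falls back to being parsed as a numeric index. The import below assumes the package layout used elsewhere in this commit.

```java
import java.util.Arrays;
import java.util.List;
import org.tikv.common.codec.Codec.EnumCodec;

public class EnumCodecExample {
  public static void main(String[] args) throws Exception {
    List<String> elems = Arrays.asList("small", "medium", "large");

    // Names map to their 1-based position in the element list.
    System.out.println(EnumCodec.parseEnumName("medium", elems)); // 2

    // A purely numeric "name" is treated as an index and range-checked.
    System.out.println(EnumCodec.parseEnumName("3", elems)); // 3

    // Stored values are read back with a 0-based index.
    System.out.println(EnumCodec.readEnumFromIndex(1, elems)); // medium
  }
}
```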
|
||||
|
||||
public static class SetCodec {
|
||||
private static final long[] SET_INDEX_VALUE = initSetIndexVal();
|
||||
private static final long[] SET_INDEX_INVERT_VALUE = initSetIndexInvertVal();
|
||||
|
||||
private static long[] initSetIndexInvertVal() {
|
||||
long[] tmpArr = new long[64];
|
||||
for (int i = 0; i < 64; i++) {
|
||||
// complement of original value.
|
||||
tmpArr[i] = ~SET_INDEX_VALUE[i];
|
||||
}
|
||||
return tmpArr;
|
||||
}
|
||||
|
||||
private static long[] initSetIndexVal() {
|
||||
long[] tmpArr = new long[64];
|
||||
for (int i = 0; i < 64; i++) {
|
||||
tmpArr[i] = 1L << i;
|
||||
}
|
||||
return tmpArr;
|
||||
}
|
||||
|
||||
public static String readSetFromLong(long number, List<String> elems) {
|
||||
List<String> items = new ArrayList<>();
|
||||
int length = elems.size();
|
||||
for (int i = 0; i < length; i++) {
|
||||
long checker = number & SET_INDEX_VALUE[i];
|
||||
if (checker != 0) {
|
||||
items.add(elems.get(i));
|
||||
number &= SET_INDEX_INVERT_VALUE[i];
|
||||
}
|
||||
}
|
||||
if (number != 0) {
|
||||
throw new TypeException(String.format("invalid number %d for Set %s", number, elems));
|
||||
}
|
||||
return String.join(",", items);
|
||||
}
|
||||
}
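readSetFromLong above is a plain bitmask decode: bit i of the stored long selects elems.get(i), and any bit beyond the element list makes the value invalid. A self-contained sketch of the same loop:

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class SetBitmaskSketch {
  // Same idea as readSetFromLong: bit i of `number` selects elems.get(i).
  static String decodeSet(long number, List<String> elems) {
    List<String> items = new ArrayList<>();
    for (int i = 0; i < elems.size(); i++) {
      if ((number & (1L << i)) != 0) {
        items.add(elems.get(i));
        number &= ~(1L << i); // clear the bit; leftover bits signal an invalid value
      }
    }
    if (number != 0) {
      throw new IllegalArgumentException("unknown bits remain: " + number);
    }
    return String.join(",", items);
  }

  public static void main(String[] args) {
    System.out.println(decodeSet(0b101, Arrays.asList("a", "b", "c"))); // a,c
  }
}
```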
|
||||
}
|
||||
|
|
|
|||
|
|
@ -17,91 +17,12 @@ package org.tikv.common.codec;
|
|||
|
||||
import com.google.protobuf.ByteString;
|
||||
import java.io.*;
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
public class CodecDataInput implements DataInput {
|
||||
/**
|
||||
* An copy of ByteArrayInputStream without synchronization for faster decode.
|
||||
*
|
||||
* @see ByteArrayInputStream
|
||||
*/
|
||||
private class UnSyncByteArrayInputStream extends InputStream {
|
||||
protected byte buf[];
|
||||
protected int pos;
|
||||
protected int mark = 0;
|
||||
protected int count;
|
||||
|
||||
UnSyncByteArrayInputStream(byte buf[]) {
|
||||
this.buf = buf;
|
||||
this.pos = 0;
|
||||
this.count = buf.length;
|
||||
}
|
||||
|
||||
public UnSyncByteArrayInputStream(byte buf[], int offset, int length) {
|
||||
this.buf = buf;
|
||||
this.pos = offset;
|
||||
this.count = Math.min(offset + length, buf.length);
|
||||
this.mark = offset;
|
||||
}
|
||||
|
||||
public int read() {
|
||||
return (pos < count) ? (buf[pos++] & 0xff) : -1;
|
||||
}
|
||||
|
||||
public int read(byte b[], int off, int len) {
|
||||
if (b == null) {
|
||||
throw new NullPointerException();
|
||||
} else if (off < 0 || len < 0 || len > b.length - off) {
|
||||
throw new IndexOutOfBoundsException();
|
||||
}
|
||||
|
||||
if (pos >= count) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
int avail = count - pos;
|
||||
if (len > avail) {
|
||||
len = avail;
|
||||
}
|
||||
if (len <= 0) {
|
||||
return 0;
|
||||
}
|
||||
System.arraycopy(buf, pos, b, off, len);
|
||||
pos += len;
|
||||
return len;
|
||||
}
|
||||
|
||||
public long skip(long n) {
|
||||
long k = count - pos;
|
||||
if (n < k) {
|
||||
k = n < 0 ? 0 : n;
|
||||
}
|
||||
|
||||
pos += k;
|
||||
return k;
|
||||
}
|
||||
|
||||
public int available() {
|
||||
return count - pos;
|
||||
}
|
||||
|
||||
public boolean markSupported() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public void mark(int readAheadLimit) {
|
||||
mark = pos;
|
||||
}
|
||||
|
||||
public void reset() {
|
||||
pos = mark;
|
||||
}
|
||||
|
||||
public void close() throws IOException {}
|
||||
}
|
||||
|
||||
private final DataInputStream inputStream;
|
||||
private final UnSyncByteArrayInputStream backingStream;
|
||||
private final byte[] backingBuffer;
|
||||
protected final DataInputStream inputStream;
|
||||
protected final UnSyncByteArrayInputStream backingStream;
|
||||
protected final byte[] backingBuffer;
|
||||
|
||||
public CodecDataInput(ByteString data) {
|
||||
this(data.toByteArray());
|
||||
|
|
@ -123,7 +44,7 @@ public class CodecDataInput implements DataInput {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void readFully(byte[] b) {
|
||||
public void readFully(@Nonnull byte[] b) {
|
||||
try {
|
||||
inputStream.readFully(b);
|
||||
} catch (Exception e) {
|
||||
|
|
@ -132,7 +53,7 @@ public class CodecDataInput implements DataInput {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void readFully(byte[] b, int off, int len) {
|
||||
public void readFully(@Nonnull byte[] b, int off, int len) {
|
||||
try {
|
||||
inputStream.readFully(b, off, len);
|
||||
} catch (Exception e) {
|
||||
|
|
@ -196,7 +117,7 @@ public class CodecDataInput implements DataInput {
|
|||
|
||||
public int readPartialUnsignedShort() {
|
||||
try {
|
||||
byte readBuffer[] = new byte[2];
|
||||
byte[] readBuffer = new byte[2];
|
||||
inputStream.read(readBuffer, 0, 2);
|
||||
return ((readBuffer[0] & 0xff) << 8) + ((readBuffer[1] & 0xff) << 0);
|
||||
} catch (IOException e) {
|
||||
|
|
@ -231,23 +152,6 @@ public class CodecDataInput implements DataInput {
|
|||
}
|
||||
}
|
||||
|
||||
public final long readPartialLong() {
|
||||
try {
|
||||
byte readBuffer[] = new byte[8];
|
||||
inputStream.read(readBuffer, 0, 8);
|
||||
return (((long) readBuffer[0] << 56)
|
||||
+ ((long) (readBuffer[1] & 255) << 48)
|
||||
+ ((long) (readBuffer[2] & 255) << 40)
|
||||
+ ((long) (readBuffer[3] & 255) << 32)
|
||||
+ ((long) (readBuffer[4] & 255) << 24)
|
||||
+ ((readBuffer[5] & 255) << 16)
|
||||
+ ((readBuffer[6] & 255) << 8)
|
||||
+ ((readBuffer[7] & 255) << 0));
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public float readFloat() {
|
||||
try {
|
||||
|
|
@ -276,6 +180,7 @@ public class CodecDataInput implements DataInput {
|
|||
}
|
||||
|
||||
@Override
|
||||
@Nonnull
|
||||
public String readUTF() {
|
||||
try {
|
||||
return inputStream.readUTF();
|
||||
|
|
@ -318,4 +223,85 @@ public class CodecDataInput implements DataInput {
|
|||
public byte[] toByteArray() {
|
||||
return backingBuffer;
|
||||
}
|
||||
|
||||
/**
|
||||
* A copy of ByteArrayInputStream without synchronization, for faster decoding.
|
||||
*
|
||||
* @see ByteArrayInputStream
|
||||
*/
|
||||
private static class UnSyncByteArrayInputStream extends InputStream {
|
||||
protected byte[] buf;
|
||||
protected int pos;
|
||||
protected int mark = 0;
|
||||
protected int count;
|
||||
|
||||
UnSyncByteArrayInputStream(byte[] buf) {
|
||||
this.buf = buf;
|
||||
this.pos = 0;
|
||||
this.count = buf.length;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read() {
|
||||
return (pos < count) ? (buf[pos++] & 0xff) : -1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read(byte[] b, int off, int len) {
|
||||
if (b == null) {
|
||||
throw new NullPointerException();
|
||||
} else if (off < 0 || len < 0 || len > b.length - off) {
|
||||
throw new IndexOutOfBoundsException();
|
||||
}
|
||||
|
||||
if (pos >= count) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
int avail = count - pos;
|
||||
if (len > avail) {
|
||||
len = avail;
|
||||
}
|
||||
if (len <= 0) {
|
||||
return 0;
|
||||
}
|
||||
System.arraycopy(buf, pos, b, off, len);
|
||||
pos += len;
|
||||
return len;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long skip(long n) {
|
||||
long k = count - pos;
|
||||
if (n < k) {
|
||||
k = n < 0 ? 0 : n;
|
||||
}
|
||||
|
||||
pos += k;
|
||||
return k;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int available() {
|
||||
return count - pos;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean markSupported() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void mark(int readAheadLimit) {
|
||||
mark = pos;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void reset() {
|
||||
pos = mark;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,128 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.codec;
|
||||
|
||||
import com.google.protobuf.ByteString;
|
||||
import javax.annotation.Nonnull;
|
||||
import org.tikv.common.exception.CodecException;
|
||||
|
||||
public class CodecDataInputLittleEndian extends CodecDataInput {
|
||||
|
||||
public CodecDataInputLittleEndian(ByteString data) {
|
||||
super(data);
|
||||
}
|
||||
|
||||
public CodecDataInputLittleEndian(byte[] buf) {
|
||||
super(buf);
|
||||
}
|
||||
|
||||
@Override
|
||||
public short readShort() {
|
||||
int ch1 = readUnsignedByte();
|
||||
int ch2 = readUnsignedByte();
|
||||
return (short) ((ch1) + (ch2 << 8));
|
||||
}
|
||||
|
||||
@Override
|
||||
public int readUnsignedShort() {
|
||||
int ch1 = readUnsignedByte();
|
||||
int ch2 = readUnsignedByte();
|
||||
return (ch1) + (ch2 << 8);
|
||||
}
|
||||
|
||||
@Override
|
||||
public char readChar() {
|
||||
int ch1 = readUnsignedByte();
|
||||
int ch2 = readUnsignedByte();
|
||||
return (char) ((ch1) + (ch2 << 8));
|
||||
}
|
||||
|
||||
@Override
|
||||
public int readInt() {
|
||||
int ch1 = readUnsignedByte();
|
||||
int ch2 = readUnsignedByte();
|
||||
int ch3 = readUnsignedByte();
|
||||
int ch4 = readUnsignedByte();
|
||||
return ((ch1) + (ch2 << 8) + (ch3 << 16) + (ch4 << 24));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long readLong() {
|
||||
byte[] readBuffer = new byte[8];
|
||||
readFully(readBuffer, 0, 8);
|
||||
return ((readBuffer[0] & 0xff)
|
||||
+ ((readBuffer[1] & 0xff) << 8)
|
||||
+ ((readBuffer[2] & 0xff) << 16)
|
||||
+ ((long) (readBuffer[3] & 0xff) << 24)
|
||||
+ ((long) (readBuffer[4] & 0xff) << 32)
|
||||
+ ((long) (readBuffer[5] & 0xff) << 40)
|
||||
+ ((long) (readBuffer[6] & 0xff) << 48)
|
||||
+ ((long) (readBuffer[7] & 0xff) << 56));
|
||||
}
|
||||
|
||||
@Override
|
||||
public float readFloat() {
|
||||
return Float.intBitsToFloat(readInt());
|
||||
}
|
||||
|
||||
@Override
|
||||
public double readDouble() {
|
||||
return Double.longBitsToDouble(readLong());
|
||||
}
|
||||
|
||||
@Override
|
||||
public String readLine() {
|
||||
throw new CodecException("unimplemented");
|
||||
}
|
||||
|
||||
@Override
|
||||
@Nonnull
|
||||
public String readUTF() {
|
||||
throw new CodecException("unimplemented");
|
||||
}
|
||||
|
||||
public int peekByte() {
|
||||
return super.peekByte();
|
||||
}
|
||||
|
||||
public int currentPos() {
|
||||
return super.currentPos();
|
||||
}
|
||||
|
||||
public void mark(int givenPos) {
|
||||
super.mark(givenPos);
|
||||
}
|
||||
|
||||
public void reset() {
|
||||
super.reset();
|
||||
}
|
||||
|
||||
public boolean eof() {
|
||||
return super.eof();
|
||||
}
|
||||
|
||||
public int size() {
|
||||
return super.size();
|
||||
}
|
||||
|
||||
public int available() {
|
||||
return super.available();
|
||||
}
|
||||
|
||||
public byte[] toByteArray() {
|
||||
return super.toByteArray();
|
||||
}
|
||||
}
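readLong above assembles the eight bytes least-significant first. A standalone sketch showing that this assembly agrees with java.nio's little-endian order:

```java
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class LittleEndianReadSketch {
  // Assembles a long the way readLong above does: byte 0 is the least significant.
  static long readLongLE(byte[] b) {
    long v = 0;
    for (int i = 7; i >= 0; i--) {
      v = (v << 8) | (b[i] & 0xffL);
    }
    return v;
  }

  public static void main(String[] args) {
    byte[] bytes = new byte[8];
    ByteBuffer.wrap(bytes).order(ByteOrder.LITTLE_ENDIAN).putLong(0x0102030405060708L);
    System.out.println(Long.toHexString(readLongLE(bytes))); // 102030405060708
  }
}
```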
|
||||
|
|
@ -22,9 +22,9 @@ import java.io.DataOutputStream;
|
|||
|
||||
// A trivial implementation supposed to be replaced
|
||||
public class CodecDataOutput implements DataOutput {
|
||||
private DataOutputStream s;
|
||||
protected final DataOutputStream s;
|
||||
// TODO: Switch to ByteBuffer if possible, or a chain of ByteBuffer
|
||||
private ByteArrayOutputStream byteArray;
|
||||
protected final ByteArrayOutputStream byteArray;
|
||||
|
||||
public CodecDataOutput() {
|
||||
byteArray = new ByteArrayOutputStream();
|
||||
|
|
@ -46,7 +46,7 @@ public class CodecDataOutput implements DataOutput {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void write(byte b[]) {
|
||||
public void write(byte[] b) {
|
||||
try {
|
||||
s.write(b);
|
||||
} catch (Exception e) {
|
||||
|
|
@ -170,6 +170,10 @@ public class CodecDataOutput implements DataOutput {
|
|||
return ByteString.copyFrom(byteArray.toByteArray());
|
||||
}
|
||||
|
||||
public int size() {
|
||||
return this.byteArray.size();
|
||||
}
|
||||
|
||||
public void reset() {
|
||||
this.byteArray.reset();
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,71 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.codec;
|
||||
|
||||
import com.google.common.primitives.Longs;
|
||||
|
||||
public class CodecDataOutputLittleEndian extends CodecDataOutput {
|
||||
public CodecDataOutputLittleEndian() {
|
||||
super();
|
||||
}
|
||||
|
||||
public CodecDataOutputLittleEndian(int size) {
|
||||
super(size);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeShort(int v) {
|
||||
try {
|
||||
s.write(v & 0xff);
|
||||
s.write((v >> 8) & 0xff);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeChar(int v) {
|
||||
writeShort(v);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeInt(int v) {
|
||||
try {
|
||||
s.write(v & 0xff);
|
||||
s.write((v >> 8) & 0xff);
|
||||
s.write((v >> 16) & 0xff);
|
||||
s.write((v >> 24) & 0xff);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeLong(long v) {
|
||||
byte[] bytes = Longs.toByteArray(Long.reverseBytes(v));
|
||||
write(bytes, 0, bytes.length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeFloat(float v) {
|
||||
writeInt(Float.floatToIntBits(v));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeDouble(double v) {
|
||||
writeLong(Double.doubleToLongBits(v));
|
||||
}
|
||||
}
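writeLong above relies on the identity that emitting Long.reverseBytes(v) big-endian yields the little-endian encoding of v. A small check of that equivalence using only java.nio:

```java
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;

public class LittleEndianWriteSketch {
  public static void main(String[] args) {
    long v = 0x0102030405060708L;

    // Big-endian bytes of the byte-reversed value ...
    byte[] viaReverse = ByteBuffer.allocate(8).putLong(Long.reverseBytes(v)).array();

    // ... are exactly the little-endian bytes of the original value.
    byte[] littleEndian = ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN).putLong(v).array();

    System.out.println(Arrays.equals(viaReverse, littleEndian)); // true
  }
}
```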
|
||||
|
|
@ -0,0 +1,125 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.codec;
|
||||
|
||||
import static com.google.common.base.Preconditions.checkArgument;
|
||||
|
||||
import com.google.protobuf.ByteString;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import org.tikv.common.Snapshot;
|
||||
import org.tikv.common.codec.Codec.BytesCodec;
|
||||
import org.tikv.common.codec.Codec.IntegerCodec;
|
||||
import org.tikv.common.exception.TiClientInternalException;
|
||||
import org.tikv.common.util.Pair;
|
||||
import org.tikv.kvproto.Kvrpcpb.KvPair;
|
||||
|
||||
public class MetaCodec {
|
||||
public static final String ENCODED_DB_PREFIX = "DB";
|
||||
public static final String KEY_TID = "TID";
|
||||
private static final byte[] META_PREFIX = new byte[] {'m'};
|
||||
private static final byte HASH_DATA_FLAG = 'h';
|
||||
private static final byte HASH_META_FLAG = 'H';
|
||||
private static final byte STR_DATA_FLAG = 's';
|
||||
public static ByteString KEY_DBs = ByteString.copyFromUtf8("DBs");
|
||||
public static String KEY_TABLE = "Table";
|
||||
public static ByteString KEY_SCHEMA_VERSION = ByteString.copyFromUtf8("SchemaVersionKey");
|
||||
|
||||
public static void encodeStringDataKey(CodecDataOutput cdo, byte[] key) {
|
||||
cdo.write(META_PREFIX);
|
||||
BytesCodec.writeBytes(cdo, key);
|
||||
IntegerCodec.writeULong(cdo, STR_DATA_FLAG);
|
||||
}
|
||||
|
||||
public static void encodeHashDataKey(CodecDataOutput cdo, byte[] key, byte[] field) {
|
||||
cdo.write(META_PREFIX);
|
||||
BytesCodec.writeBytes(cdo, key);
|
||||
IntegerCodec.writeULong(cdo, HASH_DATA_FLAG);
|
||||
BytesCodec.writeBytes(cdo, field);
|
||||
}
|
||||
|
||||
public static ByteString encodeHashMetaKey(CodecDataOutput cdo, byte[] key) {
|
||||
cdo.write(META_PREFIX);
|
||||
BytesCodec.writeBytes(cdo, key);
|
||||
IntegerCodec.writeULong(cdo, HASH_META_FLAG);
|
||||
return cdo.toByteString();
|
||||
}
|
||||
|
||||
public static void encodeHashDataKeyPrefix(CodecDataOutput cdo, byte[] key) {
|
||||
cdo.write(META_PREFIX);
|
||||
BytesCodec.writeBytes(cdo, key);
|
||||
IntegerCodec.writeULong(cdo, HASH_DATA_FLAG);
|
||||
}
|
||||
|
||||
public static Pair<ByteString, ByteString> decodeHashDataKey(ByteString rawKey) {
|
||||
checkArgument(
|
||||
KeyUtils.hasPrefix(rawKey, ByteString.copyFrom(META_PREFIX)),
|
||||
"invalid encoded hash data key prefix: " + new String(META_PREFIX));
|
||||
CodecDataInput cdi = new CodecDataInput(rawKey.toByteArray());
|
||||
cdi.skipBytes(META_PREFIX.length);
|
||||
byte[] key = BytesCodec.readBytes(cdi);
|
||||
long typeFlag = IntegerCodec.readULong(cdi);
|
||||
if (typeFlag != HASH_DATA_FLAG) {
|
||||
throw new TiClientInternalException("Invalid hash data flag: " + typeFlag);
|
||||
}
|
||||
byte[] field = BytesCodec.readBytes(cdi);
|
||||
return Pair.create(ByteString.copyFrom(key), ByteString.copyFrom(field));
|
||||
}
|
||||
|
||||
public static ByteString autoTableIDKey(long tableId) {
|
||||
return ByteString.copyFrom(String.format("%s:%d", KEY_TID, tableId).getBytes());
|
||||
}
|
||||
|
||||
public static ByteString tableKey(long tableId) {
|
||||
return ByteString.copyFrom(String.format("%s:%d", KEY_TABLE, tableId).getBytes());
|
||||
}
|
||||
|
||||
public static ByteString encodeDatabaseID(long id) {
|
||||
return ByteString.copyFrom(String.format("%s:%d", ENCODED_DB_PREFIX, id).getBytes());
|
||||
}
|
||||
|
||||
public static ByteString hashGet(ByteString key, ByteString field, Snapshot snapshot) {
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
MetaCodec.encodeHashDataKey(cdo, key.toByteArray(), field.toByteArray());
|
||||
return snapshot.get(cdo.toByteString());
|
||||
}
|
||||
|
||||
public static ByteString bytesGet(ByteString key, Snapshot snapshot) {
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
MetaCodec.encodeStringDataKey(cdo, key.toByteArray());
|
||||
return snapshot.get(cdo.toByteString());
|
||||
}
|
||||
|
||||
public static List<Pair<ByteString, ByteString>> hashGetFields(
|
||||
ByteString key, Snapshot snapshot) {
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
MetaCodec.encodeHashDataKeyPrefix(cdo, key.toByteArray());
|
||||
ByteString encodedKey = cdo.toByteString();
|
||||
|
||||
Iterator<KvPair> iterator = snapshot.scanPrefix(encodedKey);
|
||||
List<Pair<ByteString, ByteString>> fields = new ArrayList<>();
|
||||
while (iterator.hasNext()) {
|
||||
KvPair kv = iterator.next();
|
||||
if (kv == null || kv.getKey() == null) {
|
||||
continue;
|
||||
}
|
||||
fields.add(Pair.create(MetaCodec.decodeHashDataKey(kv.getKey()).second, kv.getValue()));
|
||||
}
|
||||
|
||||
return fields;
|
||||
}
|
||||
}
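Every meta key built above has the same shape: the 'm' prefix, a memcomparable-encoded key, a type flag ('s', 'h' or 'H'), and optionally a field. A usage sketch that builds the hash-data key for one database entry, using only classes and constants declared in this commit:

```java
import org.tikv.common.codec.CodecDataOutput;
import org.tikv.common.codec.MetaCodec;

public class MetaKeyExample {
  public static void main(String[] args) {
    // Hash-data key: key = "DBs", field = the encoded id of database 1.
    CodecDataOutput cdo = new CodecDataOutput();
    MetaCodec.encodeHashDataKey(
        cdo, MetaCodec.KEY_DBs.toByteArray(), MetaCodec.encodeDatabaseID(1).toByteArray());
    System.out.println(cdo.toBytes().length); // length of the fully encoded meta key
  }
}
```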
|
||||
|
|
@ -16,12 +16,14 @@
|
|||
package org.tikv.common.codec;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import java.io.Serializable;
|
||||
import java.math.BigDecimal;
|
||||
import java.math.BigInteger;
|
||||
import java.util.Arrays;
|
||||
|
||||
// TODO: We shouldn't allow empty MyDecimal
|
||||
// TODO: It seems MyDecimal to BigDecimal is very slow
|
||||
public class MyDecimal {
|
||||
public class MyDecimal implements Serializable {
|
||||
// how many digits that a word has
|
||||
private static final int digitsPerWord = 9;
|
||||
// MyDecimal can hold at most 9 words.
|
||||
|
|
@ -40,10 +42,43 @@ public class MyDecimal {
|
|||
private static final int ten9 = 1000000000;
|
||||
private static final int digMask = ten8;
|
||||
private static final int wordBase = ten9;
|
||||
private static final BigInteger wordBaseBigInt = BigInteger.valueOf(ten9);
|
||||
private static final int wordMax = wordBase - 1;
|
||||
private static final int[] div9 =
|
||||
new int[] {
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
2, 2, 2, 2, 2, 2, 2, 2, 2,
|
||||
3, 3, 3, 3, 3, 3, 3, 3, 3,
|
||||
4, 4, 4, 4, 4, 4, 4, 4, 4,
|
||||
5, 5, 5, 5, 5, 5, 5, 5, 5,
|
||||
6, 6, 6, 6, 6, 6, 6, 6, 6,
|
||||
7, 7, 7, 7, 7, 7, 7, 7, 7,
|
||||
8, 8, 8, 8, 8, 8, 8, 8, 8,
|
||||
9, 9, 9, 9, 9, 9, 9, 9, 9,
|
||||
10, 10, 10, 10, 10, 10, 10, 10, 10,
|
||||
11, 11, 11, 11, 11, 11, 11, 11, 11,
|
||||
12, 12, 12, 12, 12, 12, 12, 12, 12,
|
||||
13, 13, 13, 13, 13, 13, 13, 13, 13,
|
||||
14, 14,
|
||||
};
|
||||
private static final int[] powers10 =
|
||||
new int[] {ten0, ten1, ten2, ten3, ten4, ten5, ten6, ten7, ten8, ten9};
|
||||
|
||||
private static final BigInteger[] powers10BigInt =
|
||||
new BigInteger[] {
|
||||
BigInteger.valueOf(ten0),
|
||||
BigInteger.valueOf(ten1),
|
||||
BigInteger.valueOf(ten2),
|
||||
BigInteger.valueOf(ten3),
|
||||
BigInteger.valueOf(ten4),
|
||||
BigInteger.valueOf(ten5),
|
||||
BigInteger.valueOf(ten6),
|
||||
BigInteger.valueOf(ten7),
|
||||
BigInteger.valueOf(ten8),
|
||||
BigInteger.valueOf(ten9)
|
||||
};
|
||||
|
||||
// A MyDecimal holds 9 words.
|
||||
private static final int maxWordBufLen = 9;
|
||||
private static final int maxFraction = 30;
|
||||
|
|
@ -52,10 +87,50 @@ public class MyDecimal {
|
|||
// The following are fields of MyDecimal
|
||||
private int digitsInt;
|
||||
private int digitsFrac;
|
||||
private int resultFrac;
|
||||
private boolean negative;
|
||||
private int[] wordBuf = new int[maxWordBufLen];
|
||||
|
||||
public MyDecimal() {}
|
||||
|
||||
public MyDecimal(int digitsInt, int digitsFrac, boolean negative, int[] wordBuf) {
|
||||
this.digitsInt = digitsInt;
|
||||
this.digitsFrac = digitsFrac;
|
||||
this.negative = negative;
|
||||
this.wordBuf = wordBuf;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a word from an array using the given word size.
|
||||
*
|
||||
* @param b source data of unsigned bytes stored as an int[]
|
||||
* @param size word size in bytes (1, 2, 3, or 4)
|
||||
* @param start index at which reading starts
|
||||
*/
|
||||
@VisibleForTesting
|
||||
public static int readWord(int[] b, int size, int start) {
|
||||
int x = 0;
|
||||
switch (size) {
|
||||
case 1:
|
||||
x = (byte) b[start];
|
||||
break;
|
||||
case 2:
|
||||
x = (((byte) b[start]) << 8) + (b[start + 1] & 0xFF);
|
||||
break;
|
||||
case 3:
|
||||
int sign = b[start] & 128;
|
||||
if (sign > 0) {
|
||||
x = 0xFF << 24 | (b[start] << 16) | (b[start + 1] << 8) | (b[start + 2]);
|
||||
} else {
|
||||
x = b[start] << 16 | (b[start + 1] << 8) | b[start + 2];
|
||||
}
|
||||
break;
|
||||
case 4:
|
||||
x = b[start + 3] + (b[start + 2] << 8) + (b[start + 1] << 16) + (b[start] << 24);
|
||||
break;
|
||||
}
|
||||
return x;
|
||||
}
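The size == 3 arm above sign-extends a 24-bit big-endian value by OR-ing 0xFF into the top byte when the sign bit is set. A standalone check of just that arm:

```java
public class ReadWordSketch {
  // Same as the size == 3 case above: big-endian 24-bit value, sign-extended to 32 bits.
  static int read3(int[] b, int start) {
    if ((b[start] & 0x80) != 0) {
      return 0xFF << 24 | (b[start] << 16) | (b[start + 1] << 8) | b[start + 2];
    }
    return b[start] << 16 | (b[start + 1] << 8) | b[start + 2];
  }

  public static void main(String[] args) {
    System.out.println(read3(new int[] {0x00, 0x00, 0x7F}, 0)); // 127
    System.out.println(read3(new int[] {0xFF, 0xFF, 0x81}, 0)); // -127
  }
}
```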
|
||||
|
||||
/*
|
||||
* Returns the total precision of this decimal, essentially the sum of digitsInt and digitsFrac. There
|
||||
* are some special cases that need to be taken care of, such as 000.001.
|
||||
|
|
@ -104,16 +179,16 @@ public class MyDecimal {
|
|||
throw new IllegalArgumentException("Bad Float Number to parse");
|
||||
}
|
||||
|
||||
int digitsInt = precision - frac;
|
||||
int wordsInt = digitsInt / digitsPerWord;
|
||||
int leadingDigits = digitsInt - wordsInt * digitsPerWord;
|
||||
int wordsFrac = frac / digitsPerWord;
|
||||
int trailingDigits = frac - wordsFrac * digitsPerWord;
|
||||
int wordsIntTo = wordsInt;
|
||||
if (leadingDigits > 0) {
|
||||
int _digitsInt = precision - frac;
|
||||
int _wordsInt = _digitsInt / digitsPerWord;
|
||||
int _leadingDigits = _digitsInt - _wordsInt * digitsPerWord;
|
||||
int _wordsFrac = frac / digitsPerWord;
|
||||
int trailingDigits = frac - _wordsFrac * digitsPerWord;
|
||||
int wordsIntTo = _wordsInt;
|
||||
if (_leadingDigits > 0) {
|
||||
wordsIntTo++;
|
||||
}
|
||||
int wordsFracTo = wordsFrac;
|
||||
int wordsFracTo = _wordsFrac;
|
||||
if (trailingDigits > 0) {
|
||||
wordsFracTo++;
|
||||
}
|
||||
|
|
@ -139,41 +214,41 @@ public class MyDecimal {
|
|||
wordsFracTo = 0;
|
||||
overflow = true;
|
||||
} else {
|
||||
wordsIntTo = wordsInt;
|
||||
wordsFracTo = wordBufLen - wordsInt;
|
||||
wordsIntTo = _wordsInt;
|
||||
wordsFracTo = wordBufLen - _wordsInt;
|
||||
truncated = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (overflow || truncated) {
|
||||
if (wordsIntTo < oldWordsIntTo) {
|
||||
binIdx += dig2bytes[leadingDigits] + (wordsInt - wordsIntTo) * wordSize;
|
||||
binIdx += dig2bytes[_leadingDigits] + (_wordsInt - wordsIntTo) * wordSize;
|
||||
} else {
|
||||
trailingDigits = 0;
|
||||
wordsFrac = wordsFracTo;
|
||||
_wordsFrac = wordsFracTo;
|
||||
}
|
||||
}
|
||||
|
||||
this.negative = mask != 0;
|
||||
this.digitsInt = (byte) (wordsInt * digitsPerWord + leadingDigits);
|
||||
this.digitsFrac = (byte) (wordsFrac * digitsPerWord + trailingDigits);
|
||||
this.digitsInt = (byte) (_wordsInt * digitsPerWord + _leadingDigits);
|
||||
this.digitsFrac = (byte) (_wordsFrac * digitsPerWord + trailingDigits);
|
||||
|
||||
int wordIdx = 0;
|
||||
if (leadingDigits > 0) {
|
||||
int i = dig2bytes[leadingDigits];
|
||||
if (_leadingDigits > 0) {
|
||||
int i = dig2bytes[_leadingDigits];
|
||||
int x = readWord(bin, i, binIdx);
|
||||
binIdx += i;
|
||||
this.wordBuf[wordIdx] = (x ^ mask) > 0 ? x ^ mask : (x ^ mask) & 0xFF;
|
||||
if (this.wordBuf[wordIdx] >= powers10[leadingDigits + 1]) {
|
||||
if (this.wordBuf[wordIdx] >= powers10[_leadingDigits + 1]) {
|
||||
throw new IllegalArgumentException("BadNumber");
|
||||
}
|
||||
if (this.wordBuf[wordIdx] != 0) {
|
||||
wordIdx++;
|
||||
} else {
|
||||
this.digitsInt -= leadingDigits;
|
||||
this.digitsInt -= _leadingDigits;
|
||||
}
|
||||
}
|
||||
for (int stop = binIdx + wordsInt * wordSize; binIdx < stop; binIdx += wordSize) {
|
||||
for (int stop = binIdx + _wordsInt * wordSize; binIdx < stop; binIdx += wordSize) {
|
||||
this.wordBuf[wordIdx] = (readWord(bin, 4, binIdx) ^ mask);
|
||||
if (this.wordBuf[wordIdx] > wordMax) {
|
||||
throw new IllegalArgumentException("BadNumber");
|
||||
|
|
@ -185,7 +260,7 @@ public class MyDecimal {
|
|||
}
|
||||
}
|
||||
|
||||
for (int stop = binIdx + wordsFrac * wordSize; binIdx < stop; binIdx += wordSize) {
|
||||
for (int stop = binIdx + _wordsFrac * wordSize; binIdx < stop; binIdx += wordSize) {
|
||||
int x = readWord(bin, 4, binIdx);
|
||||
this.wordBuf[wordIdx] = (x ^ mask) > 0 ? x ^ mask : (x ^ mask) & 0xFF;
|
||||
if (this.wordBuf[wordIdx] > wordMax) {
|
||||
|
|
@ -203,22 +278,11 @@ public class MyDecimal {
|
|||
if (this.wordBuf[wordIdx] > wordMax) {
|
||||
throw new IllegalArgumentException("BadNumber");
|
||||
}
|
||||
wordIdx++;
|
||||
}
|
||||
|
||||
this.resultFrac = frac;
|
||||
return binSize;
|
||||
}
|
||||
|
||||
/** Returns a double value from MyDecimal instance. */
|
||||
public BigDecimal toDecimal() {
|
||||
return new BigDecimal(toString());
|
||||
}
|
||||
|
||||
public double toDouble() {
|
||||
return Float.parseFloat(toString());
|
||||
}
|
||||
|
||||
/** Truncates any prefix zeros such as 00.001. After this, digitsInt is truncated from 2 to 0. */
|
||||
private int[] removeLeadingZeros() {
|
||||
int wordIdx = 0;
|
||||
|
|
@ -241,7 +305,7 @@ public class MyDecimal {
|
|||
}
|
||||
|
||||
/**
|
||||
* Counts the number of digits of prefix zeors. For 00.001, it reutrns two.
|
||||
* Counts the number of digits of prefix zeroes. For 00.001, it returns two.
|
||||
*
|
||||
* @param i i is index for getting powers10.
|
||||
* @param word word is a integer.
|
||||
|
|
@ -255,46 +319,12 @@ public class MyDecimal {
|
|||
return leading;
|
||||
}
|
||||
|
||||
private int min(int a, int b) {
|
||||
if (a > b) return b;
|
||||
else return a;
|
||||
}
|
||||
|
||||
/** Returns size of word for a give value with number of digits */
|
||||
private int digitsToWords(int digits) {
|
||||
return (digits + digitsPerWord - 1) / digitsPerWord;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a word from a array at given size.
|
||||
*
|
||||
* @param b b is source data of unsigned byte as int[]
|
||||
* @param size is word size which can be used in switch statement.
|
||||
* @param start start indicates the where start to read.
|
||||
*/
|
||||
@VisibleForTesting
|
||||
public static int readWord(int[] b, int size, int start) {
|
||||
int x = 0;
|
||||
switch (size) {
|
||||
case 1:
|
||||
x = (byte) b[start];
|
||||
break;
|
||||
case 2:
|
||||
x = (((byte) b[start]) << 8) + (b[start + 1] & 0xFF);
|
||||
break;
|
||||
case 3:
|
||||
int sign = b[start] & 128;
|
||||
if (sign > 0) {
|
||||
x = 0xFF << 24 | (b[start] << 16) | (b[start + 1] << 8) | (b[start + 2]);
|
||||
} else {
|
||||
x = b[start] << 16 | (b[start + 1] << 8) | b[start + 2];
|
||||
}
|
||||
break;
|
||||
case 4:
|
||||
x = b[start + 3] + (b[start + 2] << 8) + (b[start + 1] << 16) + (b[start] << 24);
|
||||
break;
|
||||
if ((digits + digitsPerWord - 1) >= 0 && ((digits + digitsPerWord - 1) < 128)) {
|
||||
return div9[digits + digitsPerWord - 1];
|
||||
}
|
||||
return x;
|
||||
return (digits + digitsPerWord - 1) / digitsPerWord;
|
||||
}
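The rewritten digitsToWords replaces a division with a lookup into div9, which simply holds n / 9 precomputed for n < 128; both branches compute ceil(digits / 9). A tiny illustration of the mapping:

```java
public class DigitsToWordsSketch {
  static final int DIGITS_PER_WORD = 9;

  // Same result as the lookup above: div9[d + 8] == (d + 8) / 9 == ceil(d / 9).
  static int digitsToWords(int digits) {
    return (digits + DIGITS_PER_WORD - 1) / DIGITS_PER_WORD;
  }

  public static void main(String[] args) {
    for (int digits : new int[] {1, 9, 10, 18, 19, 81}) {
      System.out.println(digits + " digits -> " + digitsToWords(digits) + " word(s)");
    }
    // 1..9 digits fit in one word, 10..18 in two, and so on.
  }
}
```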
|
||||
|
||||
/**
|
||||
|
|
@ -339,8 +369,8 @@ public class MyDecimal {
|
|||
for (; strIdx < str.length && Character.isDigit(str[strIdx]); ) {
|
||||
strIdx++;
|
||||
}
|
||||
// we initialize strIdx in case of sign notation, here we need substract startIdx from strIdx
|
||||
// casue strIdx is used for counting the number of digits.
|
||||
// we initialize strIdx in case of sign notation, here we need subtract startIdx from strIdx
|
||||
// cause strIdx is used for counting the number of digits.
|
||||
int digitsInt = strIdx - startIdx;
|
||||
int digitsFrac;
|
||||
int endIdx;
|
||||
|
|
@ -353,7 +383,6 @@ public class MyDecimal {
|
|||
digitsFrac = endIdx - strIdx - 1;
|
||||
} else {
|
||||
digitsFrac = 0;
|
||||
endIdx = strIdx;
|
||||
}
|
||||
|
||||
if (digitsInt + digitsFrac == 0) {
|
||||
|
|
@ -438,47 +467,18 @@ public class MyDecimal {
|
|||
if (allZero) {
|
||||
this.negative = false;
|
||||
}
|
||||
|
||||
this.resultFrac = this.digitsFrac;
|
||||
}
|
||||
|
||||
// parser a string to a int.
|
||||
private int strToLong(String str) {
|
||||
str = str.trim();
|
||||
if (str.isEmpty()) {
|
||||
return 0;
|
||||
}
|
||||
boolean negative = false;
|
||||
int i = 0;
|
||||
if (str.charAt(i) == '-') {
|
||||
negative = true;
|
||||
i++;
|
||||
} else if (str.charAt(i) == '+') {
|
||||
i++;
|
||||
}
|
||||
|
||||
int r = 0;
|
||||
for (; i < str.length(); i++) {
|
||||
if (!Character.isDigit(str.charAt(i))) {
|
||||
break;
|
||||
}
|
||||
r = r * 10 + (str.charAt(i) - '0');
|
||||
}
|
||||
|
||||
if (negative) {
|
||||
r = -r;
|
||||
}
|
||||
return r;
|
||||
}
|
||||
|
||||
// Returns a decimal string.
|
||||
@Override
|
||||
public String toString() {
|
||||
char[] str;
|
||||
int digitsFrac = this.digitsFrac;
|
||||
|
||||
int _digitsFrac = this.digitsFrac;
|
||||
int[] res = removeLeadingZeros();
|
||||
int wordStartIdx = res[0];
|
||||
int digitsInt = res[1];
|
||||
if (digitsInt + digitsFrac == 0) {
|
||||
if (digitsInt + _digitsFrac == 0) {
|
||||
digitsInt = 1;
|
||||
wordStartIdx = 0;
|
||||
}
|
||||
|
|
@ -487,50 +487,38 @@ public class MyDecimal {
|
|||
if (digitsIntLen == 0) {
|
||||
digitsIntLen = 1;
|
||||
}
|
||||
int digitsFracLen = digitsFrac;
|
||||
int digitsFracLen = _digitsFrac;
|
||||
int length = digitsIntLen + digitsFracLen;
|
||||
if (this.negative) {
|
||||
length++;
|
||||
}
|
||||
if (digitsFrac > 0) {
|
||||
if (_digitsFrac > 0) {
|
||||
length++;
|
||||
}
|
||||
str = new char[length];
|
||||
|
||||
int strIdx = 0;
|
||||
if (this.negative) {
|
||||
str[strIdx] = '-';
|
||||
strIdx++;
|
||||
}
|
||||
int fill = 0;
|
||||
if (digitsFrac > 0) {
|
||||
|
||||
if (_digitsFrac > 0) {
|
||||
int fracIdx = strIdx + digitsIntLen;
|
||||
fill = digitsFracLen - digitsFrac;
|
||||
int wordIdx = wordStartIdx + digitsToWords(digitsInt);
|
||||
str[fracIdx] = '.';
|
||||
fracIdx++;
|
||||
for (; digitsFrac > 0; digitsFrac -= digitsPerWord) {
|
||||
for (; _digitsFrac > 0; _digitsFrac -= digitsPerWord) {
|
||||
int x = this.wordBuf[wordIdx];
|
||||
wordIdx++;
|
||||
for (int i = min(digitsFrac, digitsPerWord); i > 0; i--) {
|
||||
for (int i = Math.min(_digitsFrac, MyDecimal.digitsPerWord); i > 0; i--) {
|
||||
int y = x / digMask;
|
||||
str[fracIdx] = (char) (y + '0');
|
||||
str[fracIdx] = (char) ((char) y + '0');
|
||||
fracIdx++;
|
||||
x -= y * digMask;
|
||||
x *= 10;
|
||||
}
|
||||
}
|
||||
for (; fill > 0; fill--) {
|
||||
str[fracIdx] = '0';
|
||||
fracIdx++;
|
||||
}
|
||||
}
|
||||
fill = digitsIntLen - digitsInt;
|
||||
if (digitsInt == 0) {
|
||||
fill--; /* symbol 0 before digital point */
|
||||
}
|
||||
for (; fill > 0; fill--) {
|
||||
str[strIdx] = '0';
|
||||
strIdx++;
|
||||
}
|
||||
if (digitsInt > 0) {
|
||||
strIdx += digitsInt;
|
||||
|
|
@ -538,11 +526,11 @@ public class MyDecimal {
|
|||
for (; digitsInt > 0; digitsInt -= digitsPerWord) {
|
||||
wordIdx--;
|
||||
int x = this.wordBuf[wordIdx];
|
||||
for (int i = min(digitsInt, digitsPerWord); i > 0; i--) {
|
||||
int y = x / 10;
|
||||
for (int i = Math.min(digitsInt, MyDecimal.digitsPerWord); i > 0; i--) {
|
||||
int temp = x / 10;
|
||||
strIdx--;
|
||||
str[strIdx] = (char) ('0' + (x - y * 10));
|
||||
x = y;
|
||||
str[strIdx] = (char) ('0' + (x - temp * 10));
|
||||
x = temp;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
|
@ -552,52 +540,6 @@ public class MyDecimal {
|
|||
return new String(str);
|
||||
}
|
||||
|
||||
private int stringSize() {
|
||||
return digitsInt + digitsFrac + 3;
|
||||
}
|
||||
|
||||
public long toLong() {
|
||||
long x = 0;
|
||||
int wordIdx = 0;
|
||||
for (int i = this.digitsInt; i > 0; i -= digitsPerWord) {
|
||||
/*
|
||||
Attention: trick!
|
||||
we're calculating -|from| instead of |from| here
|
||||
because |LONGLONG_MIN| > LONGLONG_MAX
|
||||
so we can convert -9223372036854775808 correctly
|
||||
*/
|
||||
long y = x;
|
||||
x = x * wordBase - (long) this.wordBuf[wordIdx];
|
||||
wordIdx++;
|
||||
if (y < Long.MIN_VALUE / wordBase || x > y) {
|
||||
/*
|
||||
the decimal is bigger than any possible integer
|
||||
return border integer depending on the sign
|
||||
*/
|
||||
if (this.negative) {
|
||||
return Long.MIN_VALUE;
|
||||
}
|
||||
return Long.MAX_VALUE;
|
||||
}
|
||||
}
|
||||
|
||||
/* boundary case: 9223372036854775808 */
|
||||
if (!this.negative && x == Long.MIN_VALUE) {
|
||||
return Long.MAX_VALUE;
|
||||
}
|
||||
|
||||
if (!this.negative) {
|
||||
x = -x;
|
||||
}
|
||||
for (int i = this.digitsFrac; i > 0; i -= digitsPerWord) {
|
||||
if (this.wordBuf[wordIdx] != 0) {
|
||||
return x;
|
||||
}
|
||||
wordIdx++;
|
||||
}
|
||||
return x;
|
||||
}
|
||||
|
||||
// decimalBinSize returns the size of array to hold a binary representation of a decimal.
|
||||
private int decimalBinSize(int precision, int frac) {
|
||||
int digitsInt = precision - frac;
|
||||
|
|
@ -629,10 +571,10 @@ public class MyDecimal {
|
|||
*
|
||||
* <p>This binary format is as follows: 1. First the number is converted to have a requested
|
||||
* precision and frac. 2. Every full digitsPerWord digits of digitsInt part are stored in 4 bytes
|
||||
* as is 3. The first digitsInt % digitesPerWord digits are stored in the reduced number of bytes
|
||||
* as is 3. The first digitsInt % digitsPerWord digits are stored in the reduced number of bytes
|
||||
* (enough bytes to store this number of digits - see dig2bytes) 4. same for frac - full word are
|
||||
* stored as is, the last frac % digitsPerWord digits - in the reduced number of bytes. 5. If the
|
||||
* number is negative - every byte is inversed. 5. The very first bit of the resulting byte array
|
||||
* number is negative - every byte is inverted. 5. The very first bit of the resulting byte array
|
||||
* is inverted (because memcmp compares unsigned bytes, see property 2 above)
|
||||
*
|
||||
* <p>Example:
|
||||
|
|
@ -686,11 +628,11 @@ public class MyDecimal {
|
|||
mask = -1;
|
||||
}
|
||||
|
||||
int digitsInt = precision - frac;
|
||||
int wordsInt = digitsInt / digitsPerWord;
|
||||
int leadingDigits = digitsInt - wordsInt * digitsPerWord;
|
||||
int wordsFrac = frac / digitsPerWord;
|
||||
int trailingDigits = frac - wordsFrac * digitsPerWord;
|
||||
int digitsInt = precision - frac; // how many digits before dot
|
||||
int wordsInt = digitsInt / digitsPerWord; // how many words to store the int part before the dot
|
||||
int leadingDigits = digitsInt - wordsInt * digitsPerWord; // first digits
|
||||
int wordsFrac = frac / digitsPerWord; // how many words to store the frac part after the dot
|
||||
int trailingDigits = frac - wordsFrac * digitsPerWord; // last digits
|
||||
|
||||
// this should be one of 0, 1, 2, 3, 4
|
||||
int wordsFracFrom = this.digitsFrac / digitsPerWord;
|
||||
|
|
@ -826,4 +768,59 @@ public class MyDecimal {
|
|||
this.digitsInt = 0;
|
||||
this.negative = false;
|
||||
}
|
||||
|
||||
private BigInteger toBigInteger() {
|
||||
BigInteger x = BigInteger.ZERO;
|
||||
int wordIdx = 0;
|
||||
for (int i = this.digitsInt; i > 0; i -= digitsPerWord) {
|
||||
x = x.multiply(wordBaseBigInt).add(BigInteger.valueOf(this.wordBuf[wordIdx]));
|
||||
wordIdx++;
|
||||
}
|
||||
|
||||
for (int i = this.digitsFrac; i > 0; i -= digitsPerWord) {
|
||||
x = x.multiply(wordBaseBigInt).add(BigInteger.valueOf(this.wordBuf[wordIdx]));
|
||||
wordIdx++;
|
||||
}
|
||||
|
||||
if (digitsFrac % digitsPerWord != 0) {
|
||||
x = x.divide(powers10BigInt[digitsPerWord - digitsFrac % digitsPerWord]);
|
||||
}
|
||||
if (negative) {
|
||||
x = x.negate();
|
||||
}
|
||||
return x;
|
||||
}
|
||||
|
||||
public long toLong() {
|
||||
long x = 0;
|
||||
int wordIdx = 0;
|
||||
for (int i = this.digitsInt; i > 0; i -= digitsPerWord) {
|
||||
x = x * wordBase + this.wordBuf[wordIdx];
|
||||
wordIdx++;
|
||||
}
|
||||
|
||||
for (int i = this.digitsFrac; i > 0; i -= digitsPerWord) {
|
||||
x = x * wordBase + this.wordBuf[wordIdx];
|
||||
wordIdx++;
|
||||
}
|
||||
|
||||
if (digitsFrac % digitsPerWord != 0) {
|
||||
x = x / powers10[digitsPerWord - digitsFrac % digitsPerWord];
|
||||
}
|
||||
|
||||
if (negative) {
|
||||
x = -x;
|
||||
}
|
||||
return x;
|
||||
}
|
||||
|
||||
public BigDecimal toBigDecimal() {
|
||||
// 19 is the length of digits of Long.MAX_VALUE
|
||||
// If a decimal can be expressed as a long value, we should use toLong method which has
|
||||
// better performance than toBigInteger.
|
||||
if (digitsInt + digitsFrac < 19) {
|
||||
return new BigDecimal(BigInteger.valueOf(toLong()), digitsFrac);
|
||||
}
|
||||
return new BigDecimal(toBigInteger(), digitsFrac);
|
||||
}
|
||||
}
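A short usage sketch of the conversion path this class now exposes, using only methods that appear in this diff (fromString, toBigDecimal, toBin, precision, frac):

```java
import java.math.BigDecimal;
import org.tikv.common.codec.MyDecimal;

public class MyDecimalExample {
  public static void main(String[] args) {
    MyDecimal dec = new MyDecimal();
    dec.fromString("123.456");

    // Small values take the long-based fast path; wider ones go through BigInteger.
    BigDecimal value = dec.toBigDecimal();
    System.out.println(value); // 123.456

    // The binary wire format is produced for an explicit precision and fraction.
    int[] bin = dec.toBin(dec.precision(), dec.frac());
    System.out.println(bin.length);
  }
}
```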
|
||||
|
|
|
|||
|
|
@ -0,0 +1,180 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.codec;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.sql.Date;
|
||||
import java.sql.Timestamp;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.tikv.common.ExtendedDateTime;
|
||||
import org.tikv.common.codec.Codec.DateTimeCodec;
|
||||
import org.tikv.common.codec.Codec.DecimalCodec;
|
||||
import org.tikv.common.codec.Codec.EnumCodec;
|
||||
import org.tikv.common.codec.Codec.SetCodec;
|
||||
import org.tikv.common.exception.CodecException;
|
||||
import org.tikv.common.types.Converter;
|
||||
import org.tikv.common.types.DataType;
|
||||
import org.tikv.common.util.JsonUtils;
|
||||
|
||||
public class RowDecoderV2 {
|
||||
|
||||
private static final long SIGN_MASK = 0x8000000000000000L;
|
||||
|
||||
public static Object decodeCol(byte[] colData, DataType tp) {
|
||||
switch (tp.getType()) {
|
||||
case TypeLonglong:
|
||||
case TypeLong:
|
||||
case TypeInt24:
|
||||
case TypeShort:
|
||||
case TypeTiny:
|
||||
// TODO: decode consider unsigned
|
||||
return decodeInt(colData);
|
||||
case TypeFloat:
|
||||
return decodeFloat(colData);
|
||||
case TypeDouble:
|
||||
return decodeDouble(colData);
|
||||
case TypeString:
|
||||
case TypeVarString:
|
||||
case TypeVarchar:
|
||||
return new String(colData, StandardCharsets.UTF_8);
|
||||
case TypeBlob:
|
||||
case TypeTinyBlob:
|
||||
case TypeMediumBlob:
|
||||
case TypeLongBlob:
|
||||
return colData;
|
||||
case TypeNewDecimal:
|
||||
return decodeDecimal(colData);
|
||||
case TypeBit:
|
||||
int byteSize = (int) ((tp.getLength() + 7) >>> 3);
|
||||
return decodeBit(decodeInt(colData), byteSize);
|
||||
case TypeDate:
|
||||
return new Date(decodeTimestamp(colData, Converter.getLocalTimezone()).getTime());
|
||||
case TypeDatetime:
|
||||
return decodeTimestamp(colData, Converter.getLocalTimezone());
|
||||
case TypeTimestamp:
|
||||
return decodeTimestamp(colData, DateTimeZone.UTC);
|
||||
case TypeDuration:
|
||||
case TypeYear:
|
||||
return decodeInt(colData);
|
||||
case TypeEnum:
|
||||
return decodeEnum(colData, tp.getElems());
|
||||
case TypeSet:
|
||||
return decodeSet(colData, tp.getElems());
|
||||
case TypeJSON:
|
||||
return decodeJson(colData);
|
||||
case TypeNull:
|
||||
return null;
|
||||
case TypeDecimal:
|
||||
case TypeGeometry:
|
||||
case TypeNewDate:
|
||||
throw new CodecException("type should not appear in colData");
|
||||
default:
|
||||
throw new CodecException("invalid data type " + tp.getType().name());
|
||||
}
|
||||
}
|
||||
|
||||
private static long decodeInt(byte[] val) {
|
||||
switch (val.length) {
|
||||
case 1:
|
||||
return val[0];
|
||||
case 2:
|
||||
return new CodecDataInputLittleEndian(val).readShort();
|
||||
case 4:
|
||||
return new CodecDataInputLittleEndian(val).readInt();
|
||||
default:
|
||||
return new CodecDataInputLittleEndian(val).readLong();
|
||||
}
|
||||
}
|
||||
|
||||
private static float decodeFloat(byte[] val) {
|
||||
return (float) decodeDouble(val);
|
||||
}
|
||||
|
||||
private static double decodeDouble(byte[] val) {
|
||||
CodecDataInput cdi = new CodecDataInput(val);
|
||||
if (val.length < 8) {
|
||||
throw new CodecException("insufficient bytes to decode value");
|
||||
}
|
||||
long u = cdi.readLong();
|
||||
// signMask is less than zero in int64.
|
||||
if ((u & SIGN_MASK) < 0) {
|
||||
u &= ~SIGN_MASK;
|
||||
} else {
|
||||
u = ~u;
|
||||
}
|
||||
return Double.longBitsToDouble(u);
|
||||
}
|
||||
|
||||
private static BigDecimal decodeDecimal(byte[] val) {
|
||||
return DecimalCodec.readDecimal(new CodecDataInputLittleEndian(val));
|
||||
}
|
||||
|
||||
private static byte[] trimLeadingZeroBytes(byte[] bytes) {
|
||||
if (bytes.length == 0) {
|
||||
return bytes;
|
||||
}
|
||||
int pos = 0, posMax = bytes.length - 1;
|
||||
for (; pos < posMax; pos++) {
|
||||
if (bytes[pos] != 0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
return Arrays.copyOfRange(bytes, pos, bytes.length);
|
||||
}
|
||||
|
||||
private static byte[] decodeBit(long val, int byteSize) {
|
||||
if (byteSize != -1 && (byteSize < 1 || byteSize > 8)) {
|
||||
throw new CodecException("Invalid byteSize " + byteSize);
|
||||
}
|
||||
CodecDataOutput cdo = new CodecDataOutput();
|
||||
cdo.writeLong(val);
|
||||
if (byteSize != -1) {
|
||||
return trimLeadingZeroBytes(cdo.toBytes());
|
||||
} else {
|
||||
return Arrays.copyOfRange(cdo.toBytes(), 8 - byteSize, 8);
|
||||
}
|
||||
}
|
||||
|
||||
private static Timestamp decodeTimestamp(byte[] val, DateTimeZone tz) {
|
||||
ExtendedDateTime extendedDateTime =
|
||||
DateTimeCodec.fromPackedLong(new CodecDataInputLittleEndian(val).readLong(), tz);
|
||||
// Even though null is filtered out, data like 0000-00-00 can still exist;
// following MySQL JDBC behavior, we choose the ROUND behavior and convert it to the
// nearest valid value, which is 0001-01-01.
|
||||
if (extendedDateTime == null) {
|
||||
return DateTimeCodec.createExtendedDateTime(tz, 1, 1, 1, 0, 0, 0, 0).toTimeStamp();
|
||||
}
|
||||
return extendedDateTime.toTimeStamp();
|
||||
}
|
||||
|
||||
private static String decodeEnum(byte[] val, List<String> elems) {
|
||||
int idx = (int) decodeInt(val) - 1;
|
||||
return EnumCodec.readEnumFromIndex(idx, elems);
|
||||
}
|
||||
|
||||
private static String decodeSet(byte[] val, List<String> elems) {
|
||||
long number = decodeInt(val);
|
||||
return SetCodec.readSetFromLong(number, elems);
|
||||
}
|
||||
|
||||
private static String decodeJson(byte[] val) {
|
||||
return JsonUtils.parseJson(new CodecDataInput(val)).toString();
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,368 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.codec;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.sql.Date;
|
||||
import java.sql.Timestamp;
|
||||
import java.util.Arrays;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.tikv.common.ExtendedDateTime;
|
||||
import org.tikv.common.codec.Codec.DateTimeCodec;
|
||||
import org.tikv.common.codec.Codec.DecimalCodec;
|
||||
import org.tikv.common.codec.Codec.EnumCodec;
|
||||
import org.tikv.common.exception.CodecException;
|
||||
import org.tikv.common.exception.TypeException;
|
||||
import org.tikv.common.meta.TiColumnInfo;
|
||||
import org.tikv.common.types.Converter;
|
||||
import org.tikv.common.types.DataType;
|
||||
|
||||
public class RowEncoderV2 {
|
||||
private static final long SIGN_MASK = 0x8000000000000000L;
|
||||
private int numCols;
|
||||
private Object[] values;
|
||||
private org.tikv.common.codec.RowV2 row;
|
||||
|
||||
public RowEncoderV2() {}
|
||||
|
||||
public byte[] encode(List<TiColumnInfo> columnInfos, List<Object> values) {
|
||||
this.row = org.tikv.common.codec.RowV2.createEmpty();
|
||||
numCols = columnInfos.size();
|
||||
for (int i = 0; i < numCols; i++) {
|
||||
if (columnInfos.get(i).getId() > 255) {
|
||||
this.row.large = true;
|
||||
}
|
||||
      if (values.get(i) == null) {
        this.row.numNullCols++;
      } else {
        this.row.numNotNullCols++;
      }
    }

    this.values = new Object[numCols];
    reformatCols(columnInfos, values);
    encodeRowCols(columnInfos);
    return this.row.toBytes();
  }

  private void reformatCols(List<TiColumnInfo> columnInfos, List<Object> valueList) {
    int nullIdx = numCols - row.numNullCols;
    int notNullIdx = 0;
    if (this.row.large) {
      row.initColIDs32();
      row.initOffsets32();
    } else {
      row.initColIDs();
      row.initOffsets();
    }
    for (int i = 0; i < numCols; i++) {
      int colID = (int) columnInfos.get(i).getId();
      Object value = valueList.get(i);
      if (value == null) {
        if (this.row.large) {
          this.row.colIDs32[nullIdx] = colID;
        } else {
          this.row.colIDs[nullIdx] = (byte) colID;
        }
        nullIdx++;
      } else {
        if (this.row.large) {
          this.row.colIDs32[notNullIdx] = colID;
        } else {
          this.row.colIDs[notNullIdx] = (byte) colID;
        }
        valueList.set(notNullIdx, value);
        notNullIdx++;
      }
    }
    // sort colIDs together with corresponding values
    int len = this.row.numNotNullCols;
    if (this.row.large) {
      int[] temp = Arrays.copyOfRange(this.row.colIDs32, 0, len);
      Integer[] idx = new Integer[len];
      for (int i = 0; i < len; i++) {
        idx[i] = i;
      }
      Arrays.sort(idx, Comparator.comparingInt(o -> this.row.colIDs32[o]));
      for (int i = 0; i < len; i++) {
        this.row.colIDs32[i] = temp[idx[i]];
        this.values[i] = valueList.get(idx[i]);
      }
      if (this.row.numNullCols > 0) {
        len = this.row.numNullCols;
        int start = this.row.numNotNullCols;
        temp = Arrays.copyOfRange(this.row.colIDs32, start, start + len);
        idx = new Integer[len];
        for (int i = 0; i < len; i++) {
          idx[i] = i;
        }
        Arrays.sort(idx, Comparator.comparingInt(o -> this.row.colIDs32[start + o]));
        for (int i = 0; i < len; i++) {
          // values should all be null
          this.row.colIDs32[start + i] = temp[idx[i]];
        }
      }
    } else {
      byte[] temp = Arrays.copyOfRange(this.row.colIDs, 0, len);
      Integer[] idx = new Integer[len];
      for (int i = 0; i < len; i++) {
        idx[i] = i;
      }
      Arrays.sort(idx, Comparator.comparingInt(o -> this.row.colIDs[o]));
      for (int i = 0; i < len; i++) {
        this.row.colIDs[i] = temp[idx[i]];
        this.values[i] = valueList.get(idx[i]);
      }
      if (this.row.numNullCols > 0) {
        len = this.row.numNullCols;
        int start = this.row.numNotNullCols;
        temp = Arrays.copyOfRange(this.row.colIDs, start, start + len);
        idx = new Integer[len];
        for (int i = 0; i < len; i++) {
          idx[i] = i;
        }
        Arrays.sort(idx, Comparator.comparingInt(o -> this.row.colIDs[start + o]));
        for (int i = 0; i < len; i++) {
          // values should all be null
          this.row.colIDs[start + i] = temp[idx[i]];
        }
      }
    }
  }

  private TiColumnInfo getColumnInfoByID(List<TiColumnInfo> columnInfos, int id) {
    for (TiColumnInfo columnInfo : columnInfos) {
      if (columnInfo.getId() == id) {
        return columnInfo;
      }
    }
    throw new CodecException("column id " + id + " not found in ColumnInfo");
  }

  private void encodeRowCols(List<TiColumnInfo> columnInfos) {
    CodecDataOutputLittleEndian cdo = new CodecDataOutputLittleEndian();
    for (int i = 0; i < this.row.numNotNullCols; i++) {
      Object o = this.values[i];
      if (this.row.large) {
        encodeValue(cdo, o, getColumnInfoByID(columnInfos, this.row.colIDs32[i]).getType());
      } else {
        encodeValue(cdo, o, getColumnInfoByID(columnInfos, this.row.colIDs[i]).getType());
      }
      if (cdo.size() > 0xffff && !this.row.large) {
        // only initialize once
        this.row.initColIDs32();
        for (int j = 0; j < numCols; j++) {
          this.row.colIDs32[j] = this.row.colIDs[j];
        }
        this.row.initOffsets32();
        if (numCols >= 0) {
          System.arraycopy(this.row.offsets, 0, this.row.offsets32, 0, numCols);
        }
        this.row.large = true;
      }
      if (this.row.large) {
        this.row.offsets32[i] = cdo.size();
      } else {
        this.row.offsets[i] = cdo.size();
      }
    }
    this.row.data = cdo.toBytes();
  }

  private void encodeValue(CodecDataOutput cdo, Object value, DataType tp) {
    switch (tp.getType()) {
      case TypeLonglong:
      case TypeLong:
      case TypeInt24:
      case TypeShort:
      case TypeTiny:
        // TODO: encode consider unsigned
        encodeInt(cdo, (long) value);
        break;
      case TypeFloat:
      case TypeDouble:
        if (value instanceof Double) {
          encodeDouble(cdo, value);
        } else if (value instanceof Float) {
          encodeFloat(cdo, value);
        } else {
          throw new TypeException("type does not match in encoding, should be float/double");
        }
        break;
      case TypeString:
      case TypeVarString:
      case TypeVarchar:
      case TypeBlob:
      case TypeTinyBlob:
      case TypeMediumBlob:
      case TypeLongBlob:
        encodeString(cdo, value);
        break;
      case TypeNewDecimal:
        encodeDecimal(cdo, value);
        break;
      case TypeBit:
        encodeBit(cdo, value);
        break;
      case TypeTimestamp:
        encodeTimestamp(cdo, value, DateTimeZone.UTC);
        break;
      case TypeDate:
      case TypeDatetime:
        encodeTimestamp(cdo, value, Converter.getLocalTimezone());
        break;
      case TypeDuration:
      case TypeYear:
        encodeInt(cdo, (long) value);
        break;
      case TypeEnum:
        encodeEnum(cdo, value, tp.getElems());
        break;
      case TypeSet:
        encodeSet(cdo, value, tp.getElems());
        break;
      case TypeJSON:
        encodeJson(cdo, value);
        break;
      case TypeNull:
        // ??
      case TypeDecimal:
      case TypeGeometry:
      case TypeNewDate:
        throw new CodecException("type should not appear in encoding");
      default:
        throw new CodecException("invalid data type: " + tp.getType().name());
    }
  }

  private void encodeInt(CodecDataOutput cdo, long value) {
    if (value == (byte) value) {
      cdo.writeByte((byte) value);
    } else if (value == (short) value) {
      cdo.writeShort((short) value);
    } else if (value == (int) value) {
      cdo.writeInt((int) value);
    } else {
      cdo.writeLong(value);
    }
  }

  private void encodeFloat(CodecDataOutput cdo, Object value) {
    long u = Double.doubleToLongBits((float) value);
    if ((float) value >= 0) {
      u |= SIGN_MASK;
    } else {
      u = ~u;
    }
    u = Long.reverseBytes(u);
    cdo.writeLong(u);
  }

  private void encodeDouble(CodecDataOutput cdo, Object value) {
    long u = Double.doubleToLongBits((double) value);
    if ((double) value >= 0) {
      u |= SIGN_MASK;
    } else {
      u = ~u;
    }
    u = Long.reverseBytes(u);
    cdo.writeLong(u);
  }

  private void encodeBit(CodecDataOutput cdo, Object value) {
    long s = 0;
    if (value instanceof Long) {
      s = (long) value;
    } else if (value instanceof byte[]) {
      for (byte b : (byte[]) value) {
        s <<= 8;
        s |= b;
      }
    } else {
      throw new CodecException("invalid bytes type " + value.getClass());
    }
    encodeInt(cdo, s);
  }

  private void encodeTimestamp(CodecDataOutput cdo, Object value, DateTimeZone tz) {
    if (value instanceof Timestamp) {
      Timestamp timestamp = (Timestamp) value;
      DateTime dateTime = new DateTime(timestamp.getTime());
      int nanos = timestamp.getNanos();
      ExtendedDateTime extendedDateTime = new ExtendedDateTime(dateTime, (nanos / 1000) % 1000);
      long t = DateTimeCodec.toPackedLong(extendedDateTime, tz);
      encodeInt(cdo, t);
    } else if (value instanceof Date) {
      ExtendedDateTime extendedDateTime =
          new ExtendedDateTime(new DateTime(((Date) value).getTime()));
      long t = DateTimeCodec.toPackedLong(extendedDateTime, tz);
      encodeInt(cdo, t);
    } else {
      throw new CodecException("invalid timestamp type " + value.getClass());
    }
  }

  private void encodeString(CodecDataOutput cdo, Object value) {
    if (value instanceof byte[]) {
      cdo.write((byte[]) value);
    } else if (value instanceof String) {
      cdo.write(((String) value).getBytes(StandardCharsets.UTF_8));
    } else {
      throw new CodecException("invalid string type " + value.getClass());
    }
  }

  private void encodeDecimal(CodecDataOutput cdo, Object value) {
    if (value instanceof MyDecimal) {
      MyDecimal dec = (MyDecimal) value;
      DecimalCodec.writeDecimal(cdo, dec, dec.precision(), dec.frac());
    } else if (value instanceof BigDecimal) {
      MyDecimal dec = new MyDecimal();
      BigDecimal decimal = (BigDecimal) value;
      int prec = decimal.precision();
      int frac = decimal.scale();
      dec.fromString(((BigDecimal) value).toPlainString());
      DecimalCodec.writeDecimal(cdo, dec, prec, frac);
    } else {
      throw new CodecException("invalid decimal type " + value.getClass());
    }
  }

  private void encodeEnum(CodecDataOutput cdo, Object value, List<String> elems) {
    if (value instanceof Integer) {
      encodeInt(cdo, (int) value);
    } else if (value instanceof String) {
      int val = EnumCodec.parseEnumName((String) value, elems);
      encodeInt(cdo, val);
    } else {
      throw new CodecException("invalid enum type " + value.getClass());
    }
  }

  private void encodeSet(CodecDataOutput cdo, Object value, List<String> elems) {
    // TODO: Support encoding set
    throw new CodecException("Set encoding is not yet supported.");
  }

  private void encodeJson(CodecDataOutput cdo, Object value) {
    // TODO: Support encoding JSON
    throw new CodecException("JSON encoding is not yet supported.");
  }
}
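
The encoder above never pays for eight bytes when fewer will do: encodeInt writes each integer in the narrowest of 1, 2, 4 or 8 bytes and relies on the per-column offsets to tell the decoder how wide each slice is. A minimal, self-contained sketch of that idea in plain Java (illustration only, not part of the client; unsigned handling and the real offset bookkeeping are ignored):

```java
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class VarWidthIntSketch {
  // Pick the narrowest of 1/2/4/8 bytes that round-trips the signed value, as encodeInt does.
  static byte[] encode(long value) {
    ByteBuffer buf = ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN);
    if (value == (byte) value) {
      buf.put((byte) value);
    } else if (value == (short) value) {
      buf.putShort((short) value);
    } else if (value == (int) value) {
      buf.putInt((int) value);
    } else {
      buf.putLong(value);
    }
    byte[] out = new byte[buf.position()];
    buf.flip();
    buf.get(out);
    return out;
  }

  // The decoder only needs the slice length, which in the row format comes from the offsets array.
  static long decode(byte[] data) {
    ByteBuffer buf = ByteBuffer.wrap(data).order(ByteOrder.LITTLE_ENDIAN);
    switch (data.length) {
      case 1:
        return buf.get();
      case 2:
        return buf.getShort();
      case 4:
        return buf.getInt();
      default:
        return buf.getLong();
    }
  }

  public static void main(String[] args) {
    long[] samples = {0, 127, 128, -129, 70000, 5_000_000_000L};
    for (long v : samples) {
      byte[] enc = encode(v);
      System.out.printf("%d -> %d byte(s) -> %d%n", v, enc.length, decode(enc));
    }
  }
}
```
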
@@ -0,0 +1,211 @@

/*
 * Copyright 2020 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.codec;

import java.util.Arrays;
import org.tikv.common.exception.InvalidCodecFormatException;

public class RowV2 {
  // CodecVer is the constant number that represents the new row format.
  public static int CODEC_VER = 0x80;
  // small: colID byte[], offsets int[], optimized for most cases.
  // large: colID long[], offsets long[].
  boolean large;
  int numNotNullCols;
  int numNullCols;
  byte[] colIDs;
  int[] offsets;
  byte[] data;
  // for large row
  int[] colIDs32;
  int[] offsets32;

  private RowV2(byte[] rowData) {
    fromBytes(rowData);
  }

  public static RowV2 createNew(byte[] rowData) {
    return new RowV2(rowData);
  }

  public static RowV2 createEmpty() {
    return new RowV2(false, 0, 0);
  }

  private RowV2(boolean large, int numNotNullCols, int numNullCols) {
    this.large = large;
    this.numNotNullCols = numNotNullCols;
    this.numNullCols = numNullCols;
  }

  public byte[] getData(int i) {
    int start = 0, end = 0;
    if (this.large) {
      if (i > 0) {
        start = this.offsets32[i - 1];
      }
      end = this.offsets32[i];
    } else {
      if (i > 0) {
        start = this.offsets[i - 1];
      }
      end = this.offsets[i];
    }
    return Arrays.copyOfRange(this.data, start, end);
  }

  private void fromBytes(byte[] rowData) {
    CodecDataInputLittleEndian cdi = new CodecDataInputLittleEndian(rowData);
    if (cdi.readUnsignedByte() != CODEC_VER) {
      throw new InvalidCodecFormatException("invalid codec version");
    }
    this.large = (cdi.readUnsignedByte() & 1) > 0;
    this.numNotNullCols = cdi.readUnsignedShort();
    this.numNullCols = cdi.readUnsignedShort();
    int cursor = 6;
    if (this.large) {
      int numCols = this.numNotNullCols + this.numNullCols;
      int colIDsLen = numCols * 4;
      this.colIDs32 = new int[numCols];
      for (int i = 0; i < numCols; i++) {
        this.colIDs32[i] = cdi.readInt();
      }
      cursor += colIDsLen;
      numCols = this.numNotNullCols;
      int offsetsLen = numCols * 4;
      this.offsets32 = new int[numCols];
      for (int i = 0; i < numCols; i++) {
        this.offsets32[i] = cdi.readInt();
      }
      cursor += offsetsLen;
    } else {
      int numCols = this.numNotNullCols + this.numNullCols;
      int colIDsLen = numCols;
      this.colIDs = new byte[numCols];
      cdi.readFully(this.colIDs, 0, numCols);
      cursor += colIDsLen;
      numCols = this.numNotNullCols;
      int offsetsLen = numCols * 2;
      this.offsets = new int[numCols];
      for (int i = 0; i < numCols; i++) {
        this.offsets[i] = cdi.readUnsignedShort();
      }
      cursor += offsetsLen;
    }
    this.data = Arrays.copyOfRange(rowData, cursor, rowData.length);
  }

  private void writeShortArray(CodecDataOutput cdo, int[] arr) {
    for (int value : arr) {
      cdo.writeShort(value);
    }
  }

  private void writeIntArray(CodecDataOutput cdo, int[] arr) {
    for (int value : arr) {
      cdo.writeInt(value);
    }
  }

  public byte[] toBytes() {
    CodecDataOutputLittleEndian cdo = new CodecDataOutputLittleEndian();
    cdo.write(CODEC_VER);
    cdo.write(this.large ? 1 : 0);
    cdo.writeShort(this.numNotNullCols);
    cdo.writeShort(this.numNullCols);
    if (this.large) {
      writeIntArray(cdo, this.colIDs32);
      writeIntArray(cdo, this.offsets32);
    } else {
      cdo.write(this.colIDs);
      writeShortArray(cdo, this.offsets);
    }
    cdo.write(this.data);
    return cdo.toBytes();
  }

  private int binarySearch(int i, int j, long colID) {
    while (i < j) {
      int h = (int) ((i + (long) j) >> 1);
      // i <= h < j
      long v;
      if (this.large) {
        v = this.colIDs32[h];
      } else {
        v = this.colIDs[h];
      }
      if (v < colID) {
        i = h + 1;
      } else if (v > colID) {
        j = h;
      } else {
        return h;
      }
    }
    return -1;
  }

  public ColIDSearchResult findColID(long colID) {
    int i = 0, j = this.numNotNullCols;
    ColIDSearchResult result = new ColIDSearchResult(-1, false, false);
    result.idx = binarySearch(i, j, colID);
    if (result.idx != -1) {
      return result;
    }

    // Search the column in null columns array.
    i = this.numNotNullCols;
    j = this.numNotNullCols + this.numNullCols;
    int id = binarySearch(i, j, colID);
    if (id != -1) {
      // colID found in null cols.
      result.isNull = true;
    } else {
      result.notFound = true;
    }
    return result;
  }

  public void initColIDs() {
    int numCols = this.numNotNullCols + this.numNullCols;
    this.colIDs = new byte[numCols];
  }

  public void initColIDs32() {
    int numCols = this.numNotNullCols + this.numNullCols;
    this.colIDs32 = new int[numCols];
  }

  public void initOffsets() {
    this.offsets = new int[this.numNotNullCols];
  }

  public void initOffsets32() {
    this.offsets32 = new int[this.numNotNullCols];
  }

  public static class ColIDSearchResult {
    int idx;
    boolean isNull;
    boolean notFound;

    private ColIDSearchResult(int idx, boolean isNull, boolean notFound) {
      this.idx = idx;
      this.isNull = isNull;
      this.notFound = notFound;
    }
  }
}
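
Before the column IDs, offsets and values, every v2 row starts with the fixed six-byte header that fromBytes reads: a version byte, a flag byte whose lowest bit selects the large layout, and two little-endian unsigned shorts holding the not-null and null column counts. A stand-alone sketch of unpacking such a header with java.nio (the byte values below are made up for illustration):

```java
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class RowV2HeaderSketch {
  public static void main(String[] args) {
    // A hand-built header: version 0x80, small layout, 2 not-null columns, 1 null column.
    byte[] header = {(byte) 0x80, 0x00, 0x02, 0x00, 0x01, 0x00};
    ByteBuffer buf = ByteBuffer.wrap(header).order(ByteOrder.LITTLE_ENDIAN);

    int version = buf.get() & 0xFF;       // must equal CODEC_VER (0x80)
    boolean large = (buf.get() & 1) > 0;  // bit 0 of the flag byte selects the large layout
    int numNotNullCols = buf.getShort() & 0xFFFF;
    int numNullCols = buf.getShort() & 0xFFFF;

    System.out.printf(
        "version=0x%02x large=%b notNull=%d null=%d%n",
        version, large, numNotNullCols, numNullCols);
    // After these 6 bytes come the sorted column IDs (1 or 4 bytes each), the offsets
    // (2 or 4 bytes each, not-null columns only) and finally the concatenated values.
  }
}
```
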
@@ -0,0 +1,56 @@

/*
 * Copyright 2020 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.codec;

import java.util.List;
import org.tikv.common.exception.CodecException;
import org.tikv.common.meta.TiColumnInfo;
import org.tikv.common.meta.TiTableInfo;
import org.tikv.common.row.Row;

public class TableCodec {
  public static byte[] encodeRow(
      List<TiColumnInfo> columnInfos,
      Object[] values,
      boolean isPkHandle,
      boolean encodeWithNewRowFormat)
      throws IllegalAccessException {
    if (columnInfos.size() != values.length) {
      throw new IllegalAccessException(
          String.format(
              "encodeRow error: data and columnID count not " + "match %d vs %d",
              columnInfos.size(), values.length));
    }
    if (encodeWithNewRowFormat) {
      return TableCodecV2.encodeRow(columnInfos, values, isPkHandle);
    }
    return TableCodecV1.encodeRow(columnInfos, values, isPkHandle);
  }

  public static Row decodeRow(byte[] value, Long handle, TiTableInfo tableInfo) {
    if (value.length == 0) {
      throw new CodecException("Decode fails: value length is zero");
    }
    if ((value[0] & 0xff) == org.tikv.common.codec.RowV2.CODEC_VER) {
      return TableCodecV2.decodeRow(value, handle, tableInfo);
    }
    return TableCodecV1.decodeRow(value, handle, tableInfo);
  }

  public static long decodeHandle(byte[] value) {
    return new CodecDataInput(value).readLong();
  }
}
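
TableCodec picks a codec purely from the first byte of the stored value: 0x80 cannot start a valid v1 row, so it doubles as the v2 version marker. A small self-contained illustration of that dispatch (the byte arrays are fabricated, not real encoded rows):

```java
public class RowFormatSniffer {
  private static final int CODEC_VER = 0x80; // same constant RowV2 uses

  /** Mirrors TableCodec.decodeRow's dispatch: the first byte decides the row format. */
  static String formatOf(byte[] encodedRow) {
    if (encodedRow.length == 0) {
      throw new IllegalArgumentException("value length is zero");
    }
    return (encodedRow[0] & 0xFF) == CODEC_VER ? "v2 (new row format)" : "v1 (colID/value pairs)";
  }

  public static void main(String[] args) {
    byte[] v2Row = {(byte) 0x80, 0x00, 0x01, 0x00, 0x00, 0x00};
    byte[] v1Row = {0x08, 0x02};
    System.out.println(formatOf(v2Row)); // v2 (new row format)
    System.out.println(formatOf(v1Row)); // v1 (colID/value pairs)
  }
}
```
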
@@ -0,0 +1,86 @@

/*
 * Copyright 2020 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.codec;

import java.util.HashMap;
import java.util.List;
import org.tikv.common.codec.Codec.IntegerCodec;
import org.tikv.common.meta.TiColumnInfo;
import org.tikv.common.meta.TiTableInfo;
import org.tikv.common.row.ObjectRowImpl;
import org.tikv.common.row.Row;
import org.tikv.common.types.DataType.EncodeType;
import org.tikv.common.types.IntegerType;

public class TableCodecV1 {
  /** Row layout: colID1, value1, colID2, value2, ..... */
  protected static byte[] encodeRow(
      List<TiColumnInfo> columnInfos, Object[] values, boolean isPkHandle) {
    CodecDataOutput cdo = new CodecDataOutput();

    for (int i = 0; i < columnInfos.size(); i++) {
      TiColumnInfo col = columnInfos.get(i);
      // skip pk is handle case
      if (col.isPrimaryKey() && isPkHandle) {
        continue;
      }
      IntegerCodec.writeLongFully(cdo, col.getId(), false);
      col.getType().encode(cdo, EncodeType.VALUE, values[i]);
    }

    // We could not set nil value into kv.
    if (cdo.toBytes().length == 0) {
      return new byte[] {Codec.NULL_FLAG};
    }

    return cdo.toBytes();
  }

  protected static Row decodeRow(byte[] value, Long handle, TiTableInfo tableInfo) {
    if (handle == null && tableInfo.isPkHandle()) {
      throw new IllegalArgumentException("when pk is handle, handle cannot be null");
    }

    int colSize = tableInfo.getColumns().size();
    HashMap<Long, TiColumnInfo> idToColumn = new HashMap<>(colSize);
    for (TiColumnInfo col : tableInfo.getColumns()) {
      idToColumn.put(col.getId(), col);
    }

    // decode bytes to Map<ColumnID, Data>
    HashMap<Long, Object> decodedDataMap = new HashMap<>(colSize);
    CodecDataInput cdi = new CodecDataInput(value);
    Object[] res = new Object[colSize];
    while (!cdi.eof()) {
      long colID = (long) IntegerType.BIGINT.decode(cdi);
      Object colValue = idToColumn.get(colID).getType().decodeForBatchWrite(cdi);
      decodedDataMap.put(colID, colValue);
    }

    // construct Row with Map<ColumnID, Data> & handle
    for (int i = 0; i < colSize; i++) {
      // skip pk is handle case
      TiColumnInfo col = tableInfo.getColumn(i);
      if (col.isPrimaryKey() && tableInfo.isPkHandle()) {
        res[i] = handle;
      } else {
        res[i] = decodedDataMap.get(col.getId());
      }
    }

    return ObjectRowImpl.create(res);
  }
}
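
The v1 layout is simply an alternating stream of column IDs and encoded values, which is why decodeRow can rebuild the row by reading pairs until EOF. A toy sketch of that pairing in plain Java (it uses fixed 8-byte longs instead of TiDB's real integer codec, purely to show the shape of the stream):

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;

public class V1LayoutSketch {
  // Writes colID1, value1, colID2, value2, ... like TableCodecV1.encodeRow does.
  static byte[] encode(Map<Long, Long> row) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bos);
    for (Map.Entry<Long, Long> e : row.entrySet()) {
      out.writeLong(e.getKey());   // column ID
      out.writeLong(e.getValue()); // column value
    }
    return bos.toByteArray();
  }

  // Reads (colID, value) pairs until the stream is exhausted.
  static Map<Long, Long> decode(byte[] data) throws IOException {
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
    Map<Long, Long> row = new LinkedHashMap<>();
    while (in.available() > 0) {
      row.put(in.readLong(), in.readLong());
    }
    return row;
  }

  public static void main(String[] args) throws IOException {
    Map<Long, Long> row = new LinkedHashMap<>();
    row.put(1L, 42L);
    row.put(3L, 7L);
    System.out.println(decode(encode(row))); // {1=42, 3=7}
  }
}
```
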
@@ -0,0 +1,90 @@

/*
 * Copyright 2020 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.codec;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.tikv.common.meta.TiColumnInfo;
import org.tikv.common.meta.TiTableInfo;
import org.tikv.common.row.ObjectRowImpl;
import org.tikv.common.row.Row;

public class TableCodecV2 {

  /**
   * New Row Format: Reference
   * https://github.com/pingcap/tidb/blob/952d1d7541a8e86be0af58f5b7e3d5e982bab34e/docs/design/2018-07-19-row-format.md
   *
   * <p>- version, flag, numOfNotNullCols, numOfNullCols, notNullCols, nullCols, notNullOffsets,
   * notNullValues
   */
  protected static byte[] encodeRow(
      List<TiColumnInfo> columnInfos, Object[] values, boolean isPkHandle) {
    RowEncoderV2 encoder = new RowEncoderV2();
    List<TiColumnInfo> columnInfoList = new ArrayList<>();
    List<Object> valueList = new ArrayList<>();
    for (int i = 0; i < columnInfos.size(); i++) {
      TiColumnInfo col = columnInfos.get(i);
      // skip pk is handle case
      if (col.isPrimaryKey() && isPkHandle) {
        continue;
      }
      columnInfoList.add(col);
      valueList.add(values[i]);
    }
    return encoder.encode(columnInfoList, valueList);
  }

  protected static Row decodeRow(byte[] value, Long handle, TiTableInfo tableInfo) {
    if (handle == null && tableInfo.isPkHandle()) {
      throw new IllegalArgumentException("when pk is handle, handle cannot be null");
    }
    int colSize = tableInfo.getColumns().size();
    // decode bytes to Map<ColumnID, Data>
    HashMap<Long, Object> decodedDataMap = new HashMap<>(colSize);
    org.tikv.common.codec.RowV2 rowV2 = org.tikv.common.codec.RowV2.createNew(value);

    for (TiColumnInfo col : tableInfo.getColumns()) {
      if (col.isPrimaryKey() && tableInfo.isPkHandle()) {
        decodedDataMap.put(col.getId(), handle);
        continue;
      }
      org.tikv.common.codec.RowV2.ColIDSearchResult searchResult = rowV2.findColID(col.getId());
      if (searchResult.isNull) {
        // current col is null, nothing should be added to decodedMap
        continue;
      }
      if (!searchResult.notFound) {
        // corresponding column should be found
        assert (searchResult.idx != -1);
        byte[] colData = rowV2.getData(searchResult.idx);
        Object d = RowDecoderV2.decodeCol(colData, col.getType());
        decodedDataMap.put(col.getId(), d);
      }
    }

    Object[] res = new Object[colSize];

    // construct Row with Map<ColumnID, Data> & handle
    for (int i = 0; i < colSize; i++) {
      // skip pk is handle case
      TiColumnInfo col = tableInfo.getColumn(i);
      res[i] = decodedDataMap.get(col.getId());
    }
    return ObjectRowImpl.create(res);
  }
}
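
Because RowV2 keeps its column IDs sorted, with not-null IDs first and null IDs after them, decodeRow can distinguish "has a value", "stored as NULL" and "absent from this row" with two binary searches, as findColID does. A self-contained sketch of that lookup over hypothetical column IDs:

```java
import java.util.Arrays;

public class ColIdLookupSketch {
  // Column IDs are stored sorted, not-null IDs first, then null IDs, exactly so that a
  // decoder can answer "has value / is null / not in this row" with two binary searches.
  static String lookup(int[] notNullIds, int[] nullIds, int colId) {
    if (Arrays.binarySearch(notNullIds, colId) >= 0) {
      return "has value";
    }
    if (Arrays.binarySearch(nullIds, colId) >= 0) {
      return "explicit NULL";
    }
    return "not stored in this row";
  }

  public static void main(String[] args) {
    int[] notNullIds = {1, 4, 7};
    int[] nullIds = {2, 9};
    System.out.println(lookup(notNullIds, nullIds, 4)); // has value
    System.out.println(lookup(notNullIds, nullIds, 9)); // explicit NULL
    System.out.println(lookup(notNullIds, nullIds, 5)); // not stored in this row
  }
}
```
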
@ -0,0 +1,201 @@
|
|||
/*
|
||||
* Copyright 2019 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.columnar;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
/** An implementation of {@link TiColumnVector}. All data is stored in TiDB chunk format. */
|
||||
public class BatchedTiChunkColumnVector extends TiColumnVector {
|
||||
private final List<TiChunkColumnVector> childColumns;
|
||||
private final int numOfNulls;
|
||||
private final int[] rightEndpoints;
|
||||
|
||||
public BatchedTiChunkColumnVector(List<TiChunkColumnVector> child, int numOfRows) {
|
||||
super(child.get(0).dataType(), numOfRows);
|
||||
this.childColumns = child;
|
||||
this.numOfNulls =
|
||||
child
|
||||
.stream()
|
||||
.reduce(
|
||||
0,
|
||||
(partialAgeResult, columnVector) -> partialAgeResult + columnVector.numNulls(),
|
||||
Integer::sum);
|
||||
int right = 0;
|
||||
this.rightEndpoints = new int[child.size() + 1];
|
||||
this.rightEndpoints[0] = 0;
|
||||
for (int i = 1; i < rightEndpoints.length; i++) {
|
||||
right += child.get(i - 1).numOfRows();
|
||||
this.rightEndpoints[i] = right;
|
||||
}
|
||||
}
|
||||
|
||||
public final String typeName() {
|
||||
return dataType().getType().name();
|
||||
}
|
||||
|
||||
// TODO: once we switch off_heap mode, we need control memory access pattern.
|
||||
public void free() {}
|
||||
|
||||
/**
|
||||
* Cleans up memory for this column vector. The column vector is not usable after this.
|
||||
*
|
||||
* <p>This overwrites `AutoCloseable.close` to remove the `throws` clause, as column vector is
|
||||
* in-memory and we don't expect any exception to happen during closing.
|
||||
*/
|
||||
@Override
|
||||
public void close() {}
|
||||
|
||||
/** Returns true if this column vector contains any null values. */
|
||||
@Override
|
||||
public boolean hasNull() {
|
||||
return numOfNulls > 0;
|
||||
}
|
||||
|
||||
/** Returns the number of nulls in this column vector. */
|
||||
@Override
|
||||
public int numNulls() {
|
||||
return numOfNulls;
|
||||
}
|
||||
|
||||
private int[] getColumnVectorIdxAndRowId(int rowId) {
|
||||
int offset = Arrays.binarySearch(this.rightEndpoints, rowId);
|
||||
int idx;
|
||||
if (offset >= 0) {
|
||||
idx = offset;
|
||||
} else {
|
||||
idx = -(offset + 2);
|
||||
}
|
||||
if (idx >= childColumns.size() || idx < 0) {
|
||||
throw new UnsupportedOperationException("Something goes wrong, it should never happen");
|
||||
}
|
||||
return new int[] {idx, rowId - rightEndpoints[idx]};
|
||||
}
|
||||
|
||||
/** Returns whether the value at rowId is NULL. */
|
||||
@Override
|
||||
public boolean isNullAt(int rowId) {
|
||||
int[] pair = getColumnVectorIdxAndRowId(rowId);
|
||||
return childColumns.get(pair[0]).isNullAt(pair[1]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the boolean type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public boolean getBoolean(int rowId) {
|
||||
int[] pair = getColumnVectorIdxAndRowId(rowId);
|
||||
return childColumns.get(pair[0]).getBoolean(pair[1]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the byte type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public byte getByte(int rowId) {
|
||||
int[] pair = getColumnVectorIdxAndRowId(rowId);
|
||||
return childColumns.get(pair[0]).getByte(pair[1]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the short type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public short getShort(int rowId) {
|
||||
int[] pair = getColumnVectorIdxAndRowId(rowId);
|
||||
return childColumns.get(pair[0]).getShort(pair[1]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the int type value for rowId. The return value is undefined and can be anything, if the
|
||||
* slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public int getInt(int rowId) {
|
||||
int[] pair = getColumnVectorIdxAndRowId(rowId);
|
||||
return childColumns.get(pair[0]).getInt(pair[1]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the long type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public long getLong(int rowId) {
|
||||
int[] pair = getColumnVectorIdxAndRowId(rowId);
|
||||
return childColumns.get(pair[0]).getLong(pair[1]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the float type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public float getFloat(int rowId) {
|
||||
int[] pair = getColumnVectorIdxAndRowId(rowId);
|
||||
return childColumns.get(pair[0]).getFloat(pair[1]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the double type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public double getDouble(int rowId) {
|
||||
int[] pair = getColumnVectorIdxAndRowId(rowId);
|
||||
return childColumns.get(pair[0]).getDouble(pair[1]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the decimal type value for rowId. If the slot for rowId is null, it should return null.
|
||||
*/
|
||||
@Override
|
||||
public BigDecimal getDecimal(int rowId, int precision, int scale) {
|
||||
int[] pair = getColumnVectorIdxAndRowId(rowId);
|
||||
return childColumns.get(pair[0]).getDecimal(pair[1], precision, scale);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the string type value for rowId. If the slot for rowId is null, it should return null.
|
||||
* Note that the returned UTF8String may point to the data of this column vector, please copy it
|
||||
* if you want to keep it after this column vector is freed.
|
||||
*/
|
||||
@Override
|
||||
public String getUTF8String(int rowId) {
|
||||
int[] pair = getColumnVectorIdxAndRowId(rowId);
|
||||
return childColumns.get(pair[0]).getUTF8String(pair[1]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the binary type value for rowId. If the slot for rowId is null, it should return null.
|
||||
*/
|
||||
@Override
|
||||
public byte[] getBinary(int rowId) {
|
||||
int[] pair = getColumnVectorIdxAndRowId(rowId);
|
||||
return childColumns.get(pair[0]).getBinary(pair[1]);
|
||||
}
|
||||
|
||||
/** @return child [[TiColumnVector]] at the given ordinal. */
|
||||
@Override
|
||||
protected TiColumnVector getChild(int ordinal) {
|
||||
throw new UnsupportedOperationException(
|
||||
"TiChunkBatchColumnVector does not support this operation");
|
||||
}
|
||||
}
|
||||
|
|
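
BatchedTiChunkColumnVector maps a batch-global rowId to a child chunk plus a local row by binary-searching a prefix-sum array of chunk sizes. A small stand-alone sketch of that mapping, mirroring getColumnVectorIdxAndRowId (the chunk sizes here are made up):

```java
import java.util.Arrays;

public class BatchedRowLookupSketch {
  // rightEndpoints[i] is the number of rows contained in the first i chunks (a prefix sum),
  // so a global rowId maps to (chunk index, local row) with one binary search.
  static int[] locate(int[] rightEndpoints, int rowId) {
    int offset = Arrays.binarySearch(rightEndpoints, rowId);
    // An exact hit on a boundary means the row starts the next chunk; a miss is decoded from
    // the (-insertionPoint - 1) convention of Arrays.binarySearch.
    int idx = offset >= 0 ? offset : -(offset + 2);
    return new int[] {idx, rowId - rightEndpoints[idx]};
  }

  public static void main(String[] args) {
    // Three chunks with 3, 2 and 4 rows: prefix sums are [0, 3, 5, 9].
    int[] rightEndpoints = {0, 3, 5, 9};
    System.out.println(Arrays.toString(locate(rightEndpoints, 0))); // [0, 0]
    System.out.println(Arrays.toString(locate(rightEndpoints, 3))); // [1, 0]
    System.out.println(Arrays.toString(locate(rightEndpoints, 6))); // [2, 1]
  }
}
```
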
@ -0,0 +1,314 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.columnar;
|
||||
|
||||
import static org.tikv.common.util.MemoryUtil.EMPTY_BYTE_BUFFER_DIRECT;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.sql.Timestamp;
|
||||
import org.joda.time.LocalDate;
|
||||
import org.tikv.common.columnar.datatypes.CHType;
|
||||
import org.tikv.common.types.AbstractDateTimeType;
|
||||
import org.tikv.common.types.BytesType;
|
||||
import org.tikv.common.types.DateType;
|
||||
import org.tikv.common.util.MemoryUtil;
|
||||
|
||||
public class TiBlockColumnVector extends TiColumnVector {
|
||||
long offsetsAddr;
|
||||
ByteBuffer offsets;
|
||||
long nullMapAddr;
|
||||
ByteBuffer nullMap;
|
||||
long dataAddr;
|
||||
ByteBuffer data;
|
||||
private int fixedLength;
|
||||
|
||||
public TiBlockColumnVector(CHType type, ByteBuffer data, int numOfRows, int fixedLength) {
|
||||
super(type.toDataType(), numOfRows);
|
||||
this.data = data;
|
||||
this.dataAddr = MemoryUtil.getAddress(data);
|
||||
fillEmptyNullMap();
|
||||
fillEmptyOffsets();
|
||||
this.fixedLength = fixedLength;
|
||||
}
|
||||
|
||||
public TiBlockColumnVector(CHType type) {
|
||||
super(type.toDataType(), 0);
|
||||
}
|
||||
|
||||
public TiBlockColumnVector(
|
||||
CHType type, ByteBuffer nullMap, ByteBuffer data, int numOfRows, int fixedLength) {
|
||||
// chType -> data type
|
||||
super(type.toDataType(), numOfRows);
|
||||
this.nullMap = nullMap;
|
||||
this.nullMapAddr = MemoryUtil.getAddress(nullMap);
|
||||
this.data = data;
|
||||
this.dataAddr = MemoryUtil.getAddress(data);
|
||||
fillEmptyOffsets();
|
||||
this.fixedLength = fixedLength;
|
||||
}
|
||||
|
||||
/** Sets up the data type of this column vector. */
|
||||
public TiBlockColumnVector(
|
||||
CHType type, ByteBuffer nullMap, ByteBuffer offsets, ByteBuffer data, int numOfRows) {
|
||||
// chType -> data type
|
||||
super(type.toDataType(), numOfRows);
|
||||
this.offsets = offsets;
|
||||
this.offsetsAddr = MemoryUtil.getAddress(offsets);
|
||||
this.nullMap = nullMap;
|
||||
this.nullMapAddr = MemoryUtil.getAddress(nullMap);
|
||||
this.data = data;
|
||||
this.dataAddr = MemoryUtil.getAddress(data);
|
||||
this.fixedLength = -1;
|
||||
}
|
||||
|
||||
private void fillEmptyNullMap() {
|
||||
this.nullMap = EMPTY_BYTE_BUFFER_DIRECT;
|
||||
this.nullMapAddr = MemoryUtil.getAddress(this.nullMap);
|
||||
}
|
||||
|
||||
private void fillEmptyOffsets() {
|
||||
this.offsets = EMPTY_BYTE_BUFFER_DIRECT;
|
||||
this.offsetsAddr = MemoryUtil.getAddress(this.offsets);
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleans up memory for this column vector. The column vector is not usable after this.
|
||||
*
|
||||
* <p>This overwrites `AutoCloseable.close` to remove the `throws` clause, as column vector is
|
||||
* in-memory and we don't expect any exception to happen during closing.
|
||||
*/
|
||||
@Override
|
||||
public void close() {
|
||||
if (dataAddr != 0) {
|
||||
MemoryUtil.free(data);
|
||||
}
|
||||
|
||||
if (offsetsAddr != 0) {
|
||||
MemoryUtil.free(offsets);
|
||||
}
|
||||
|
||||
if (nullMapAddr != 0) {
|
||||
MemoryUtil.free(nullMap);
|
||||
}
|
||||
dataAddr = 0;
|
||||
offsetsAddr = 0;
|
||||
nullMapAddr = 0;
|
||||
}
|
||||
|
||||
/** Returns true if this column vector contains any null values. */
|
||||
@Override
|
||||
public boolean hasNull() {
|
||||
return nullMap == null;
|
||||
}
|
||||
|
||||
/** Returns the number of nulls in this column vector. */
|
||||
@Override
|
||||
public int numNulls() {
|
||||
throw new UnsupportedOperationException("numNulls is not supported for TiBlockColumnVector");
|
||||
}
|
||||
|
||||
/** Returns whether the value at rowId is NULL. */
|
||||
@Override
|
||||
public boolean isNullAt(int rowId) {
|
||||
if (nullMap == EMPTY_BYTE_BUFFER_DIRECT) {
|
||||
return false;
|
||||
}
|
||||
return MemoryUtil.getByte(nullMapAddr + rowId) != 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the boolean type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public boolean getBoolean(int rowId) {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the byte type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public byte getByte(int rowId) {
|
||||
return MemoryUtil.getByte(dataAddr + rowId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the short type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public short getShort(int rowId) {
|
||||
return MemoryUtil.getShort(dataAddr + (rowId << 1));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the int type value for rowId. The return value is undefined and can be anything, if the
|
||||
* slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public int getInt(int rowId) {
|
||||
if (type instanceof DateType) {
|
||||
return (int) getTime(rowId);
|
||||
}
|
||||
return MemoryUtil.getInt(dataAddr + (rowId << 2));
|
||||
}
|
||||
|
||||
private long getDateTime(int rowId) {
|
||||
long v = MemoryUtil.getLong(dataAddr + (rowId << 3));
|
||||
long ymdhms = v >>> 24;
|
||||
long ymd = ymdhms >>> 17;
|
||||
int day = (int) (ymd & ((1 << 5) - 1));
|
||||
long ym = ymd >>> 5;
|
||||
int month = (int) (ym % 13);
|
||||
int year = (int) (ym / 13);
|
||||
|
||||
int hms = (int) (ymdhms & ((1 << 17) - 1));
|
||||
int second = hms & ((1 << 6) - 1);
|
||||
int minute = (hms >>> 6) & ((1 << 6) - 1);
|
||||
int hour = hms >>> 12;
|
||||
int microsec = (int) (v % (1 << 24));
|
||||
Timestamp ts =
|
||||
new Timestamp(year - 1900, month - 1, day, hour, minute, second, microsec * 1000);
|
||||
return ts.getTime() / 1000 * 1000000 + ts.getNanos() / 1000;
|
||||
}
|
||||
|
||||
private long getTime(int rowId) {
|
||||
long v = MemoryUtil.getLong(dataAddr + (rowId << 3));
|
||||
long ymd = v >>> 41;
|
||||
long ym = ymd >>> 5;
|
||||
int year = (int) (ym / 13);
|
||||
int month = (int) (ym % 13);
|
||||
int day = (int) (ymd & ((1 << 5) - 1));
|
||||
LocalDate date = new LocalDate(year, month, day);
|
||||
return Math.floorDiv(date.toDate().getTime(), AbstractDateTimeType.MILLS_PER_DAY);
|
||||
}
|
||||
/**
|
||||
* Returns the long type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public long getLong(int rowId) {
|
||||
if (type instanceof AbstractDateTimeType) {
|
||||
return getDateTime(rowId);
|
||||
}
|
||||
if (fixedLength == 1) {
|
||||
return getByte(rowId);
|
||||
} else if (fixedLength == 2) {
|
||||
return getShort(rowId);
|
||||
} else if (fixedLength == 4) {
|
||||
return getInt(rowId);
|
||||
} else if (fixedLength == 8) {
|
||||
return MemoryUtil.getLong(dataAddr + (rowId * fixedLength));
|
||||
}
|
||||
throw new UnsupportedOperationException(
|
||||
String.format("getting long with fixed length %d", fixedLength));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the float type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public float getFloat(int rowId) {
|
||||
return MemoryUtil.getFloat(dataAddr + (rowId * fixedLength));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the double type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public double getDouble(int rowId) {
|
||||
return MemoryUtil.getDouble(dataAddr + (rowId * fixedLength));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the decimal type value for rowId. If the slot for rowId is null, it should return null.
|
||||
*/
|
||||
@Override
|
||||
public BigDecimal getDecimal(int rowId, int precision, int scale) {
|
||||
long rowIdAddr = rowId * fixedLength + dataAddr;
|
||||
if (fixedLength == 4) {
|
||||
return MemoryUtil.getDecimal32(rowIdAddr, scale);
|
||||
} else if (fixedLength == 8) {
|
||||
return MemoryUtil.getDecimal64(rowIdAddr, scale);
|
||||
} else if (fixedLength == 16) {
|
||||
return MemoryUtil.getDecimal128(rowIdAddr, scale);
|
||||
} else {
|
||||
return MemoryUtil.getDecimal256(rowIdAddr, scale);
|
||||
}
|
||||
}
|
||||
|
||||
private long offsetAt(int i) {
|
||||
return i == 0 ? 0 : MemoryUtil.getLong(offsetsAddr + ((i - 1) << 3));
|
||||
}
|
||||
|
||||
public int sizeAt(int i) {
|
||||
return (int)
|
||||
(i == 0
|
||||
? MemoryUtil.getLong(offsetsAddr)
|
||||
: MemoryUtil.getLong(offsetsAddr + (i << 3))
|
||||
- MemoryUtil.getLong(offsetsAddr + ((i - 1) << 3)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the string type value for rowId. If the slot for rowId is null, it should return null.
|
||||
* Note that the returned UTF8String may point to the data of this column vector, please copy it
|
||||
* if you want to keep it after this column vector is freed.
|
||||
*/
|
||||
@Override
|
||||
public String getUTF8String(int rowId) {
|
||||
// FixedString case
|
||||
if (fixedLength != -1) {
|
||||
byte[] chars = new byte[fixedLength];
|
||||
MemoryUtil.getBytes((int) (dataAddr + fixedLength * rowId), chars, 0, fixedLength);
|
||||
return new String(chars);
|
||||
} else {
|
||||
long offset = (dataAddr + offsetAt(rowId));
|
||||
int numBytes = sizeAt(rowId) - 1;
|
||||
byte[] chars = new byte[numBytes];
|
||||
MemoryUtil.getBytes(offset, chars, 0, numBytes);
|
||||
return new String(chars, StandardCharsets.UTF_8);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the binary type value for rowId. If the slot for rowId is null, it should return null.
|
||||
*/
|
||||
@Override
|
||||
public byte[] getBinary(int rowId) {
|
||||
if (type.equals(BytesType.BLOB) || type.equals(BytesType.TINY_BLOB)) {
|
||||
long offset = (dataAddr + offsetAt(rowId));
|
||||
int numBytes = sizeAt(rowId) - 1;
|
||||
byte[] ret = new byte[numBytes];
|
||||
MemoryUtil.getBytes(offset, ret, 0, numBytes);
|
||||
return ret;
|
||||
} else {
|
||||
throw new UnsupportedOperationException(
|
||||
"get Binary for TiBlockColumnVector is not supported");
|
||||
}
|
||||
}
|
||||
|
||||
/** @return child [[TiColumnVector]] at the given ordinal. */
|
||||
@Override
|
||||
protected TiColumnVector getChild(int ordinal) {
|
||||
throw new UnsupportedOperationException("getChild is not supported for TiBlockColumnVector");
|
||||
}
|
||||
}
|
||||
|
|
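
getDateTime above unpacks TiDB's packed time representation: microseconds sit in the low 24 bits, hour/minute/second in the next 17, and day/month/year above that, with month and year sharing one field as year*13+month. A self-contained round-trip sketch using the same shifts as that method (the sample timestamp is arbitrary):

```java
public class PackedTimeSketch {
  // Pack a wall-clock time with the bit layout getDateTime decodes:
  // | year*13+month | day(5) | hour(5) | minute(6) | second(6) | microsecond(24) |
  static long pack(int year, int month, int day, int hour, int minute, int second, int micro) {
    long ymd = (year * 13L + month) << 5 | day;
    long hms = (long) hour << 12 | minute << 6 | second;
    return ((ymd << 17) | hms) << 24 | micro;
  }

  public static void main(String[] args) {
    long v = pack(2020, 3, 14, 15, 9, 26, 535898);

    // The exact shifts used by TiBlockColumnVector.getDateTime:
    long ymdhms = v >>> 24;
    long ymd = ymdhms >>> 17;
    int day = (int) (ymd & ((1 << 5) - 1));
    long ym = ymd >>> 5;
    int month = (int) (ym % 13);
    int year = (int) (ym / 13);
    int hms = (int) (ymdhms & ((1 << 17) - 1));
    int second = hms & ((1 << 6) - 1);
    int minute = (hms >>> 6) & ((1 << 6) - 1);
    int hour = hms >>> 12;
    int micro = (int) (v % (1 << 24));

    System.out.printf(
        "%04d-%02d-%02d %02d:%02d:%02d.%06d%n", year, month, day, hour, minute, second, micro);
  }
}
```
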
@@ -0,0 +1,42 @@

/*
 * Copyright 2019 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.columnar;

/**
 * TiChunk is an abstraction of Chunk data transmitted from TiKV. A Chunk represents a batch of
 * row data in columnar format.
 */
public class TiChunk {
  private final TiColumnVector[] columnVectors;
  private final int numOfRows;

  public TiChunk(TiColumnVector[] columnVectors) {
    this.columnVectors = columnVectors;
    this.numOfRows = columnVectors[0].numOfRows();
  }

  public TiColumnVector column(int ordinal) {
    return columnVectors[ordinal];
  }

  public int numOfCols() {
    return columnVectors.length;
  }

  public int numOfRows() {
    return numOfRows;
  }
}
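
A TiChunk is just an array of column vectors plus a shared row count, so consuming it is a nested loop over rows and columns. A sketch of how a caller might print integer-typed columns, assuming a chunk decoded elsewhere and using only the accessors defined in these classes:

```java
import org.tikv.common.columnar.TiChunk;
import org.tikv.common.columnar.TiColumnVector;

public class ChunkPrinter {
  // Walks a decoded chunk row by row; assumes every column here is integer-typed so getLong
  // is the right accessor (real code would dispatch on column(i).dataType()).
  static void printLongColumns(TiChunk chunk) {
    for (int row = 0; row < chunk.numOfRows(); row++) {
      StringBuilder line = new StringBuilder();
      for (int col = 0; col < chunk.numOfCols(); col++) {
        TiColumnVector v = chunk.column(col);
        line.append(v.isNullAt(row) ? "NULL" : Long.toString(v.getLong(row))).append('\t');
      }
      System.out.println(line.toString().trim());
    }
  }
}
```
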
@@ -0,0 +1,258 @@

/*
 * Copyright 2019 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.columnar;

import com.google.common.primitives.UnsignedLong;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.sql.Timestamp;
import org.joda.time.LocalDate;
import org.tikv.common.codec.CodecDataInput;
import org.tikv.common.codec.MyDecimal;
import org.tikv.common.types.*;
import org.tikv.common.util.JsonUtils;

/** An implementation of {@link TiColumnVector}. All data is stored in TiDB chunk format. */
public class TiChunkColumnVector extends TiColumnVector {
  /** Represents the length of each different data type */
  private final int fixLength;
  /** Represents how many nulls in this column vector */
  private final int numOfNulls;
  /** Can be used to determine data at rowId is null or not */
  private final byte[] nullBitMaps;
  /** Can be used to read non-fixed length data type such as string */
  private final long[] offsets;

  private final ByteBuffer data;

  public TiChunkColumnVector(
      DataType dataType,
      int fixLength,
      int numOfRows,
      int numOfNulls,
      byte[] nullBitMaps,
      long[] offsets,
      ByteBuffer data) {
    super(dataType, numOfRows);
    this.fixLength = fixLength;
    this.numOfNulls = numOfNulls;
    this.nullBitMaps = nullBitMaps;
    this.data = data;
    this.offsets = offsets;
  }

  public final String typeName() {
    return dataType().getType().name();
  }

  // TODO: once we switch off_heap mode, we need control memory access pattern.
  public void free() {}

  /**
   * Cleans up memory for this column vector. The column vector is not usable after this.
   *
   * <p>This overwrites `AutoCloseable.close` to remove the `throws` clause, as column vector is
   * in-memory and we don't expect any exception to happen during closing.
   */
  @Override
  public void close() {}

  /** Returns true if this column vector contains any null values. */
  @Override
  public boolean hasNull() {
    return numOfNulls > 0;
  }

  /** Returns the number of nulls in this column vector. */
  @Override
  public int numNulls() {
    return numOfNulls;
  }

  public boolean isNullAt(int rowId) {
    int nullByte = this.nullBitMaps[rowId / 8] & 0xFF;
    return (nullByte & (1 << (rowId & 7))) == 0;
  }

  /**
   * Returns the boolean type value for rowId. The return value is undefined and can be anything, if
   * the slot for rowId is null.
   */
  @Override
  public boolean getBoolean(int rowId) {
    return false;
  }

  public byte getByte(int rowId) {
    return data.get();
  }

  public short getShort(int rowId) {
    return data.getShort();
  }

  public int getInt(int rowId) {
    return (int) getLong(rowId);
  }

  private boolean isDataTimeOrTimestamp() {
    return type instanceof DateTimeType || type instanceof TimestampType;
  }

  private long getTime(int rowId) {
    int startPos = rowId * fixLength;
    TiCoreTime coreTime = new TiCoreTime(data.getLong(startPos));

    int year = coreTime.getYear();
    int month = coreTime.getMonth();
    int day = coreTime.getDay();
    int hour = coreTime.getHour();
    int minute = coreTime.getMinute();
    int second = coreTime.getSecond();
    long microsecond = coreTime.getMicroSecond();
    // This behavior can be modified using the zeroDateTimeBehavior configuration property.
    // The allowable values are:
    // * exception (the default), which throws an SQLException with an SQLState of S1009.
    // * convertToNull, which returns NULL instead of the date.
    // * round, which rounds the date to the nearest closest value which is 0001-01-01.
    if (year == 0 && month == 0 && day == 0 && hour == 0 && minute == 0 && microsecond == 0) {
      year = 1;
      month = 1;
      day = 1;
    }
    if (this.type instanceof DateType) {
      LocalDate date = new LocalDate(year, month, day);
      return Math.floorDiv(date.toDate().getTime(), AbstractDateTimeType.MILLS_PER_DAY);
    } else if (type instanceof DateTimeType || type instanceof TimestampType) {
      // only return microsecond from epoch.
      Timestamp ts =
          new Timestamp(
              year - 1900, month - 1, day, hour, minute, second, (int) microsecond * 1000);
      return ts.getTime() / 1000 * 1000000 + ts.getNanos() / 1000;
    } else {
      throw new UnsupportedOperationException("date, datetime, timestamp are already handled.");
    }
  }

  private long getLongFromBinary(int rowId) {
    byte[] bytes = getBinary(rowId);
    if (bytes.length == 0) return 0;
    long result = 0;
    for (byte b : bytes) {
      result = (result << 8) | b;
    }
    return result;
  }

  public long getLong(int rowId) {
    if (type instanceof IntegerType) {
      if (type instanceof BitType) {
        return getLongFromBinary(rowId);
      }
      return data.getLong(rowId * fixLength);
    } else if (type instanceof AbstractDateTimeType) {
      return getTime(rowId);
    } else if (type instanceof TimeType) {
      return data.getLong(rowId * fixLength);
    }

    throw new UnsupportedOperationException("only IntegerType and Time related are supported.");
  }

  public float getFloat(int rowId) {
    return data.getFloat(rowId * fixLength);
  }

  public double getDouble(int rowId) {
    return data.getDouble(rowId * fixLength);
  }

  private MyDecimal getMyDecimal(int rowId) {
    int startPos = rowId * fixLength;
    int digitsInt = data.get(startPos);
    int digitsFrac = data.get(startPos + 1);
    int resultFrac = data.get(startPos + 2);
    boolean negative = data.get(startPos + 3) == 1;
    int[] wordBuf = new int[9];
    for (int i = 0; i < 9; i++) {
      wordBuf[i] = data.getInt(startPos + 4 + i * 4);
    }

    return new MyDecimal(digitsInt, digitsFrac, negative, wordBuf);
  }
  /**
   * Returns the decimal type value for rowId. If the slot for rowId is null, it should return null.
   */
  @Override
  /** digitsInt int8 1 digitsFrac int8 1 resultFrac int8 1 negative bool 1 wordBuf int32[9] 36 */
  public BigDecimal getDecimal(int rowId, int precision, int scale) {
    // this is to handle unsigned long to avoid overflow.
    if (type instanceof IntegerType) {
      return new BigDecimal(UnsignedLong.fromLongBits(this.getLong(rowId)).bigIntegerValue());
    }
    // TODO figure out how to use precision and scale
    MyDecimal decimal = getMyDecimal(rowId);
    return decimal.toBigDecimal();
  }

  private String getEnumString(int rowId) {
    int start = (int) this.offsets[rowId];
    long end = this.offsets[rowId + 1];
    return new String(getRawBinary(start + 8, end));
  }

  private String getJsonString(int rowId) {
    long start = this.offsets[rowId];
    long end = this.offsets[rowId + 1];
    return JsonUtils.parseJson(new CodecDataInput(getRawBinary(start, end))).toString();
  }

  public String getUTF8String(int rowId) {
    if (type instanceof EnumType) {
      return getEnumString(rowId);
    }

    if (type instanceof JsonType) {
      return getJsonString(rowId);
    }

    return new String(getBinary(rowId));
  }

  private byte[] getRawBinary(long start, long end) {
    byte[] buffer = new byte[(int) (end - start)];
    for (int i = 0; i < (end - start); i++) {
      buffer[i] = data.get((int) (start + i));
    }
    return buffer;
  }

  /**
   * Returns the binary type value for rowId. If the slot for rowId is null, it should return null.
   */
  @Override
  public byte[] getBinary(int rowId) {
    int start = (int) this.offsets[rowId];
    long end = this.offsets[rowId + 1];
    return getRawBinary(start, end);
  }

  /** @return child [[TiColumnVector]] at the given ordinal. */
  @Override
  protected TiColumnVector getChild(int ordinal) {
    throw new UnsupportedOperationException("TiChunkColumnVector does not support this operation");
  }
}
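
isNullAt above reads the chunk's null bitmap: one bit per row, least-significant bit first within each byte, where a set bit means the value is present and a cleared bit marks NULL. A self-contained sketch of the same test (the bitmap bytes are made up):

```java
public class NullBitmapSketch {
  // A cleared bit marks NULL, which is why isNullAt tests (nullByte & (1 << (rowId & 7))) == 0.
  static boolean isNullAt(byte[] nullBitMaps, int rowId) {
    int nullByte = nullBitMaps[rowId / 8] & 0xFF;
    return (nullByte & (1 << (rowId & 7))) == 0;
  }

  public static void main(String[] args) {
    // Rows 0 and 2 present, row 1 NULL: bits 0 and 2 set -> 0b00000101.
    byte[] bitmap = {0b00000101};
    for (int row = 0; row < 3; row++) {
      System.out.println("row " + row + " null? " + isNullAt(bitmap, row));
    }
  }
}
```
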
@ -0,0 +1,221 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.columnar;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import org.tikv.common.types.DataType;
|
||||
|
||||
/**
|
||||
* An interface is mostly copied from Spark's ColumnVector (we do not link it here because we do not
|
||||
* want to pollute tikv java client's dependencies).
|
||||
*
|
||||
* <p>Most of the APIs take the rowId as a parameter. This is the batch local 0-based row id for
|
||||
* values in this TiColumnVector.
|
||||
*
|
||||
* <p>Spark only calls specific `get` method according to the data type of this {@link
|
||||
* TiColumnVector}, e.g. if it's int type, Spark is guaranteed to only call {@link #getInt(int)} or
|
||||
* {@link #getInts(int, int)}.
|
||||
*
|
||||
* <p>TiColumnVector is expected to be reused during the entire data loading process, to avoid
|
||||
* allocating memory again and again.
|
||||
*/
|
||||
public abstract class TiColumnVector implements AutoCloseable {
|
||||
|
||||
private final int numOfRows;
|
||||
/** Data type for this column. */
|
||||
protected DataType type;
|
||||
|
||||
/** Sets up the data type of this column vector. */
|
||||
protected TiColumnVector(DataType type, int numOfRows) {
|
||||
this.type = type;
|
||||
this.numOfRows = numOfRows;
|
||||
}
|
||||
|
||||
/** Returns the data type of this column vector. */
|
||||
public final DataType dataType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleans up memory for this column vector. The column vector is not usable after this.
|
||||
*
|
||||
* <p>This overwrites `AutoCloseable.close` to remove the `throws` clause, as column vector is
|
||||
* in-memory and we don't expect any exception to happen during closing.
|
||||
*/
|
||||
@Override
|
||||
public abstract void close();
|
||||
|
||||
/** Returns true if this column vector contains any null values. */
|
||||
public abstract boolean hasNull();
|
||||
|
||||
/** Returns the number of nulls in this column vector. */
|
||||
public abstract int numNulls();
|
||||
|
||||
/** Returns whether the value at rowId is NULL. */
|
||||
public abstract boolean isNullAt(int rowId);
|
||||
|
||||
/**
|
||||
* Returns the boolean type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
public abstract boolean getBoolean(int rowId);
|
||||
|
||||
/**
|
||||
* Gets boolean type values from [rowId, rowId + count). The return values for the null slots are
|
||||
* undefined and can be anything.
|
||||
*/
|
||||
public boolean[] getBooleans(int rowId, int count) {
|
||||
boolean[] res = new boolean[count];
|
||||
for (int i = 0; i < count; i++) {
|
||||
res[i] = getBoolean(rowId + i);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the byte type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
public abstract byte getByte(int rowId);
|
||||
|
||||
/**
|
||||
* Gets byte type values from [rowId, rowId + count). The return values for the null slots are
|
||||
* undefined and can be anything.
|
||||
*/
|
||||
public byte[] getBytes(int rowId, int count) {
|
||||
byte[] res = new byte[count];
|
||||
for (int i = 0; i < count; i++) {
|
||||
res[i] = getByte(rowId + i);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the short type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
public abstract short getShort(int rowId);
|
||||
|
||||
/**
|
||||
* Gets short type values from [rowId, rowId + count). The return values for the null slots are
|
||||
* undefined and can be anything.
|
||||
*/
|
||||
public short[] getShorts(int rowId, int count) {
|
||||
short[] res = new short[count];
|
||||
for (int i = 0; i < count; i++) {
|
||||
res[i] = getShort(rowId + i);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the int type value for rowId. The return value is undefined and can be anything, if the
|
||||
* slot for rowId is null.
|
||||
*/
|
||||
public abstract int getInt(int rowId);
|
||||
|
||||
/**
|
||||
* Gets int type values from [rowId, rowId + count). The return values for the null slots are
|
||||
* undefined and can be anything.
|
||||
*/
|
||||
public int[] getInts(int rowId, int count) {
|
||||
int[] res = new int[count];
|
||||
for (int i = 0; i < count; i++) {
|
||||
res[i] = getInt(rowId + i);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the long type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
public abstract long getLong(int rowId);
|
||||
|
||||
/**
|
||||
* Gets long type values from [rowId, rowId + count). The return values for the null slots are
|
||||
* undefined and can be anything.
|
||||
*/
|
||||
public long[] getLongs(int rowId, int count) {
|
||||
long[] res = new long[count];
|
||||
for (int i = 0; i < count; i++) {
|
||||
res[i] = getLong(rowId + i);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the float type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
public abstract float getFloat(int rowId);
|
||||
|
||||
/**
|
||||
* Gets float type values from [rowId, rowId + count). The return values for the null slots are
|
||||
* undefined and can be anything.
|
||||
*/
|
||||
public float[] getFloats(int rowId, int count) {
|
||||
float[] res = new float[count];
|
||||
for (int i = 0; i < count; i++) {
|
||||
res[i] = getFloat(rowId + i);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the double type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
public abstract double getDouble(int rowId);
|
||||
|
||||
/**
|
||||
* Gets double type values from [rowId, rowId + count). The return values for the null slots are
|
||||
* undefined and can be anything.
|
||||
*/
|
||||
public double[] getDoubles(int rowId, int count) {
|
||||
double[] res = new double[count];
|
||||
for (int i = 0; i < count; i++) {
|
||||
res[i] = getDouble(rowId + i);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the decimal type value for rowId. If the slot for rowId is null, it should return null.
|
||||
*/
|
||||
public abstract BigDecimal getDecimal(int rowId, int precision, int scale);
|
||||
|
||||
/**
|
||||
* Returns the string type value for rowId. If the slot for rowId is null, it should return null.
|
||||
* Note that the returned UTF8String may point to the data of this column vector, please copy it
|
||||
* if you want to keep it after this column vector is freed.
|
||||
*/
|
||||
public abstract String getUTF8String(int rowId);
|
||||
|
||||
/**
|
||||
* Returns the binary type value for rowId. If the slot for rowId is null, it should return null.
|
||||
*/
|
||||
public abstract byte[] getBinary(int rowId);
|
||||
|
||||
/** @return child [[TiColumnVector]] at the given ordinal. */
|
||||
protected abstract TiColumnVector getChild(int ordinal);
|
||||
|
||||
public int numOfRows() {
|
||||
return numOfRows;
|
||||
}
|
||||
}
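
The batch getters above (`getBooleans`, `getInts`, and so on) simply loop over their scalar counterparts, so a concrete vector only has to supply the per-row accessors. Below is a minimal sketch of such an implementation backed by a plain `long[]`; the class name and the use of `IntegerType.BIGINT` as its data type are illustrative choices of this sketch, not part of the commit.

```java
import java.math.BigDecimal;
import org.tikv.common.columnar.TiColumnVector;
import org.tikv.common.types.IntegerType;

/** Illustration only: a non-nullable column vector over a long[]. */
public class LongArrayColumnVector extends TiColumnVector {
  private final long[] values;

  public LongArrayColumnVector(long[] values) {
    // BIGINT is simply a convenient concrete DataType for this sketch.
    super(IntegerType.BIGINT, values.length);
    this.values = values;
  }

  @Override public void close() {}
  @Override public boolean hasNull() { return false; }
  @Override public int numNulls() { return 0; }
  @Override public boolean isNullAt(int rowId) { return false; }
  @Override public boolean getBoolean(int rowId) { return values[rowId] != 0; }
  @Override public byte getByte(int rowId) { return (byte) values[rowId]; }
  @Override public short getShort(int rowId) { return (short) values[rowId]; }
  @Override public int getInt(int rowId) { return (int) values[rowId]; }
  @Override public long getLong(int rowId) { return values[rowId]; }
  @Override public float getFloat(int rowId) { return values[rowId]; }
  @Override public double getDouble(int rowId) { return values[rowId]; }

  @Override
  public BigDecimal getDecimal(int rowId, int precision, int scale) {
    return BigDecimal.valueOf(values[rowId]);
  }

  @Override public String getUTF8String(int rowId) { return Long.toString(values[rowId]); }
  @Override public byte[] getBinary(int rowId) { return Long.toString(values[rowId]).getBytes(); }

  @Override
  protected TiColumnVector getChild(int ordinal) {
    throw new UnsupportedOperationException("no child vectors");
  }
}
```

With only the scalar accessors implemented, the inherited batch getters work for free: `new LongArrayColumnVector(new long[] {1, 2, 3}).getInts(0, 3)` returns `{1, 2, 3}`.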

@ -0,0 +1,79 @@
/*
 * Copyright 2020 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.columnar;

/**
 * TiCoreTime is TiDB's representation of date/datetime/timestamp, used to decode chunk data from
 * dag response
 */
public class TiCoreTime {
  // copied from https://github.com/pingcap/tidb/blob/master/types/time.go
  private static final long YEAR_BIT_FIELD_OFFSET = 50, YEAR_BIT_FIELD_WIDTH = 14;
  private static final long MONTH_BIT_FIELD_OFFSET = 46, MONTH_BIT_FIELD_WIDTH = 4;
  private static final long DAY_BIT_FIELD_OFFSET = 41, DAY_BIT_FIELD_WIDTH = 5;
  private static final long HOUR_BIT_FIELD_OFFSET = 36, HOUR_BIT_FIELD_WIDTH = 5;
  private static final long MINUTE_BIT_FIELD_OFFSET = 30, MINUTE_BIT_FIELD_WIDTH = 6;
  private static final long SECOND_BIT_FIELD_OFFSET = 24, SECOND_BIT_FIELD_WIDTH = 6;
  private static final long MICROSECOND_BIT_FIELD_OFFSET = 4, MICROSECOND_BIT_FIELD_WIDTH = 20;
  private static final long YEAR_BIT_FIELD_MASK =
      ((1L << YEAR_BIT_FIELD_WIDTH) - 1) << YEAR_BIT_FIELD_OFFSET;
  private static final long MONTH_BIT_FIELD_MASK =
      ((1L << MONTH_BIT_FIELD_WIDTH) - 1) << MONTH_BIT_FIELD_OFFSET;
  private static final long DAY_BIT_FIELD_MASK =
      ((1L << DAY_BIT_FIELD_WIDTH) - 1) << DAY_BIT_FIELD_OFFSET;
  private static final long HOUR_BIT_FIELD_MASK =
      ((1L << HOUR_BIT_FIELD_WIDTH) - 1) << HOUR_BIT_FIELD_OFFSET;
  private static final long MINUTE_BIT_FIELD_MASK =
      ((1L << MINUTE_BIT_FIELD_WIDTH) - 1) << MINUTE_BIT_FIELD_OFFSET;
  private static final long SECOND_BIT_FIELD_MASK =
      ((1L << SECOND_BIT_FIELD_WIDTH) - 1) << SECOND_BIT_FIELD_OFFSET;
  private static final long MICROSECOND_BIT_FIELD_MASK =
      ((1L << MICROSECOND_BIT_FIELD_WIDTH) - 1) << MICROSECOND_BIT_FIELD_OFFSET;

  private final long coreTime;

  public TiCoreTime(long coreTime) {
    this.coreTime = coreTime;
  }

  public int getYear() {
    return (int) ((coreTime & YEAR_BIT_FIELD_MASK) >>> YEAR_BIT_FIELD_OFFSET);
  }

  public int getMonth() {
    return (int) ((coreTime & MONTH_BIT_FIELD_MASK) >>> MONTH_BIT_FIELD_OFFSET);
  }

  public int getDay() {
    return (int) ((coreTime & DAY_BIT_FIELD_MASK) >>> DAY_BIT_FIELD_OFFSET);
  }

  public int getHour() {
    return (int) ((coreTime & HOUR_BIT_FIELD_MASK) >>> HOUR_BIT_FIELD_OFFSET);
  }

  public int getMinute() {
    return (int) ((coreTime & MINUTE_BIT_FIELD_MASK) >>> MINUTE_BIT_FIELD_OFFSET);
  }

  public int getSecond() {
    return (int) ((coreTime & SECOND_BIT_FIELD_MASK) >>> SECOND_BIT_FIELD_OFFSET);
  }

  public long getMicroSecond() {
    return (coreTime & MICROSECOND_BIT_FIELD_MASK) >>> MICROSECOND_BIT_FIELD_OFFSET;
  }
}
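
The getters above only mask and shift the packed 64-bit core time. The sketch below hand-packs a value with the documented offsets to show the round trip; the packed literal is fabricated for illustration, not taken from a real DAG response.

```java
import org.tikv.common.columnar.TiCoreTime;

public class TiCoreTimeDemo {
  public static void main(String[] args) {
    // 2020-06-15 10:30:45.123456, packed with the bit-field offsets above
    // (year@50, month@46, day@41, hour@36, minute@30, second@24, micro@4).
    long packed =
        (2020L << 50) | (6L << 46) | (15L << 41) | (10L << 36)
            | (30L << 30) | (45L << 24) | (123456L << 4);
    TiCoreTime t = new TiCoreTime(packed);
    System.out.println(t.getYear() + "-" + t.getMonth() + "-" + t.getDay());     // 2020-6-15
    System.out.println(t.getHour() + ":" + t.getMinute() + ":" + t.getSecond()); // 10:30:45
    System.out.println(t.getMicroSecond());                                      // 123456
  }
}
```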
@ -0,0 +1,175 @@
|
|||
/*
|
||||
* Copyright 2019 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.columnar;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import org.tikv.common.row.Row;
|
||||
import org.tikv.common.types.DataType;
|
||||
|
||||
/**
|
||||
* An implementation of {@link TiColumnVector}. It is a faked column vector; the underlying data is
|
||||
* in row format.
|
||||
*/
|
||||
public class TiRowColumnVector extends TiColumnVector {
|
||||
/** Represents the column index of original row */
|
||||
private final int colIdx;
|
||||
/** row-wise format data and data is already decoded */
|
||||
private Row[] rows;
|
||||
/** Sets up the data type of this column vector. */
|
||||
public TiRowColumnVector(DataType type, int colIdx, Row[] rows, int numOfRows) {
|
||||
super(type, numOfRows);
|
||||
this.rows = rows;
|
||||
this.colIdx = colIdx;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleans up memory for this column vector. The column vector is not usable after this.
|
||||
*
|
||||
* <p>This overwrites `AutoCloseable.close` to remove the `throws` clause, as column vector is
|
||||
* in-memory and we don't expect any exception to happen during closing.
|
||||
*/
|
||||
@Override
|
||||
public void close() {
|
||||
this.rows = null;
|
||||
}
|
||||
|
||||
/** Returns true if this column vector contains any null values. */
|
||||
@Override
|
||||
public boolean hasNull() {
|
||||
throw new UnsupportedOperationException(
|
||||
"row-wise column vector does not support this operation");
|
||||
}
|
||||
|
||||
/** Returns the number of nulls in this column vector. */
|
||||
@Override
|
||||
public int numNulls() {
|
||||
throw new UnsupportedOperationException(
|
||||
"row-wise column vector does not support this operation");
|
||||
}
|
||||
|
||||
/** Returns whether the value at rowId is NULL. */
|
||||
@Override
|
||||
public boolean isNullAt(int rowId) {
|
||||
return rows[rowId].get(colIdx, null) == null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the boolean type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public boolean getBoolean(int rowId) {
|
||||
return rows[rowId].getLong(colIdx) == 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the byte type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public byte getByte(int rowId) {
|
||||
return (byte) rows[rowId].getLong(colIdx);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the short type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public short getShort(int rowId) {
|
||||
return (short) rows[rowId].getLong(colIdx);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the int type value for rowId. The return value is undefined and can be anything, if the
|
||||
* slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public int getInt(int rowId) {
|
||||
return (int) rows[rowId].getLong(colIdx);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the long type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public long getLong(int rowId) {
|
||||
return rows[rowId].getLong(colIdx);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the float type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public float getFloat(int rowId) {
|
||||
return ((Number) rows[rowId].getDouble(colIdx)).floatValue();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the double type value for rowId. The return value is undefined and can be anything, if
|
||||
* the slot for rowId is null.
|
||||
*/
|
||||
@Override
|
||||
public double getDouble(int rowId) {
|
||||
return rows[rowId].getDouble(colIdx);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the decimal type value for rowId. If the slot for rowId is null, it should return null.
|
||||
*/
|
||||
@Override
|
||||
public BigDecimal getDecimal(int rowId, int precision, int scale) {
|
||||
Object val = rows[rowId].get(colIdx, null);
|
||||
if (val instanceof BigDecimal) {
|
||||
return (BigDecimal) val;
|
||||
}
|
||||
|
||||
if (val instanceof Long) {
|
||||
return BigDecimal.valueOf((long) val);
|
||||
}
|
||||
|
||||
throw new UnsupportedOperationException(
|
||||
String.format(
|
||||
"failed to getDecimal and the value is %s:%s", val.getClass().getCanonicalName(), val));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the string type value for rowId. If the slot for rowId is null, it should return null.
|
||||
* Note that the returned UTF8String may point to the data of this column vector, please copy it
|
||||
* if you want to keep it after this column vector is freed.
|
||||
*/
|
||||
@Override
|
||||
public String getUTF8String(int rowId) {
|
||||
return rows[rowId].getString(colIdx);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the binary type value for rowId. If the slot for rowId is null, it should return null.
|
||||
*/
|
||||
@Override
|
||||
public byte[] getBinary(int rowId) {
|
||||
return rows[rowId].getBytes(colIdx);
|
||||
}
|
||||
|
||||
/** @return child [[TiColumnVector]] at the given ordinal. */
|
||||
@Override
|
||||
protected TiColumnVector getChild(int ordinal) {
|
||||
throw new UnsupportedOperationException(
|
||||
"row-wise column vector does not support this operation");
|
||||
}
|
||||
}

@ -0,0 +1,74 @@
/*
 * Copyright 2020 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.columnar.datatypes;

import java.nio.ByteBuffer;
import org.tikv.common.codec.CodecDataInput;
import org.tikv.common.util.MemoryUtil;

public class AutoGrowByteBuffer {
  private final ByteBuffer initBuf;
  private ByteBuffer buf;

  public AutoGrowByteBuffer(ByteBuffer initBuf) {
    initBuf.clear();
    this.initBuf = initBuf;
    this.buf = initBuf;
  }

  public int dataSize() {
    return buf.position();
  }

  public ByteBuffer getByteBuffer() {
    return buf;
  }

  private void beforeIncrease(int inc) {
    int minCap = buf.position() + inc;
    if (minCap > buf.capacity()) {
      int newCap = buf.capacity();
      do {
        newCap = newCap << 1;
      } while (minCap > newCap);

      ByteBuffer newBuf = MemoryUtil.allocateDirect(newCap);
      MemoryUtil.copyMemory(
          MemoryUtil.getAddress(buf), MemoryUtil.getAddress(newBuf), buf.position());
      newBuf.position(buf.position());

      if (buf != initBuf) {
        MemoryUtil.free(buf);
      }

      buf = newBuf;
    }
  }

  public void put(CodecDataInput cdi, int len) {
    beforeIncrease(len);

    buf.limit(buf.position() + len);
    MemoryUtil.readFully(buf, cdi, len);
  }

  public void putByte(byte v) {
    beforeIncrease(1);

    buf.limit(buf.position() + 1);
    buf.put(v);
  }
}
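
`beforeIncrease` keeps doubling the capacity until the pending write fits, copying the bytes into a fresh direct buffer and freeing the old one unless it is the caller-supplied initial buffer. A minimal sketch, deliberately starting from a 4-byte buffer so the doubling is easy to observe (real callers such as the string decoder start around 100 KB):

```java
import java.nio.ByteBuffer;
import org.tikv.common.columnar.datatypes.AutoGrowByteBuffer;
import org.tikv.common.util.MemoryUtil;

public class AutoGrowDemo {
  public static void main(String[] args) {
    // A deliberately tiny 4-byte direct buffer chosen for illustration.
    ByteBuffer init = MemoryUtil.allocateDirect(4);
    AutoGrowByteBuffer buf = new AutoGrowByteBuffer(init);
    for (byte b = 0; b < 10; b++) {
      buf.putByte(b);
    }
    System.out.println(buf.dataSize());                 // 10
    System.out.println(buf.getByteBuffer().capacity()); // 16 (grew 4 -> 8 -> 16)
  }
}
```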

@ -0,0 +1,78 @@
/*
 * Copyright 2020 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.columnar.datatypes;

import static org.tikv.common.util.MemoryUtil.allocateDirect;

import java.nio.ByteBuffer;
import org.tikv.common.codec.CodecDataInput;
import org.tikv.common.columnar.TiBlockColumnVector;
import org.tikv.common.types.DataType;
import org.tikv.common.util.MemoryUtil;

// TODO Support nullable data types.
// TODO Support nested, array and struct types.
public abstract class CHType {
  protected int length;
  protected boolean nullable = false;

  abstract String name();

  public boolean isNullable() {
    return nullable;
  }

  public void setNullable(boolean nullable) {
    this.nullable = nullable;
  }

  protected ByteBuffer decodeNullMap(CodecDataInput cdi, int size) {
    // read size * uint8 from cdi
    ByteBuffer buffer = allocateDirect(size);
    MemoryUtil.readFully(buffer, cdi, size);
    buffer.clear();
    return buffer;
  }

  public abstract DataType toDataType();

  protected int bufferSize(int size) {
    return size * length;
  }

  public TiBlockColumnVector decode(CodecDataInput cdi, int size) {
    if (length == -1) {
      throw new IllegalStateException("var type should have its own decode method");
    }

    if (size == 0) {
      return new TiBlockColumnVector(this);
    }
    if (isNullable()) {
      ByteBuffer nullMap = decodeNullMap(cdi, size);
      ByteBuffer buffer = allocateDirect(bufferSize(size));
      // read bytes from cdi to buffer(off-heap)
      MemoryUtil.readFully(buffer, cdi, bufferSize(size));
      buffer.clear();
      return new TiBlockColumnVector(this, nullMap, buffer, size, length);
    } else {
      ByteBuffer buffer = allocateDirect(bufferSize(size));
      MemoryUtil.readFully(buffer, cdi, bufferSize(size));
      buffer.clear();
      return new TiBlockColumnVector(this, buffer, size, length);
    }
  }
}
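
For fixed-width types, `decode` expects an optional null map of `size` bytes (only when the type is nullable) followed by `size * length` bytes of raw values. The sketch below exercises the non-nullable path; it assumes `CodecDataInput` can wrap a byte array and that `TiBlockColumnVector` exposes the row count through the `TiColumnVector` API, neither of which is shown in this commit.

```java
import org.tikv.common.codec.CodecDataInput;
import org.tikv.common.columnar.TiBlockColumnVector;
import org.tikv.common.columnar.datatypes.CHTypeNumber.CHTypeInt64;

public class FixedWidthDecodeDemo {
  public static void main(String[] args) {
    // Two non-nullable Int64 rows: exactly 2 * 8 payload bytes, no null map in front.
    byte[] wire = new byte[2 * 8];
    TiBlockColumnVector column = new CHTypeInt64().decode(new CodecDataInput(wire), 2);
    System.out.println(column.numOfRows()); // 2
  }
}
```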
@ -0,0 +1,35 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.columnar.datatypes;
|
||||
|
||||
import org.tikv.common.types.DataType;
|
||||
import org.tikv.common.types.DateType;
|
||||
|
||||
public class CHTypeDate extends CHType {
|
||||
public CHTypeDate() {
|
||||
this.length = 2;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return "Date";
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataType toDataType() {
|
||||
return DateType.DATE;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,35 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.columnar.datatypes;
|
||||
|
||||
import org.tikv.common.types.DataType;
|
||||
import org.tikv.common.types.DateTimeType;
|
||||
|
||||
public class CHTypeDateTime extends CHType {
|
||||
public CHTypeDateTime() {
|
||||
this.length = 4;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return "DateTime";
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataType toDataType() {
|
||||
return DateTimeType.DATETIME;
|
||||
}
|
||||
}

@ -0,0 +1,47 @@
/*
 * Copyright 2020 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.columnar.datatypes;

import org.tikv.common.types.DataType;
import org.tikv.common.types.DecimalType;

public class CHTypeDecimal extends CHType {
  public int precision, scale;

  public CHTypeDecimal(int precision, int scale) {
    this.precision = precision;
    this.scale = scale;
    if (precision <= 9) {
      length = 4;
    } else if (precision <= 18) {
      length = 8;
    } else if (precision <= 38) {
      length = 16;
    } else {
      length = 48;
    }
  }

  @Override
  public String name() {
    return "Decimal(" + precision + ", " + scale + ")";
  }

  @Override
  public DataType toDataType() {
    return new DecimalType(precision, scale);
  }
}
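
The constructor above picks the on-wire width from the precision tier (4, 8, 16 or 48 bytes per value). A tiny usage sketch:

```java
import org.tikv.common.columnar.datatypes.CHTypeDecimal;

public class CHTypeDecimalDemo {
  public static void main(String[] args) {
    // Precision 10 falls in the "<= 18" tier, so each encoded value occupies 8 bytes.
    CHTypeDecimal decimal = new CHTypeDecimal(10, 2);
    System.out.println(decimal.name()); // Decimal(10, 2)
  }
}
```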
@ -0,0 +1,38 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.columnar.datatypes;
|
||||
|
||||
import org.tikv.common.types.DataType;
|
||||
import org.tikv.common.types.StringType;
|
||||
|
||||
public class CHTypeFixedString extends CHType {
|
||||
private final int length;
|
||||
|
||||
public CHTypeFixedString(int length) {
|
||||
assert length > 0;
|
||||
this.length = length;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return "FixedString(" + length + ")";
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataType toDataType() {
|
||||
return StringType.TEXT;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,35 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.columnar.datatypes;
|
||||
|
||||
import org.tikv.common.types.DataType;
|
||||
import org.tikv.common.types.DateType;
|
||||
|
||||
public class CHTypeMyDate extends CHType {
|
||||
public CHTypeMyDate() {
|
||||
this.length = 8;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return "MyDate";
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataType toDataType() {
|
||||
return DateType.DATE;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,35 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.columnar.datatypes;
|
||||
|
||||
import org.tikv.common.types.DataType;
|
||||
import org.tikv.common.types.DateTimeType;
|
||||
|
||||
public class CHTypeMyDateTime extends CHType {
|
||||
public CHTypeMyDateTime() {
|
||||
this.length = 8;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return "MyDateTime";
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataType toDataType() {
|
||||
return DateTimeType.DATETIME;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,144 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.columnar.datatypes;
|
||||
|
||||
import static org.tikv.common.types.DecimalType.BIG_INT_DECIMAL;
|
||||
|
||||
import org.tikv.common.types.DataType;
|
||||
import org.tikv.common.types.IntegerType;
|
||||
|
||||
public abstract class CHTypeNumber extends CHType {
|
||||
@Override
|
||||
public DataType toDataType() {
|
||||
return IntegerType.BIGINT;
|
||||
}
|
||||
|
||||
public static class CHTypeUInt8 extends CHTypeNumber {
|
||||
public CHTypeUInt8() {
|
||||
this.length = 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return "UInt8";
|
||||
}
|
||||
}
|
||||
|
||||
public static class CHTypeUInt16 extends CHTypeNumber {
|
||||
public CHTypeUInt16() {
|
||||
this.length = 2;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return "UInt16";
|
||||
}
|
||||
}
|
||||
|
||||
public static class CHTypeUInt32 extends CHTypeNumber {
|
||||
public CHTypeUInt32() {
|
||||
this.length = 4;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return "UInt32";
|
||||
}
|
||||
}
|
||||
|
||||
public static class CHTypeUInt64 extends CHTypeNumber {
|
||||
public CHTypeUInt64() {
|
||||
this.length = 8;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return "UInt64";
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataType toDataType() {
|
||||
return BIG_INT_DECIMAL;
|
||||
}
|
||||
}
|
||||
|
||||
public static class CHTypeInt8 extends CHTypeNumber {
|
||||
public CHTypeInt8() {
|
||||
this.length = 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return "Int8";
|
||||
}
|
||||
}
|
||||
|
||||
public static class CHTypeInt16 extends CHTypeNumber {
|
||||
|
||||
public CHTypeInt16() {
|
||||
this.length = 2;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return "Int16";
|
||||
}
|
||||
}
|
||||
|
||||
public static class CHTypeInt32 extends CHTypeNumber {
|
||||
public CHTypeInt32() {
|
||||
this.length = 4;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return "Int32";
|
||||
}
|
||||
}
|
||||
|
||||
public static class CHTypeInt64 extends CHTypeNumber {
|
||||
public CHTypeInt64() {
|
||||
this.length = 8;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return "Int64";
|
||||
}
|
||||
}
|
||||
|
||||
public static class CHTypeFloat32 extends CHTypeNumber {
|
||||
public CHTypeFloat32() {
|
||||
this.length = 4;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return "Float32";
|
||||
}
|
||||
}
|
||||
|
||||
public static class CHTypeFloat64 extends CHTypeNumber {
|
||||
public CHTypeFloat64() {
|
||||
this.length = 8;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return "Float64";
|
||||
}
|
||||
}
|
||||
}
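
Every width above maps to `BIGINT` except `UInt64`, which is widened to `BIG_INT_DECIMAL`, presumably so the full unsigned range is not squeezed into a signed long. A short sketch using only the constants already referenced above:

```java
import org.tikv.common.columnar.datatypes.CHTypeNumber.CHTypeInt64;
import org.tikv.common.columnar.datatypes.CHTypeNumber.CHTypeUInt64;
import org.tikv.common.types.DecimalType;
import org.tikv.common.types.IntegerType;

public class CHTypeNumberDemo {
  public static void main(String[] args) {
    System.out.println(new CHTypeInt64().toDataType() == IntegerType.BIGINT);           // true
    System.out.println(new CHTypeUInt64().toDataType() == DecimalType.BIG_INT_DECIMAL); // true
  }
}
```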
|
||||
|
|
@ -0,0 +1,90 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.columnar.datatypes;
|
||||
|
||||
import static org.tikv.common.util.MemoryUtil.EMPTY_BYTE_BUFFER_DIRECT;
|
||||
import static org.tikv.common.util.MemoryUtil.allocateDirect;
|
||||
|
||||
import com.google.common.base.Preconditions;
|
||||
import java.nio.ByteBuffer;
|
||||
import org.tikv.common.codec.Codec.IntegerCodec;
|
||||
import org.tikv.common.codec.CodecDataInput;
|
||||
import org.tikv.common.columnar.TiBlockColumnVector;
|
||||
import org.tikv.common.types.DataType;
|
||||
import org.tikv.common.types.StringType;
|
||||
import org.tikv.common.util.MemoryUtil;
|
||||
|
||||
public class CHTypeString extends CHType {
|
||||
// Use to prevent frequently reallocate the chars buffer.
|
||||
// ClickHouse does not pass a total length at the beginning, so sad...
|
||||
private static final ThreadLocal<ByteBuffer> initBuffer =
|
||||
ThreadLocal.withInitial(() -> allocateDirect(102400));
|
||||
|
||||
public CHTypeString() {
|
||||
this.length = -1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return "String";
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataType toDataType() {
|
||||
return StringType.TEXT;
|
||||
}
|
||||
|
||||
@Override
|
||||
public TiBlockColumnVector decode(CodecDataInput cdi, int size) {
|
||||
if (size == 0) {
|
||||
return new TiBlockColumnVector(this);
|
||||
}
|
||||
|
||||
ByteBuffer nullMap;
|
||||
if (isNullable()) {
|
||||
nullMap = decodeNullMap(cdi, size);
|
||||
} else {
|
||||
nullMap = EMPTY_BYTE_BUFFER_DIRECT;
|
||||
}
|
||||
|
||||
ByteBuffer offsets = allocateDirect(size << 3);
|
||||
ByteBuffer initCharsBuf = initBuffer.get();
|
||||
AutoGrowByteBuffer autoGrowCharsBuf = new AutoGrowByteBuffer(initCharsBuf);
|
||||
|
||||
int offset = 0;
|
||||
for (int i = 0; i < size; i++) {
|
||||
int valueSize = (int) IntegerCodec.readUVarLong(cdi);
|
||||
|
||||
offset += valueSize + 1;
|
||||
offsets.putLong(offset);
|
||||
|
||||
autoGrowCharsBuf.put(cdi, valueSize);
|
||||
autoGrowCharsBuf.putByte((byte) 0); // terminating zero byte
|
||||
}
|
||||
|
||||
Preconditions.checkState(offset == autoGrowCharsBuf.dataSize());
|
||||
|
||||
ByteBuffer chars = autoGrowCharsBuf.getByteBuffer();
|
||||
if (chars == initCharsBuf) {
|
||||
// Copy out.
|
||||
ByteBuffer newChars = allocateDirect(offset);
|
||||
MemoryUtil.copyMemory(MemoryUtil.getAddress(chars), MemoryUtil.getAddress(newChars), offset);
|
||||
chars = newChars;
|
||||
}
|
||||
|
||||
return new TiBlockColumnVector(this, nullMap, offsets, chars, size);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,101 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.event;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
public class CacheInvalidateEvent implements Serializable {
|
||||
private final long regionId;
|
||||
private final long storeId;
|
||||
private final CacheType cacheType;
|
||||
private boolean invalidateRegion;
|
||||
private boolean invalidateStore;
|
||||
|
||||
public CacheInvalidateEvent(
|
||||
long regionId, long storeId, boolean updateRegion, boolean updateStore, CacheType type) {
|
||||
this.regionId = regionId;
|
||||
this.storeId = storeId;
|
||||
this.cacheType = type;
|
||||
if (updateRegion) {
|
||||
invalidateRegion();
|
||||
}
|
||||
|
||||
if (updateStore) {
|
||||
invalidateStore();
|
||||
}
|
||||
}
|
||||
|
||||
public long getRegionId() {
|
||||
return regionId;
|
||||
}
|
||||
|
||||
public long getStoreId() {
|
||||
return storeId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == this) {
|
||||
return true;
|
||||
} else if (obj instanceof CacheInvalidateEvent) {
|
||||
CacheInvalidateEvent event = (CacheInvalidateEvent) obj;
|
||||
return event.getRegionId() == getRegionId()
|
||||
&& event.getStoreId() == getStoreId()
|
||||
&& event.getCacheType() == getCacheType();
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = 1106;
|
||||
result += result * 31 + getStoreId();
|
||||
result += result * 31 + getRegionId();
|
||||
result += result * 31 + getCacheType().name().hashCode();
|
||||
return result;
|
||||
}
|
||||
|
||||
public void invalidateRegion() {
|
||||
invalidateRegion = true;
|
||||
}
|
||||
|
||||
public void invalidateStore() {
|
||||
invalidateStore = true;
|
||||
}
|
||||
|
||||
public boolean shouldUpdateRegion() {
|
||||
return invalidateRegion;
|
||||
}
|
||||
|
||||
public boolean shouldUpdateStore() {
|
||||
return invalidateStore;
|
||||
}
|
||||
|
||||
public CacheType getCacheType() {
|
||||
return cacheType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("RegionId=%d,StoreId=%d,Type=%s", regionId, storeId, cacheType.name());
|
||||
}
|
||||
|
||||
public enum CacheType implements Serializable {
|
||||
REGION_STORE,
|
||||
REQ_FAILED,
|
||||
LEADER
|
||||
}
|
||||
}
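
A typical event tells listeners which caches to refresh for a (region, store) pair. A minimal usage sketch built only from the constructor and accessors above:

```java
import org.tikv.common.event.CacheInvalidateEvent;
import org.tikv.common.event.CacheInvalidateEvent.CacheType;

public class CacheInvalidateDemo {
  public static void main(String[] args) {
    // Region 42 on store 7 went stale: refresh the region cache, keep the store cache.
    CacheInvalidateEvent event =
        new CacheInvalidateEvent(42L, 7L, true, false, CacheType.REGION_STORE);
    System.out.println(event.shouldUpdateRegion()); // true
    System.out.println(event.shouldUpdateStore());  // false
    System.out.println(event); // RegionId=42,StoreId=7,Type=REGION_STORE
  }
}
```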
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.exception;
|
||||
|
||||
public class AllocateRowIDOverflowException extends RuntimeException {
|
||||
public AllocateRowIDOverflowException(long current, long step, long shardRowBits) {
|
||||
super(
|
||||
String.format(
|
||||
"Overflow when allocating row id with current=%d, step=%d, shard bit=%d",
|
||||
current, step, shardRowBits));
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.exception;
|
||||
|
||||
public class CastingException extends RuntimeException {
|
||||
public CastingException(Exception e) {
|
||||
super(e);
|
||||
}
|
||||
|
||||
public CastingException(String msg) {
|
||||
super(msg);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.exception;
|
||||
|
||||
public class CodecException extends TiKVException {
|
||||
|
||||
private static final long serialVersionUID = 0L;
|
||||
|
||||
public CodecException(String errMsg) {
|
||||
super(errMsg);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
/*
|
||||
* Copyright 2019 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.exception;
|
||||
|
||||
public class ConvertNotSupportException extends RuntimeException {
|
||||
public ConvertNotSupportException(String fromType, String toType) {
|
||||
super("do not support converting from " + fromType + " to " + toType);
|
||||
}
|
||||
}

@ -0,0 +1,50 @@
/*
 * Copyright 2019 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.exception;

public class ConvertOverflowException extends RuntimeException {
  private ConvertOverflowException(String msg) {
    super(msg);
  }

  public ConvertOverflowException(String msg, Throwable e) {
    super(msg, e);
  }

  public static ConvertOverflowException newMaxLengthException(String value, long maxLength) {
    return new ConvertOverflowException("value " + value + " length > max length " + maxLength);
  }

  public static ConvertOverflowException newMaxLengthException(long length, long maxLength) {
    return new ConvertOverflowException("length " + length + " > max length " + maxLength);
  }

  public static ConvertOverflowException newLowerBoundException(Object value, Object lowerBound) {
    return new ConvertOverflowException("value " + value + " < lowerBound " + lowerBound);
  }

  public static ConvertOverflowException newUpperBoundException(Object value, Object upperBound) {
    return new ConvertOverflowException("value " + value + " > upperBound " + upperBound);
  }

  public static ConvertOverflowException newEnumException(Object value) {
    return new ConvertOverflowException("Incorrect enum value: '" + value + "'");
  }

  public static ConvertOverflowException newOutOfRange() {
    return new ConvertOverflowException("Out of range");
  }
}
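
Outside callers are expected to go through the static factories rather than the constructors. A one-line usage sketch:

```java
import org.tikv.common.exception.ConvertOverflowException;

public class ConvertOverflowDemo {
  public static void main(String[] args) {
    ConvertOverflowException e = ConvertOverflowException.newUpperBoundException(300, 255);
    System.out.println(e.getMessage()); // value 300 > upperBound 255
  }
}
```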
@ -0,0 +1,22 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.exception;
|
||||
|
||||
public class DAGRequestException extends RuntimeException {
|
||||
public DAGRequestException(String msg) {
|
||||
super(msg);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.exception;
|
||||
|
||||
public class IgnoreUnsupportedTypeException extends RuntimeException {
|
||||
public IgnoreUnsupportedTypeException(String msg) {
|
||||
super(msg);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
/*
|
||||
* Copyright 2018 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.exception;
|
||||
|
||||
public class RegionTaskException extends RuntimeException {
|
||||
public RegionTaskException(String msg, Throwable throwable) {
|
||||
super(msg, throwable);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,37 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.exception;
|
||||
|
||||
import com.pingcap.tidb.tipb.Error;
|
||||
|
||||
public class SelectException extends RuntimeException {
|
||||
private final Error err;
|
||||
|
||||
public SelectException(Error err, String msg) {
|
||||
super(msg);
|
||||
this.err = err;
|
||||
}
|
||||
|
||||
// TODO: improve this
|
||||
public SelectException(String msg) {
|
||||
super(msg);
|
||||
this.err = null;
|
||||
}
|
||||
|
||||
public Error getError() {
|
||||
return err;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
/*
|
||||
* Copyright 2019 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.exception;
|
||||
|
||||
public class TiBatchWriteException extends RuntimeException {
|
||||
public TiBatchWriteException(String msg) {
|
||||
super(msg);
|
||||
}
|
||||
|
||||
public TiBatchWriteException(String msg, Throwable t) {
|
||||
super(msg, t);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.exception;
|
||||
|
||||
public class TiDBConvertException extends RuntimeException {
|
||||
public TiDBConvertException(String columnName, Throwable e) {
|
||||
super("convert to tidb data error for column '" + columnName + "'", e);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.exception;
|
||||
|
||||
public class TiExpressionException extends RuntimeException {
|
||||
public TiExpressionException(String msg) {
|
||||
super(msg);
|
||||
}
|
||||
|
||||
public TiExpressionException(String msg, Throwable t) {
|
||||
super(msg, t);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
/*
|
||||
* Copyright 2019 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.exception;
|
||||
|
||||
public class TiInternalException extends RuntimeException {
|
||||
public TiInternalException(Exception e) {
|
||||
super(e);
|
||||
}
|
||||
|
||||
public TiInternalException(String msg) {
|
||||
super(msg);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.exception;
|
||||
|
||||
public class TypeException extends RuntimeException {
|
||||
public TypeException(String msg) {
|
||||
super(msg);
|
||||
}
|
||||
|
||||
public TypeException(String msg, Throwable t) {
|
||||
super(msg, t);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.exception;
|
||||
|
||||
public class UnsupportedPartitionExprException extends RuntimeException {
|
||||
public UnsupportedPartitionExprException(String msg) {
|
||||
super(msg);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
/*
|
||||
* Copyright 2019 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.exception;
|
||||
|
||||
public class UnsupportedSyntaxException extends RuntimeException {
|
||||
public UnsupportedSyntaxException(String msg) {
|
||||
super(msg);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.exception;
|
||||
|
||||
public class UnsupportedTypeException extends RuntimeException {
|
||||
public UnsupportedTypeException(String msg) {
|
||||
super(msg);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.exception;
|
||||
|
||||
import org.tikv.common.codec.KeyUtils;
|
||||
|
||||
public class WriteConflictException extends RuntimeException {
|
||||
public WriteConflictException(long callerStartTS, long txnID, long commitTS, byte[] key) {
|
||||
super(
|
||||
String.format(
|
||||
"callerStartTS=%d txnID=%d commitTS=%d key=%s",
|
||||
callerStartTS, txnID, commitTS, KeyUtils.formatBytes(key)));
|
||||
}
|
||||
}
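
The message packs the caller's start timestamp, the conflicting transaction, its commit timestamp and a printable form of the contended key. A usage sketch with made-up timestamps and key bytes:

```java
import org.tikv.common.exception.WriteConflictException;

public class WriteConflictDemo {
  public static void main(String[] args) {
    WriteConflictException e =
        new WriteConflictException(410000L, 409998L, 409999L, new byte[] {1, 2, 3});
    System.out.println(e.getMessage()); // callerStartTS=410000 txnID=409998 commitTS=409999 key=...
  }
}
```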
|
||||
|
|
@ -0,0 +1,94 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.expression;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.google.common.base.Joiner;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import org.tikv.common.types.DataType;
|
||||
|
||||
public class AggregateFunction extends Expression {
|
||||
private final FunctionType type;
|
||||
private final Expression argument;
|
||||
|
||||
private AggregateFunction(FunctionType type, Expression argument, DataType dataType) {
|
||||
super(dataType);
|
||||
this.type = requireNonNull(type, "function type is null");
|
||||
this.argument = requireNonNull(argument, "function argument is null");
|
||||
}
|
||||
|
||||
public static AggregateFunction newCall(FunctionType type, Expression argument) {
|
||||
return newCall(type, argument, argument.dataType);
|
||||
}
|
||||
|
||||
public static AggregateFunction newCall(
|
||||
FunctionType type, Expression argument, DataType dataType) {
|
||||
return new AggregateFunction(type, argument, dataType);
|
||||
}
|
||||
|
||||
public FunctionType getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public Expression getArgument() {
|
||||
return argument;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Expression> getChildren() {
|
||||
return ImmutableList.of(argument);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(Visitor<R, C> visitor, C context) {
|
||||
return visitor.visit(this, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (!(other instanceof AggregateFunction)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
AggregateFunction that = (AggregateFunction) other;
|
||||
return type == that.type && Objects.equals(argument, that.argument);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(type, argument);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format(
|
||||
"%s(%s)", getType(), Joiner.on(",").useForNull("NULL").join(getChildren()));
|
||||
}
|
||||
|
||||
public enum FunctionType {
|
||||
Sum,
|
||||
Count,
|
||||
Min,
|
||||
Max,
|
||||
First
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,134 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.expression;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
import static org.tikv.common.expression.ArithmeticBinaryExpression.Type.*;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import org.tikv.common.types.DataType;
|
||||
|
||||
public class ArithmeticBinaryExpression extends Expression {
|
||||
private final Expression left;
|
||||
private final Expression right;
|
||||
private final Type compType;
|
||||
|
||||
public ArithmeticBinaryExpression(
|
||||
DataType dataType, Type type, Expression left, Expression right) {
|
||||
super(dataType);
|
||||
resolved = true;
|
||||
this.left = requireNonNull(left, "left expression is null");
|
||||
this.right = requireNonNull(right, "right expression is null");
|
||||
this.compType = requireNonNull(type, "type is null");
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression plus(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(left.dataType, PLUS, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression minus(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(left.dataType, MINUS, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression multiply(
|
||||
DataType dataType, Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(dataType, MULTIPLY, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression multiply(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(left.dataType, MULTIPLY, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression divide(
|
||||
DataType dataType, Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(dataType, DIVIDE, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression divide(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(left.dataType, DIVIDE, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression bitAnd(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(left.dataType, BIT_AND, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression bitOr(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(left.dataType, BIT_OR, left, right);
|
||||
}
|
||||
|
||||
public static ArithmeticBinaryExpression bitXor(Expression left, Expression right) {
|
||||
return new ArithmeticBinaryExpression(left.dataType, BIT_XOR, left, right);
|
||||
}
|
||||
|
||||
public Expression getLeft() {
|
||||
return left;
|
||||
}
|
||||
|
||||
public Expression getRight() {
|
||||
return right;
|
||||
}
|
||||
|
||||
public Type getCompType() {
|
||||
return compType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Expression> getChildren() {
|
||||
return ImmutableList.of(left, right);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(Visitor<R, C> visitor, C context) {
|
||||
return visitor.visit(this, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (!(other instanceof ArithmeticBinaryExpression)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
ArithmeticBinaryExpression that = (ArithmeticBinaryExpression) other;
|
||||
return (compType == that.compType)
|
||||
&& Objects.equals(left, that.left)
|
||||
&& Objects.equals(right, that.right);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(compType, left, right);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("[%s %s %s]", getLeft(), getCompType(), getRight());
|
||||
}
|
||||
|
||||
public enum Type {
|
||||
PLUS,
|
||||
MINUS,
|
||||
MULTIPLY,
|
||||
DIVIDE,
|
||||
BIT_AND,
|
||||
BIT_OR,
|
||||
BIT_XOR
|
||||
}
|
||||
}
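
A short, hypothetical sketch of how these factories compose; `plus`/`minus` reuse the left operand's data type, while `multiply`/`divide` also offer overloads that take an explicit result type.

```java
import org.tikv.common.expression.ArithmeticBinaryExpression;
import org.tikv.common.expression.ColumnRef;
import org.tikv.common.expression.Constant;
import org.tikv.common.expression.Expression;
import org.tikv.common.types.IntegerType;
import org.tikv.common.types.RealType;

// Hypothetical demo class, not part of the commit.
public class ArithmeticDemo {
  public static void main(String[] args) {
    Expression price = ColumnRef.create("price", IntegerType.BIGINT);
    Expression ten = Constant.create(10L, IntegerType.BIGINT);

    // plus/minus reuse the left operand's data type.
    Expression total = ArithmeticBinaryExpression.plus(price, ten);

    // multiply/divide also accept an explicit result type.
    Expression scaled = ArithmeticBinaryExpression.multiply(RealType.DOUBLE, price, ten);

    System.out.println(total);  // [price@... PLUS 10]
    System.out.println(scaled); // [price@... MULTIPLY 10]
  }
}
```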
|
||||
|
|
@ -0,0 +1,53 @@
|
|||
/*
|
||||
*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
*/
|
||||
|
||||
package org.tikv.common.expression;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public class Blocklist {
|
||||
private final Set<String> unsupported = new HashSet<>();
|
||||
|
||||
Blocklist(String string) {
|
||||
if (string != null) {
|
||||
String[] some = string.split(",");
|
||||
for (String one : some) {
|
||||
String trimmedExprName = one.trim();
|
||||
if (!trimmedExprName.isEmpty()) {
|
||||
unsupported.add(trimmedExprName);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
boolean isUnsupported(String name) {
|
||||
return unsupported.contains(name);
|
||||
}
|
||||
|
||||
boolean isUnsupported(Class<?> cls) {
|
||||
return isUnsupported(requireNonNull(cls).getSimpleName());
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return unsupported.stream().collect(Collectors.joining(","));
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.expression;
|
||||
|
||||
import static com.google.common.base.Preconditions.checkNotNull;
|
||||
|
||||
import java.io.Serializable;
|
||||
import org.tikv.common.expression.visitor.ProtoConverter;
|
||||
|
||||
public class ByItem implements Serializable {
|
||||
private final Expression expr;
|
||||
private final boolean desc;
|
||||
|
||||
private ByItem(Expression expr, boolean desc) {
|
||||
checkNotNull(expr, "Expr cannot be null for ByItem");
|
||||
|
||||
this.expr = expr;
|
||||
this.desc = desc;
|
||||
}
|
||||
|
||||
public static ByItem create(Expression expr, boolean desc) {
|
||||
return new ByItem(expr, desc);
|
||||
}
|
||||
|
||||
public com.pingcap.tidb.tipb.ByItem toProto(Object context) {
|
||||
com.pingcap.tidb.tipb.ByItem.Builder builder = com.pingcap.tidb.tipb.ByItem.newBuilder();
|
||||
return builder.setExpr(ProtoConverter.toProto(expr, context)).setDesc(desc).build();
|
||||
}
|
||||
|
||||
public Expression getExpr() {
|
||||
return expr;
|
||||
}
|
||||
|
||||
public boolean isDesc() {
|
||||
return desc;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("[%s %s]", expr.toString(), desc ? "DESC" : "ASC");
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,148 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.expression;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import org.tikv.common.exception.TiExpressionException;
|
||||
import org.tikv.common.meta.TiColumnInfo;
|
||||
import org.tikv.common.meta.TiTableInfo;
|
||||
import org.tikv.common.types.DataType;
|
||||
|
||||
public class ColumnRef extends Expression {
|
||||
private final String name;
|
||||
|
||||
@Deprecated
|
||||
public ColumnRef(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public ColumnRef(String name, DataType dataType) {
|
||||
super(dataType);
|
||||
resolved = true;
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public static ColumnRef create(String name, TiTableInfo table) {
|
||||
name = name.replaceAll("`", "");
|
||||
TiColumnInfo col = table.getColumn(name);
|
||||
if (col != null) {
|
||||
return new ColumnRef(name, col.getType());
|
||||
}
|
||||
|
||||
throw new TiExpressionException(
|
||||
String.format("Column name %s not found in table %s", name, table));
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
public static ColumnRef create(String name) {
|
||||
return new ColumnRef(name);
|
||||
}
|
||||
|
||||
public static ColumnRef create(String name, DataType dataType) {
|
||||
return new ColumnRef(name, dataType);
|
||||
}
|
||||
|
||||
public static ColumnRef create(String name, TiColumnInfo columnInfo) {
|
||||
return new ColumnRef(name, columnInfo.getType());
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name.toLowerCase();
|
||||
}
|
||||
|
||||
public void resolve(TiTableInfo table) {
|
||||
TiColumnInfo columnInfo = null;
|
||||
for (TiColumnInfo col : table.getColumns()) {
|
||||
if (col.matchName(name)) {
|
||||
this.dataType = col.getType();
|
||||
columnInfo = col;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (columnInfo == null) {
|
||||
throw new TiExpressionException(
|
||||
String.format("No Matching column %s from table %s", name, table.getName()));
|
||||
}
|
||||
|
||||
if (columnInfo.getId() == 0) {
|
||||
throw new TiExpressionException("Zero Id is not a referable column id");
|
||||
}
|
||||
}
|
||||
|
||||
public boolean matchName(String name) {
|
||||
return this.name.equalsIgnoreCase(name);
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataType getDataType() {
|
||||
return dataType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isResolved() {
|
||||
return resolved;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object another) {
|
||||
if (this == another) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (another instanceof ColumnRef) {
|
||||
ColumnRef that = (ColumnRef) another;
|
||||
if (isResolved() && that.isResolved()) {
|
||||
return name.equalsIgnoreCase(that.name)
|
||||
&& this.dataType.equals(((ColumnRef) another).dataType);
|
||||
} else {
|
||||
return name.equalsIgnoreCase(that.name);
|
||||
}
|
||||
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
if (isResolved()) {
|
||||
return Objects.hash(this.name, this.dataType);
|
||||
} else {
|
||||
return Objects.hashCode(name);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
if (dataType != null) {
|
||||
return String.format("%s@%s", getName(), dataType.getName());
|
||||
} else {
|
||||
return String.format("[%s]", getName());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Expression> getChildren() {
|
||||
return ImmutableList.of();
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(Visitor<R, C> visitor, C context) {
|
||||
return visitor.visit(this, context);
|
||||
}
|
||||
}
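
A minimal, hypothetical sketch of `ColumnRef` name handling, assuming the case-insensitive matching shown above.

```java
import org.tikv.common.expression.ColumnRef;
import org.tikv.common.types.StringType;

// Hypothetical demo class, not part of the commit.
public class ColumnRefDemo {
  public static void main(String[] args) {
    ColumnRef ref = ColumnRef.create("UserName", StringType.VARCHAR);

    // getName() normalizes to lower case, while matchName() is case-insensitive.
    System.out.println(ref.getName());             // username
    System.out.println(ref.matchName("USERNAME")); // true

    // Equality also ignores the case of the column name.
    System.out.println(ref.equals(ColumnRef.create("username", StringType.VARCHAR))); // true
  }
}
```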
|
||||
|
|
@ -0,0 +1,202 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.expression;
|
||||
|
||||
import static com.google.common.base.Preconditions.checkArgument;
|
||||
import static java.util.Objects.requireNonNull;
|
||||
import static org.tikv.common.expression.ComparisonBinaryExpression.Operator.*;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import org.tikv.common.exception.TiExpressionException;
|
||||
import org.tikv.common.key.TypedKey;
|
||||
import org.tikv.common.types.DataType;
|
||||
import org.tikv.common.types.IntegerType;
|
||||
|
||||
public class ComparisonBinaryExpression extends Expression {
|
||||
private final Expression left;
|
||||
private final Expression right;
|
||||
private final Operator compOperator;
|
||||
private transient NormalizedPredicate normalizedPredicate;
|
||||
|
||||
public ComparisonBinaryExpression(Operator operator, Expression left, Expression right) {
|
||||
super(IntegerType.BOOLEAN);
|
||||
this.resolved = true;
|
||||
this.left = requireNonNull(left, "left expression is null");
|
||||
this.right = requireNonNull(right, "right expression is null");
|
||||
this.compOperator = requireNonNull(operator, "type is null");
|
||||
}
|
||||
|
||||
public static ComparisonBinaryExpression equal(Expression left, Expression right) {
|
||||
return new ComparisonBinaryExpression(EQUAL, left, right);
|
||||
}
|
||||
|
||||
public static ComparisonBinaryExpression notEqual(Expression left, Expression right) {
|
||||
return new ComparisonBinaryExpression(NOT_EQUAL, left, right);
|
||||
}
|
||||
|
||||
public static ComparisonBinaryExpression lessThan(Expression left, Expression right) {
|
||||
return new ComparisonBinaryExpression(LESS_THAN, left, right);
|
||||
}
|
||||
|
||||
public static ComparisonBinaryExpression lessEqual(Expression left, Expression right) {
|
||||
return new ComparisonBinaryExpression(LESS_EQUAL, left, right);
|
||||
}
|
||||
|
||||
public static ComparisonBinaryExpression greaterThan(Expression left, Expression right) {
|
||||
return new ComparisonBinaryExpression(GREATER_THAN, left, right);
|
||||
}
|
||||
|
||||
public static ComparisonBinaryExpression greaterEqual(Expression left, Expression right) {
|
||||
return new ComparisonBinaryExpression(GREATER_EQUAL, left, right);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Expression> getChildren() {
|
||||
return ImmutableList.of(left, right);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(Visitor<R, C> visitor, C context) {
|
||||
return visitor.visit(this, context);
|
||||
}
|
||||
|
||||
public Expression getLeft() {
|
||||
return left;
|
||||
}
|
||||
|
||||
public Expression getRight() {
|
||||
return right;
|
||||
}
|
||||
|
||||
public Operator getComparisonType() {
|
||||
return compOperator;
|
||||
}
|
||||
|
||||
public NormalizedPredicate normalize() {
|
||||
if (normalizedPredicate != null) {
|
||||
return normalizedPredicate;
|
||||
}
|
||||
if (getLeft() instanceof Constant && getRight() instanceof ColumnRef) {
|
||||
Constant left = (Constant) getLeft();
|
||||
ColumnRef right = (ColumnRef) getRight();
|
||||
Operator newOperator;
|
||||
switch (getComparisonType()) {
|
||||
case EQUAL:
|
||||
newOperator = EQUAL;
|
||||
break;
|
||||
case LESS_EQUAL:
|
||||
newOperator = GREATER_EQUAL;
|
||||
break;
|
||||
case LESS_THAN:
|
||||
newOperator = GREATER_THAN;
|
||||
break;
|
||||
case GREATER_EQUAL:
|
||||
newOperator = LESS_EQUAL;
|
||||
break;
|
||||
case GREATER_THAN:
|
||||
newOperator = LESS_THAN;
|
||||
break;
|
||||
case NOT_EQUAL:
|
||||
newOperator = NOT_EQUAL;
|
||||
break;
|
||||
default:
|
||||
throw new TiExpressionException(
|
||||
String.format(
|
||||
"PredicateNormalizer is not able to process type %s", getComparisonType()));
|
||||
}
|
||||
ComparisonBinaryExpression newExpression =
|
||||
new ComparisonBinaryExpression(newOperator, right, left);
|
||||
normalizedPredicate = new NormalizedPredicate(newExpression);
|
||||
return normalizedPredicate;
|
||||
} else if (getRight() instanceof Constant && getLeft() instanceof ColumnRef) {
|
||||
normalizedPredicate = new NormalizedPredicate(this);
|
||||
return normalizedPredicate;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("[%s %s %s]", getLeft(), getComparisonType(), getRight());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (!(other instanceof ComparisonBinaryExpression)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
ComparisonBinaryExpression that = (ComparisonBinaryExpression) other;
|
||||
return (compOperator == that.compOperator)
|
||||
&& Objects.equals(left, that.left)
|
||||
&& Objects.equals(right, that.right);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(compOperator, left, right);
|
||||
}
|
||||
|
||||
public enum Operator {
|
||||
EQUAL,
|
||||
NOT_EQUAL,
|
||||
LESS_THAN,
|
||||
LESS_EQUAL,
|
||||
GREATER_THAN,
|
||||
GREATER_EQUAL
|
||||
}
|
||||
|
||||
public static class NormalizedPredicate {
|
||||
private final ComparisonBinaryExpression pred;
|
||||
private TypedKey key;
|
||||
|
||||
NormalizedPredicate(ComparisonBinaryExpression pred) {
|
||||
checkArgument(pred.getLeft() instanceof ColumnRef);
|
||||
checkArgument(pred.getRight() instanceof Constant);
|
||||
this.pred = pred;
|
||||
}
|
||||
|
||||
public ColumnRef getColumnRef() {
|
||||
return (ColumnRef) pred.getLeft();
|
||||
}
|
||||
|
||||
public Constant getValue() {
|
||||
return (Constant) pred.getRight();
|
||||
}
|
||||
|
||||
public Operator getType() {
|
||||
return pred.getComparisonType();
|
||||
}
|
||||
|
||||
TypedKey getTypedLiteral() {
|
||||
return getTypedLiteral(DataType.UNSPECIFIED_LEN);
|
||||
}
|
||||
|
||||
public TypedKey getTypedLiteral(int prefixLength) {
|
||||
if (key == null) {
|
||||
DataType colRefType = getColumnRef().getDataType();
|
||||
key = TypedKey.toTypedKey(getValue().getValue(), colRefType, prefixLength);
|
||||
}
|
||||
return key;
|
||||
}
|
||||
}
|
||||
}
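
A hypothetical sketch of `normalize()`: a `Constant op ColumnRef` predicate is flipped into the canonical `ColumnRef op Constant` form with the operator mirrored, and predicates that cannot be normalized yield `null`.

```java
import org.tikv.common.expression.ColumnRef;
import org.tikv.common.expression.ComparisonBinaryExpression;
import org.tikv.common.expression.ComparisonBinaryExpression.NormalizedPredicate;
import org.tikv.common.expression.Constant;
import org.tikv.common.types.IntegerType;

// Hypothetical demo class, not part of the commit.
public class NormalizeDemo {
  public static void main(String[] args) {
    ColumnRef age = ColumnRef.create("age", IntegerType.INT);
    Constant thirty = Constant.create(30, IntegerType.INT);

    // "30 < age" is flipped into the canonical form "age > 30".
    ComparisonBinaryExpression flipped = ComparisonBinaryExpression.lessThan(thirty, age);
    NormalizedPredicate normalized = flipped.normalize();

    System.out.println(normalized.getColumnRef()); // age@...
    System.out.println(normalized.getType());      // GREATER_THAN
    System.out.println(normalized.getValue());     // 30

    // A predicate without a Constant on one side cannot be normalized and yields null.
    System.out.println(ComparisonBinaryExpression.lessThan(age, age).normalize()); // null
  }
}
```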
|
||||
|
|
@ -0,0 +1,139 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.expression;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.primitives.UnsignedLong;
|
||||
import java.math.BigDecimal;
|
||||
import java.sql.Date;
|
||||
import java.sql.Timestamp;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import org.joda.time.DateTime;
|
||||
import org.tikv.common.exception.TiExpressionException;
|
||||
import org.tikv.common.types.*;
|
||||
|
||||
// Refactor needed.
|
||||
// Refer to https://github.com/pingcap/tipb/blob/master/go-tipb/expression.pb.go
|
||||
// TODO: This might need a refactor to accept a DataType?
|
||||
public class Constant extends Expression {
|
||||
private final Object value;
|
||||
private static final BigDecimal UNSIGNED_LONG_MAX =
|
||||
new BigDecimal(UnsignedLong.fromLongBits(-1).toString());
|
||||
|
||||
public Constant(Object value, DataType type) {
|
||||
this.value = value;
|
||||
this.dataType = (type == null && value != null) ? getDefaultType(value) : type;
|
||||
this.resolved = true;
|
||||
}
|
||||
|
||||
public static Constant create(Object value, DataType type) {
|
||||
return new Constant(value, type);
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
public static Constant create(Object value) {
|
||||
return new Constant(value, null);
|
||||
}
|
||||
|
||||
protected static boolean isIntegerType(Object value) {
|
||||
return value instanceof Long
|
||||
|| value instanceof Integer
|
||||
|| value instanceof Short
|
||||
|| value instanceof Byte;
|
||||
}
|
||||
|
||||
private static DataType getDefaultType(Object value) {
|
||||
if (value == null) {
|
||||
throw new TiExpressionException("NULL constant has no type");
|
||||
} else if (isIntegerType(value)) {
|
||||
return IntegerType.BIGINT;
|
||||
} else if (value instanceof String) {
|
||||
return StringType.VARCHAR;
|
||||
} else if (value instanceof Float) {
|
||||
return RealType.FLOAT;
|
||||
} else if (value instanceof Double) {
|
||||
return RealType.DOUBLE;
|
||||
} else if (value instanceof BigDecimal) {
|
||||
int prec = ((BigDecimal) value).precision();
|
||||
int frac = ((BigDecimal) value).scale();
|
||||
return new DecimalType(prec, frac);
|
||||
} else if (value instanceof DateTime) {
|
||||
return DateTimeType.DATETIME;
|
||||
} else if (value instanceof Date) {
|
||||
return DateType.DATE;
|
||||
} else if (value instanceof Timestamp) {
|
||||
return TimestampType.TIMESTAMP;
|
||||
} else if (value instanceof byte[]) {
|
||||
return BytesType.TEXT;
|
||||
} else {
|
||||
throw new TiExpressionException(
|
||||
"Constant type not supported:" + value.getClass().getSimpleName());
|
||||
}
|
||||
}
|
||||
|
||||
public void setType(DataType type) {
|
||||
this.dataType = type;
|
||||
}
|
||||
|
||||
public Object getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
if (value == null) {
|
||||
return "null";
|
||||
}
|
||||
if (value instanceof String) {
|
||||
return String.format("\"%s\"", value);
|
||||
}
|
||||
return value.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (other instanceof Constant) {
|
||||
return Objects.equals(value, ((Constant) other).value);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hashCode(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Expression> getChildren() {
|
||||
return ImmutableList.of();
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(Visitor<R, C> visitor, C context) {
|
||||
return visitor.visit(this, context);
|
||||
}
|
||||
|
||||
public boolean isOverflowed() {
|
||||
if (this.dataType instanceof IntegerType) {
|
||||
if (((IntegerType) this.dataType).isUnsignedLong()) {
|
||||
return ((BigDecimal) value).min(UNSIGNED_LONG_MAX).signum() > 0
|
||||
|| ((BigDecimal) value).signum() < 0;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
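
A small, hypothetical sketch of the default type inference performed when a `Constant` is created without an explicit `DataType`.

```java
import java.math.BigDecimal;
import org.tikv.common.expression.Constant;

// Hypothetical demo class, not part of the commit.
public class ConstantDemo {
  public static void main(String[] args) {
    // With a null type, the constructor infers a default from the Java value:
    // Long/Integer/Short/Byte -> BIGINT, String -> VARCHAR, BigDecimal -> DECIMAL(prec, scale), ...
    Constant i = Constant.create(42);                     // deprecated convenience factory
    Constant s = Constant.create("hello");
    Constant d = Constant.create(new BigDecimal("3.14"));

    System.out.println(i.getDataType().getClass().getSimpleName()); // IntegerType
    System.out.println(s);                                          // "hello"
    System.out.println(d.getDataType().getClass().getSimpleName()); // DecimalType
  }
}
```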
|
||||
|
|
@ -0,0 +1,50 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.expression;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.List;
|
||||
import org.tikv.common.types.DataType;
|
||||
|
||||
public abstract class Expression implements Serializable {
|
||||
protected DataType dataType;
|
||||
protected boolean resolved;
|
||||
|
||||
public Expression(DataType dataType) {
|
||||
this.dataType = dataType;
|
||||
this.resolved = true;
|
||||
}
|
||||
|
||||
public Expression() {
|
||||
this.resolved = false;
|
||||
}
|
||||
|
||||
public abstract List<Expression> getChildren();
|
||||
|
||||
public abstract <R, C> R accept(Visitor<R, C> visitor, C context);
|
||||
|
||||
public boolean isResolved() {
|
||||
return getChildren().stream().allMatch(Expression::isResolved);
|
||||
}
|
||||
|
||||
public DataType getDataType() {
|
||||
return dataType;
|
||||
}
|
||||
|
||||
public void setDataType(DataType dataType) {
|
||||
this.dataType = dataType;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.expression;
|
||||
|
||||
public class ExpressionBlocklist extends Blocklist {
|
||||
|
||||
public ExpressionBlocklist(String exprsString) {
|
||||
super(exprsString);
|
||||
}
|
||||
|
||||
public boolean isUnsupportedPushDownExpr(Class<?> cls) {
|
||||
return isUnsupported(cls);
|
||||
}
|
||||
}
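
A hypothetical sketch showing how the blocklist string is parsed: entries are split on commas, trimmed, and matched against simple class names.

```java
import org.tikv.common.expression.ComparisonBinaryExpression;
import org.tikv.common.expression.ExpressionBlocklist;
import org.tikv.common.expression.StringRegExpression;

// Hypothetical demo class, not part of the commit.
public class BlocklistDemo {
  public static void main(String[] args) {
    // Comma-separated entries are trimmed and matched against simple class names.
    ExpressionBlocklist blocklist =
        new ExpressionBlocklist(" StringRegExpression , FuncCallExpr ");

    System.out.println(
        blocklist.isUnsupportedPushDownExpr(StringRegExpression.class));        // true
    System.out.println(
        blocklist.isUnsupportedPushDownExpr(ComparisonBinaryExpression.class)); // false
  }
}
```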
|
||||
|
|
@ -0,0 +1,96 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.expression;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.function.Function;
|
||||
|
||||
public class FuncCallExpr extends Expression {
|
||||
private final Expression child;
|
||||
private final Type funcTp;
|
||||
|
||||
public FuncCallExpr(Expression expr, Type funcTp) {
|
||||
this.child = expr;
|
||||
this.funcTp = funcTp;
|
||||
}
|
||||
|
||||
public static FuncCallExpr year(Expression expr) {
|
||||
return new FuncCallExpr(expr, Type.YEAR);
|
||||
}
|
||||
|
||||
public Type getFuncTp() {
|
||||
return this.funcTp;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Expression> getChildren() {
|
||||
return ImmutableList.of(child);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(Visitor<R, C> visitor, C context) {
|
||||
return visitor.visit(this, context);
|
||||
}
|
||||
|
||||
public Expression getExpression() {
|
||||
return child;
|
||||
}
|
||||
|
||||
private String getFuncString() {
|
||||
if (funcTp == Type.YEAR) {
|
||||
return "year";
|
||||
}
|
||||
return "";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("%s(%s)", getFuncString(), getExpression());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (!(other instanceof FuncCallExpr)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
FuncCallExpr that = (FuncCallExpr) other;
|
||||
return funcTp == that.funcTp && Objects.equals(child, that.child);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(funcTp, child);
|
||||
}
|
||||
|
||||
// Try to evaluate a {@code Constant} literal if its type is varchar or datetime.
// If the literal cannot be evaluated, the input literal is returned unchanged.
|
||||
public Constant eval(Constant literal) {
|
||||
Function<Constant, Constant> evalFn = FuncCallExprEval.getEvalFn(funcTp);
|
||||
if (evalFn != null) {
return evalFn.apply(literal);
}
|
||||
return literal;
|
||||
}
|
||||
|
||||
public enum Type {
|
||||
YEAR
|
||||
}
|
||||
}
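
A hypothetical sketch of `eval()` folding a varchar literal into its year, as used when rewriting partition predicates.

```java
import org.tikv.common.expression.ColumnRef;
import org.tikv.common.expression.Constant;
import org.tikv.common.expression.FuncCallExpr;
import org.tikv.common.types.DateType;
import org.tikv.common.types.StringType;

// Hypothetical demo class, not part of the commit.
public class FuncCallExprDemo {
  public static void main(String[] args) {
    // year() over a date column, as produced while rewriting partition predicates.
    FuncCallExpr yearOfPurchased =
        FuncCallExpr.year(ColumnRef.create("purchased", DateType.DATE));

    // eval() folds a varchar or datetime literal into an INT year constant.
    Constant folded =
        yearOfPurchased.eval(Constant.create("1995-10-10", StringType.VARCHAR));
    System.out.println(folded);          // 1995
    System.out.println(yearOfPurchased); // year(purchased@...)
  }
}
```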
|
||||
|
|
@ -0,0 +1,58 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.expression;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
import org.joda.time.DateTime;
|
||||
import org.tikv.common.expression.FuncCallExpr.Type;
|
||||
import org.tikv.common.types.*;
|
||||
|
||||
public class FuncCallExprEval {
|
||||
|
||||
private static final Map<Type, Function<Constant, Constant>> evalMap = new HashMap<>();
|
||||
|
||||
static {
|
||||
// adding year eval logic here
|
||||
evalMap.put(
|
||||
Type.YEAR,
|
||||
literal -> {
|
||||
DataType type = literal.getDataType();
|
||||
if (type instanceof StringType) {
|
||||
DateTime date = DateTime.parse((String) literal.getValue());
|
||||
return Constant.create(date.getYear(), IntegerType.INT);
|
||||
} else if (type instanceof DateType) {
|
||||
DateTime date = (DateTime) literal.getValue();
|
||||
return Constant.create(date.getYear(), IntegerType.INT);
|
||||
} else if (type instanceof DateTimeType) {
|
||||
DateTime date = (DateTime) literal.getValue();
|
||||
return Constant.create(date.getYear(), IntegerType.INT);
|
||||
}
|
||||
throw new UnsupportedOperationException(
|
||||
String.format("cannot apply year on %s", type.getName()));
|
||||
});
|
||||
|
||||
// For newly added types, please also add the corresponding eval logic here.
|
||||
}
|
||||
|
||||
static Function<Constant, Constant> getEvalFn(Type tp) {
|
||||
if (evalMap.containsKey(tp)) {
|
||||
return evalMap.get(tp);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,70 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.expression;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import org.tikv.common.types.IntegerType;
|
||||
|
||||
public class IsNull extends Expression {
|
||||
private final Expression expression;
|
||||
|
||||
public IsNull(Expression expression) {
|
||||
super(IntegerType.BOOLEAN);
|
||||
this.resolved = true;
|
||||
this.expression = requireNonNull(expression, "expression is null");
|
||||
}
|
||||
|
||||
public Expression getExpression() {
|
||||
return expression;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Expression> getChildren() {
|
||||
return ImmutableList.of(expression);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(Visitor<R, C> visitor, C context) {
|
||||
return visitor.visit(this, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("IsNull(%s)", getExpression());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (!(other instanceof IsNull)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
IsNull that = (IsNull) other;
|
||||
return Objects.equals(expression, that.expression);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hashCode(expression);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,104 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
// source: expression.proto
|
||||
|
||||
package org.tikv.common.expression;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import org.tikv.common.types.IntegerType;
|
||||
|
||||
public class LogicalBinaryExpression extends Expression {
|
||||
private final Expression left;
|
||||
private final Expression right;
|
||||
private final Type compType;
|
||||
|
||||
public LogicalBinaryExpression(Type type, Expression left, Expression right) {
|
||||
super(IntegerType.BOOLEAN);
|
||||
this.left = requireNonNull(left, "left expression is null");
|
||||
this.right = requireNonNull(right, "right expression is null");
|
||||
this.compType = requireNonNull(type, "type is null");
|
||||
}
|
||||
|
||||
public static LogicalBinaryExpression and(Expression left, Expression right) {
|
||||
return new LogicalBinaryExpression(Type.AND, left, right);
|
||||
}
|
||||
|
||||
public static LogicalBinaryExpression or(Expression left, Expression right) {
|
||||
return new LogicalBinaryExpression(Type.OR, left, right);
|
||||
}
|
||||
|
||||
public static LogicalBinaryExpression xor(Expression left, Expression right) {
|
||||
return new LogicalBinaryExpression(Type.XOR, left, right);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Expression> getChildren() {
|
||||
return ImmutableList.of(getLeft(), getRight());
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(Visitor<R, C> visitor, C context) {
|
||||
return visitor.visit(this, context);
|
||||
}
|
||||
|
||||
public Expression getLeft() {
|
||||
return left;
|
||||
}
|
||||
|
||||
public Expression getRight() {
|
||||
return right;
|
||||
}
|
||||
|
||||
public Type getCompType() {
|
||||
return compType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (!(other instanceof LogicalBinaryExpression)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
LogicalBinaryExpression that = (LogicalBinaryExpression) other;
|
||||
return (compType == that.compType)
|
||||
&& Objects.equals(left, that.left)
|
||||
&& Objects.equals(right, that.right);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(compType, left, right);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("[%s %s %s]", getLeft(), getCompType(), getRight());
|
||||
}
|
||||
|
||||
public enum Type {
|
||||
AND,
|
||||
OR,
|
||||
XOR
|
||||
}
|
||||
}
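
A hypothetical sketch of composing comparison predicates with the logical factories above.

```java
import org.tikv.common.expression.ColumnRef;
import org.tikv.common.expression.ComparisonBinaryExpression;
import org.tikv.common.expression.Constant;
import org.tikv.common.expression.Expression;
import org.tikv.common.expression.LogicalBinaryExpression;
import org.tikv.common.types.IntegerType;

// Hypothetical demo class, not part of the commit.
public class LogicalDemo {
  public static void main(String[] args) {
    ColumnRef age = ColumnRef.create("age", IntegerType.INT);
    Expression adult =
        ComparisonBinaryExpression.greaterEqual(age, Constant.create(18, IntegerType.INT));
    Expression senior =
        ComparisonBinaryExpression.greaterEqual(age, Constant.create(65, IntegerType.INT));

    // and/or/xor always carry a BOOLEAN result type and hold exactly two children.
    Expression filter =
        LogicalBinaryExpression.and(adult, LogicalBinaryExpression.or(senior, adult));
    System.out.println(filter);                      // [[age@... GREATER_EQUAL 18] AND [...]]
    System.out.println(filter.getChildren().size()); // 2
  }
}
```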
|
||||
|
|
@ -0,0 +1,75 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.expression;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import org.tikv.common.types.IntegerType;
|
||||
|
||||
public class Not extends Expression {
|
||||
|
||||
private final Expression expression;
|
||||
|
||||
public Not(Expression expression) {
|
||||
super(IntegerType.BOOLEAN);
|
||||
resolved = true;
|
||||
this.expression = requireNonNull(expression, "expression is null");
|
||||
}
|
||||
|
||||
public static Not not(Expression expression) {
|
||||
return new Not(expression);
|
||||
}
|
||||
|
||||
public Expression getExpression() {
|
||||
return expression;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Expression> getChildren() {
|
||||
return ImmutableList.of(expression);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(Visitor<R, C> visitor, C context) {
|
||||
return visitor.visit(this, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("Not(%s)", getExpression());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (!(other instanceof Not)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Not that = (Not) other;
|
||||
return Objects.equals(expression, that.expression);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hashCode(expression);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,94 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.expression;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import org.tikv.common.meta.TiPartitionDef;
|
||||
import org.tikv.common.meta.TiPartitionInfo;
|
||||
import org.tikv.common.meta.TiPartitionInfo.PartitionType;
|
||||
import org.tikv.common.meta.TiTableInfo;
|
||||
import org.tikv.common.parser.TiParser;
|
||||
|
||||
public class PartitionPruner {
|
||||
public static List<Expression> extractLogicalOrComparisonExpr(List<Expression> filters) {
|
||||
List<Expression> filteredFilters = new ArrayList<>();
|
||||
for (Expression expr : filters) {
|
||||
if (expr instanceof LogicalBinaryExpression || expr instanceof ComparisonBinaryExpression) {
|
||||
filteredFilters.add(expr);
|
||||
}
|
||||
}
|
||||
return filteredFilters;
|
||||
}
|
||||
|
||||
public static List<TiPartitionDef> prune(TiTableInfo tableInfo, List<Expression> filters) {
|
||||
PartitionType type = tableInfo.getPartitionInfo().getType();
|
||||
if (!tableInfo.isPartitionEnabled()) {
|
||||
return tableInfo.getPartitionInfo().getDefs();
|
||||
}
|
||||
|
||||
boolean isRangeCol =
|
||||
Objects.requireNonNull(tableInfo.getPartitionInfo().getColumns()).size() > 0;
|
||||
|
||||
switch (type) {
|
||||
case RangePartition:
|
||||
if (!isRangeCol) {
RangePartitionPruner pruner = new RangePartitionPruner(tableInfo);
return pruner.prune(filters);
} else {
// TiDB only supports partition pruning on a range partition over a single column.
// If we meet a range partition on multiple columns, we simply return all partitions.
if (tableInfo.getPartitionInfo().getColumns().size() > 1) {
return tableInfo.getPartitionInfo().getDefs();
}
RangeColumnPartitionPruner pruner = new RangeColumnPartitionPruner(tableInfo);
return pruner.prune(filters);
}
|
||||
case ListPartition:
|
||||
case HashPartition:
|
||||
return tableInfo.getPartitionInfo().getDefs();
|
||||
}
|
||||
|
||||
throw new UnsupportedOperationException("cannot prune under invalid partition table");
|
||||
}
|
||||
|
||||
static void generateRangeExprs(
|
||||
TiPartitionInfo partInfo,
|
||||
List<Expression> partExprs,
|
||||
TiParser parser,
|
||||
String partExprStr,
|
||||
int lessThanIdx) {
|
||||
// partExprColRefs.addAll(PredicateUtils.extractColumnRefFromExpression(partExpr));
|
||||
for (int i = 0; i < partInfo.getDefs().size(); i++) {
|
||||
TiPartitionDef pDef = partInfo.getDefs().get(i);
|
||||
String current = pDef.getLessThan().get(lessThanIdx);
|
||||
String leftHand;
|
||||
if (current.equals("MAXVALUE")) {
|
||||
leftHand = "true";
|
||||
} else {
|
||||
leftHand = String.format("%s < %s", partExprStr, current);
|
||||
}
|
||||
if (i == 0) {
|
||||
partExprs.add(parser.parseExpression(leftHand));
|
||||
} else {
|
||||
String previous = partInfo.getDefs().get(i - 1).getLessThan().get(lessThanIdx);
|
||||
String and = String.format("%s and %s", partExprStr + ">=" + previous, leftHand);
|
||||
partExprs.add(parser.parseExpression(and));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
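
A hypothetical sketch of the public helper `extractLogicalOrComparisonExpr`, which keeps only the logical and comparison expressions that pruning can work with.

```java
import java.util.Arrays;
import java.util.List;
import org.tikv.common.expression.ColumnRef;
import org.tikv.common.expression.ComparisonBinaryExpression;
import org.tikv.common.expression.Constant;
import org.tikv.common.expression.Expression;
import org.tikv.common.expression.IsNull;
import org.tikv.common.expression.PartitionPruner;
import org.tikv.common.types.IntegerType;

// Hypothetical demo class, not part of the commit.
public class PartitionPrunerDemo {
  public static void main(String[] args) {
    ColumnRef y = ColumnRef.create("y", IntegerType.INT);
    Expression comparison =
        ComparisonBinaryExpression.lessThan(y, Constant.create(1995, IntegerType.INT));
    Expression isNull = new IsNull(y);

    // Only logical and comparison expressions take part in pruning; everything else is dropped.
    List<Expression> kept =
        PartitionPruner.extractLogicalOrComparisonExpr(Arrays.asList(comparison, isNull));
    System.out.println(kept.size()); // 1
    System.out.println(kept.get(0)); // [y@... LESS_THAN 1995]
  }
}
```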
|
||||
|
|
@ -0,0 +1,130 @@
|
|||
/*
|
||||
* Copyright 2020 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.expression;
|
||||
|
||||
import static org.tikv.common.expression.PartitionPruner.extractLogicalOrComparisonExpr;
|
||||
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import com.google.common.collect.RangeSet;
|
||||
import com.google.common.collect.TreeRangeSet;
|
||||
import java.util.*;
|
||||
import org.tikv.common.expression.ComparisonBinaryExpression.NormalizedPredicate;
|
||||
import org.tikv.common.expression.visitor.DefaultVisitor;
|
||||
import org.tikv.common.expression.visitor.PrunedPartitionBuilder;
|
||||
import org.tikv.common.key.TypedKey;
|
||||
import org.tikv.common.meta.TiPartitionDef;
|
||||
import org.tikv.common.meta.TiPartitionInfo;
|
||||
import org.tikv.common.meta.TiTableInfo;
|
||||
import org.tikv.common.parser.TiParser;
|
||||
import org.tikv.common.predicates.PredicateUtils;
|
||||
|
||||
@SuppressWarnings("UnstableApiUsage")
|
||||
public class RangeColumnPartitionPruner
|
||||
extends DefaultVisitor<Set<Integer>, LogicalBinaryExpression> {
|
||||
private final int partsSize;
|
||||
private final TiPartitionInfo partInfo;
|
||||
private final Map<String, List<Expression>> partExprsPerColumnRef;
|
||||
|
||||
RangeColumnPartitionPruner(TiTableInfo tableInfo) {
|
||||
this.partExprsPerColumnRef = new HashMap<>();
|
||||
this.partInfo = tableInfo.getPartitionInfo();
|
||||
TiParser parser = new TiParser(tableInfo);
|
||||
for (int i = 0; i < partInfo.getColumns().size(); i++) {
|
||||
List<Expression> partExprs = new ArrayList<>();
|
||||
String colRefName = partInfo.getColumns().get(i);
|
||||
PartitionPruner.generateRangeExprs(partInfo, partExprs, parser, colRefName, i);
|
||||
partExprsPerColumnRef.put(colRefName, partExprs);
|
||||
}
|
||||
this.partsSize = tableInfo.getPartitionInfo().getDefs().size();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Set<Integer> visit(LogicalBinaryExpression node, LogicalBinaryExpression parent) {
|
||||
Expression left = node.getLeft();
|
||||
Expression right = node.getRight();
|
||||
Set<Integer> partsIsCoveredByLeft = left.accept(this, node);
|
||||
Set<Integer> partsIsCoveredByRight = right.accept(this, node);
|
||||
switch (node.getCompType()) {
|
||||
case OR:
|
||||
partsIsCoveredByLeft.addAll(partsIsCoveredByRight);
|
||||
return partsIsCoveredByLeft;
|
||||
case AND:
|
||||
Set<Integer> partsIsCoveredByBoth = new HashSet<>();
|
||||
for (int i = 0; i < partsSize; i++) {
|
||||
if (partsIsCoveredByLeft.contains(i) && partsIsCoveredByRight.contains(i)) {
|
||||
partsIsCoveredByBoth.add(i);
|
||||
}
|
||||
}
|
||||
return partsIsCoveredByBoth;
|
||||
}
|
||||
|
||||
throw new UnsupportedOperationException("cannot access here");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Set<Integer> visit(ComparisonBinaryExpression node, LogicalBinaryExpression parent) {
|
||||
NormalizedPredicate predicate = node.normalize();
|
||||
if (predicate == null) {
|
||||
throw new UnsupportedOperationException(
|
||||
String.format("ComparisonBinaryExpression %s cannot be normalized", node.toString()));
|
||||
}
|
||||
String colRefName = predicate.getColumnRef().getName();
|
||||
List<Expression> partExprs = partExprsPerColumnRef.get(colRefName);
|
||||
Set<Integer> partDefs = new HashSet<>();
|
||||
if (partExprs == null) {
|
||||
switch (parent.getCompType()) {
|
||||
case OR:
|
||||
return partDefs;
|
||||
case AND:
|
||||
for (int i = 0; i < partsSize; i++) {
|
||||
partDefs.add(i);
|
||||
}
|
||||
return partDefs;
|
||||
}
|
||||
}
|
||||
Objects.requireNonNull(partExprs, "partition expression cannot be null");
|
||||
for (int i = 0; i < partsSize; i++) {
|
||||
PrunedPartitionBuilder rangeBuilder =
|
||||
new PrunedPartitionBuilder(ImmutableSet.of(predicate.getColumnRef()));
|
||||
RangeSet<TypedKey> partExprRange = rangeBuilder.buildRange(partExprs.get(i));
|
||||
RangeSet<TypedKey> filterRange = rangeBuilder.buildRange(node);
|
||||
RangeSet<TypedKey> copy = TreeRangeSet.create(partExprRange);
|
||||
copy.removeAll(filterRange.complement());
|
||||
// the partition expression range and the filter range intersect, so keep this partition
|
||||
if (!copy.isEmpty()) {
|
||||
partDefs.add(i);
|
||||
}
|
||||
}
|
||||
return partDefs;
|
||||
}
|
||||
|
||||
public List<TiPartitionDef> prune(List<Expression> filters) {
|
||||
filters = extractLogicalOrComparisonExpr(filters);
|
||||
Expression cnfExpr = PredicateUtils.mergeCNFExpressions(filters);
|
||||
if (cnfExpr == null) {
|
||||
return partInfo.getDefs();
|
||||
}
|
||||
Set<Integer> partsIdx = cnfExpr.accept(this, null);
|
||||
List<TiPartitionDef> pDefs = new ArrayList<>();
|
||||
for (int i = 0; i < partsSize; i++) {
|
||||
if (partsIdx.contains(i)) {
|
||||
// part range is empty indicates this partition can be pruned.
|
||||
pDefs.add(partInfo.getDefs().get(i));
|
||||
}
|
||||
}
|
||||
return pDefs;
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,151 @@
|
|||
/*
|
||||
* Copyright 2019 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.expression;
|
||||
|
||||
import static org.tikv.common.expression.PartitionPruner.extractLogicalOrComparisonExpr;
|
||||
|
||||
import com.google.common.collect.RangeSet;
|
||||
import java.util.*;
|
||||
import org.tikv.common.exception.UnsupportedPartitionExprException;
|
||||
import org.tikv.common.exception.UnsupportedSyntaxException;
|
||||
import org.tikv.common.expression.visitor.PartAndFilterExprRewriter;
|
||||
import org.tikv.common.expression.visitor.PrunedPartitionBuilder;
|
||||
import org.tikv.common.key.TypedKey;
|
||||
import org.tikv.common.meta.TiPartitionDef;
|
||||
import org.tikv.common.meta.TiPartitionInfo;
|
||||
import org.tikv.common.meta.TiTableInfo;
|
||||
import org.tikv.common.parser.TiParser;
|
||||
import org.tikv.common.predicates.PredicateUtils;
|
||||
|
||||
@SuppressWarnings("UnstableApiUsage")
|
||||
public class RangePartitionPruner {
|
||||
private final TiPartitionInfo partInfo;
|
||||
private final Set<ColumnRef> partExprColRefs = new HashSet<>();
|
||||
private Expression partExpr;
|
||||
private List<Expression> partExprs;
|
||||
private PrunedPartitionBuilder rangeBuilder;
|
||||
private boolean foundUnsupportedPartExpr;
|
||||
|
||||
RangePartitionPruner(TiTableInfo tableInfo) {
|
||||
this.partInfo = tableInfo.getPartitionInfo();
|
||||
try {
|
||||
this.partExprs = generateRangePartExprs(tableInfo);
|
||||
this.rangeBuilder = new PrunedPartitionBuilder(partExprColRefs);
|
||||
} catch (UnsupportedSyntaxException | UnsupportedPartitionExprException e) {
|
||||
foundUnsupportedPartExpr = true;
|
||||
}
|
||||
}
|
||||
|
||||
private List<TiPartitionDef> pruneRangeNormalPart(Expression cnfExpr) {
|
||||
Objects.requireNonNull(cnfExpr, "cnf expression cannot be null at pruning stage");
|
||||
|
||||
// We need to rewrite the filter expression if the partition expression is a year() expression.
// This step is designed to deal with y < '1995-10-10' (in the filter condition and also a part
// of the partition expression) where y is a date type.
// Rewriting applies the partition expression only to the constant part, resulting in
// year(y) < 1995.
|
||||
PartAndFilterExprRewriter expressionRewriter = new PartAndFilterExprRewriter(partExpr);
|
||||
cnfExpr = expressionRewriter.rewrite(cnfExpr);
|
||||
// If we find an unsupported partition function, we fall back to scanning all partitions.
|
||||
if (expressionRewriter.isUnsupportedPartFnFound()) {
|
||||
return partInfo.getDefs();
|
||||
}
|
||||
RangeSet<TypedKey> filterRange = rangeBuilder.buildRange(cnfExpr);
|
||||
|
||||
List<TiPartitionDef> pDefs = new ArrayList<>();
|
||||
for (int i = 0; i < partExprs.size(); i++) {
|
||||
Expression partExpr = partExprs.get(i);
|
||||
// When we build the range, we still need to rewrite the partition expression.
// If we have year(purchased) < 1995, which cannot be normalized, we need
// to rewrite it into purchased < 1995 so that RangeSetBuilder can handle it.
|
||||
RangeSet<TypedKey> partRange = rangeBuilder.buildRange(expressionRewriter.rewrite(partExpr));
|
||||
partRange.removeAll(filterRange.complement());
|
||||
if (!partRange.isEmpty()) {
|
||||
// an empty partition range indicates this partition can be pruned.
|
||||
pDefs.add(partInfo.getDefs().get(i));
|
||||
}
|
||||
}
|
||||
return pDefs;
|
||||
}
|
||||
|
||||
// Say we have a partitioned table with the following partition definitions and year(y) as the
// partition expression:
// 1. p0 less than 1995
// 2. p1 less than 1996
// 3. p2 less than maxvalue
// After this function, the definitions above become the following:
// 1. p0: year(y) < 1995
// 2. p1: 1995 <= year(y) and year(y) < 1996
// 3. p2: 1996 <= year(y) and true
// true will become the {@code Constant} 1.
|
||||
private List<Expression> generateRangePartExprs(TiTableInfo tableInfo) {
|
||||
TiPartitionInfo partInfo = tableInfo.getPartitionInfo();
|
||||
List<Expression> partExprs = new ArrayList<>();
|
||||
TiParser parser = new TiParser(tableInfo);
|
||||
// check year expression
|
||||
// rewrite filter condition
|
||||
// purchased > '1995-10-10'
|
||||
// year(purchased) > year('1995-10-10')
|
||||
// purchased > 1995
|
||||
String partExprStr = tableInfo.getPartitionInfo().getExpr();
|
||||
|
||||
partExpr = parser.parseExpression(partExprStr);
|
||||
// when partExpr is null, it indicates partition expression
|
||||
// is not supported for now
|
||||
if (partExpr == null) {
|
||||
throw new UnsupportedPartitionExprException(
|
||||
String.format("%s is not supported", partExprStr));
|
||||
}
// When it is not the range-column case, only the first element stores useful information.
|
||||
|
||||
partExprColRefs.addAll(PredicateUtils.extractColumnRefFromExpression(partExpr));
|
||||
PartitionPruner.generateRangeExprs(partInfo, partExprs, parser, partExprStr, 0);
|
||||
|
||||
return partExprs;
|
||||
}
|
||||
|
||||
/**
* When the table is a partitioned table and its type is range, we use this method to do the
* pruning. Range partitioning has two types: 1. range 2. range column. In the first case,
* pruneRangeNormalPart is called; otherwise pruneRangeColPart would be called. For now, we
* simply skip the range-column partition case.
*
* @param filters the where-condition filters belonging to a select statement.
* @return the pruned list of partitions for scanning.
*/
|
||||
public List<TiPartitionDef> prune(List<Expression> filters) {
|
||||
filters = extractLogicalOrComparisonExpr(filters);
|
||||
Expression cnfExpr = PredicateUtils.mergeCNFExpressions(filters);
|
||||
if (!canBePruned(cnfExpr)) {
|
||||
return this.partInfo.getDefs();
|
||||
}
|
||||
|
||||
return pruneRangeNormalPart(cnfExpr);
|
||||
}
|
||||
|
||||
/**
* Returns false if the table cannot be pruned or partitioning is not enabled. Returns true if
* partition pruning can be applied.
*
* @param filter a where condition. It must be in CNF and must not contain NOT or IS NULL.
* @return true if partition pruning can be applied to the filter.
*/
|
||||
public boolean canBePruned(Expression filter) {
|
||||
if (foundUnsupportedPartExpr) {
|
||||
return false;
|
||||
}
|
||||
// if query is select * from t, then filter will be null.
|
||||
return filter != null;
|
||||
}
|
||||
}
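
A hypothetical helper showing how pruning is typically invoked through `PartitionPruner.prune`; the `TiTableInfo` is assumed to come from an existing cluster's catalog, and the column name `purchased` is an assumption.

```java
import java.util.Collections;
import java.util.List;
import org.tikv.common.expression.ColumnRef;
import org.tikv.common.expression.ComparisonBinaryExpression;
import org.tikv.common.expression.Constant;
import org.tikv.common.expression.Expression;
import org.tikv.common.expression.PartitionPruner;
import org.tikv.common.meta.TiPartitionDef;
import org.tikv.common.meta.TiTableInfo;
import org.tikv.common.types.StringType;

// Hypothetical helper, not part of the commit. PartitionPruner.prune dispatches to
// RangePartitionPruner or RangeColumnPartitionPruner depending on how the table is partitioned.
public class PruneDemo {
  static List<TiPartitionDef> prunePurchasedBefore1995(TiTableInfo tableInfo) {
    // "purchased" is an assumed column of the partitioned table.
    Expression filter =
        ComparisonBinaryExpression.lessThan(
            ColumnRef.create("purchased", tableInfo),
            Constant.create("1995-10-10", StringType.VARCHAR));
    return PartitionPruner.prune(tableInfo, Collections.singletonList(filter));
  }
}
```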
|
||||
|
|
@ -0,0 +1,144 @@
|
|||
/*
|
||||
* Copyright 2017 PingCAP, Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.tikv.common.expression;
|
||||
|
||||
import static java.util.Objects.requireNonNull;
|
||||
import static org.tikv.common.expression.StringRegExpression.Type.*;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import org.tikv.common.key.TypedKey;
|
||||
import org.tikv.common.types.DataType;
|
||||
import org.tikv.common.types.IntegerType;
|
||||
|
||||
public class StringRegExpression extends Expression {
|
||||
private final Expression left;
|
||||
private final Expression right;
|
||||
private final Expression reg;
|
||||
private final Type regType;
|
||||
private transient TypedKey key;
|
||||
|
||||
public StringRegExpression(Type type, Expression left, Expression right, Expression reg) {
|
||||
super(IntegerType.BOOLEAN);
|
||||
resolved = true;
|
||||
this.left = requireNonNull(left, "left expression is null");
|
||||
this.right = requireNonNull(right, "right expression is null");
|
||||
this.regType = requireNonNull(type, "type is null");
|
||||
this.reg = requireNonNull(reg, "reg string is null");
|
||||
}
|
||||
|
||||
public static StringRegExpression startsWith(Expression left, Expression right) {
|
||||
Expression reg = Constant.create(((Constant) right).getValue() + "%", right.getDataType());
|
||||
return new StringRegExpression(STARTS_WITH, left, right, reg);
|
||||
}
|
||||
|
||||
public static StringRegExpression contains(Expression left, Expression right) {
|
||||
Expression reg =
|
||||
Constant.create("%" + ((Constant) right).getValue() + "%", right.getDataType());
|
||||
return new StringRegExpression(CONTAINS, left, right, reg);
|
||||
}
|
||||
|
||||
public static StringRegExpression endsWith(Expression left, Expression right) {
|
||||
Expression reg = Constant.create("%" + ((Constant) right).getValue(), right.getDataType());
|
||||
return new StringRegExpression(ENDS_WITH, left, right, reg);
|
||||
}
|
||||
|
||||
public static StringRegExpression like(Expression left, Expression right) {
|
||||
return new StringRegExpression(LIKE, left, right, right);
|
||||
}
|
||||
|
||||
public ColumnRef getColumnRef() {
|
||||
return (ColumnRef) getLeft();
|
||||
}
|
||||
|
||||
public Constant getValue() {
|
||||
return (Constant) getRight();
|
||||
}
|
||||
|
||||
public TypedKey getTypedLiteral() {
|
||||
return getTypedLiteral(DataType.UNSPECIFIED_LEN);
|
||||
}
|
||||
|
||||
public TypedKey getTypedLiteral(int prefixLength) {
|
||||
if (key == null) {
|
||||
key = TypedKey.toTypedKey(getValue().getValue(), getColumnRef().getDataType(), prefixLength);
|
||||
}
|
||||
return key;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Expression> getChildren() {
|
||||
// For a LIKE statement, an extra ESCAPE parameter is required as the third argument of the
// ScalarFunc. However, ESCAPE is not supported in Spark, so we simply set this value to zero.
|
||||
return ImmutableList.of(left, reg, Constant.create(0, IntegerType.BIGINT));
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R, C> R accept(Visitor<R, C> visitor, C context) {
|
||||
return visitor.visit(this, context);
|
||||
}
|
||||
|
||||
public Expression getLeft() {
|
||||
return left;
|
||||
}
|
||||
|
||||
public Expression getRight() {
|
||||
return right;
|
||||
}
|
||||
|
||||
public Type getRegType() {
|
||||
return regType;
|
||||
}
|
||||
|
||||
public Expression getReg() {
|
||||
return reg;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("[%s %s %s reg: %s]", getLeft(), getRegType(), getRight(), getReg());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (!(other instanceof StringRegExpression)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
StringRegExpression that = (StringRegExpression) other;
|
||||
return (regType == that.regType)
|
||||
&& Objects.equals(left, that.left)
|
||||
&& Objects.equals(right, that.right)
|
||||
&& Objects.equals(reg, that.reg);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(regType, left, right, reg);
|
||||
}
|
||||
|
||||
public enum Type {
|
||||
STARTS_WITH,
|
||||
CONTAINS,
|
||||
ENDS_WITH,
|
||||
LIKE
|
||||
}
|
||||
}
|
||||
|
|
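For illustration (not part of the patch): given a column reference and a string constant "abc", `startsWith` stores the extra pattern constant `"abc%"` as `reg`, `contains` stores `"%abc%"`, and `endsWith` stores `"%abc"`; `getChildren()` then exposes `(left, reg, 0)` so that all four variants can be pushed down as a single LIKE call with the ESCAPE argument fixed to 0.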
@@ -0,0 +1,67 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.common.expression;

import static org.tikv.common.types.MySQLType.*;

import java.util.HashMap;
import java.util.Map;
import org.tikv.common.types.MySQLType;

public class TypeBlocklist extends Blocklist {
  private static final Map<MySQLType, String> typeToMySQLMap = initialTypeMap();

  public TypeBlocklist(String typesString) {
    super(typesString);
  }

  private static HashMap<MySQLType, String> initialTypeMap() {
    HashMap<MySQLType, String> map = new HashMap<>();
    map.put(TypeDecimal, "decimal");
    map.put(TypeTiny, "tinyint");
    map.put(TypeShort, "smallint");
    map.put(TypeLong, "int");
    map.put(TypeFloat, "float");
    map.put(TypeDouble, "double");
    map.put(TypeNull, "null");
    map.put(TypeTimestamp, "timestamp");
    map.put(TypeLonglong, "bigint");
    map.put(TypeInt24, "mediumint");
    map.put(TypeDate, "date");
    map.put(TypeDuration, "time");
    map.put(TypeDatetime, "datetime");
    map.put(TypeYear, "year");
    map.put(TypeNewDate, "date");
    map.put(TypeVarchar, "varchar");
    map.put(TypeJSON, "json");
    map.put(TypeNewDecimal, "decimal");
    map.put(TypeEnum, "enum");
    map.put(TypeSet, "set");
    map.put(TypeTinyBlob, "tinytext");
    map.put(TypeMediumBlob, "mediumtext");
    map.put(TypeLongBlob, "longtext");
    map.put(TypeBlob, "text");
    map.put(TypeVarString, "varString");
    map.put(TypeString, "string");
    return map;
  }

  public boolean isUnsupportedType(MySQLType sqlType) {
    return isUnsupported(typeToMySQLMap.getOrDefault(sqlType, ""));
  }
}
@@ -0,0 +1,40 @@
/*
 *
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.common.expression;

public abstract class Visitor<R, C> {
  protected abstract R visit(ColumnRef node, C context);

  protected abstract R visit(ComparisonBinaryExpression node, C context);

  protected abstract R visit(StringRegExpression node, C context);

  protected abstract R visit(ArithmeticBinaryExpression node, C context);

  protected abstract R visit(LogicalBinaryExpression node, C context);

  protected abstract R visit(Constant node, C context);

  protected abstract R visit(AggregateFunction node, C context);

  protected abstract R visit(IsNull node, C context);

  protected abstract R visit(Not node, C context);

  protected abstract R visit(FuncCallExpr node, C context);
}
@@ -0,0 +1,42 @@
/*
 * Copyright 2018 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.expression.visitor;

import org.tikv.common.expression.ColumnRef;
import org.tikv.common.expression.Expression;

public class ColumnMatcher extends DefaultVisitor<Boolean, Void> {
  private final ColumnRef columnRef;

  private ColumnMatcher(ColumnRef exp) {
    this.columnRef = exp;
  }

  public static Boolean match(ColumnRef col, Expression expression) {
    ColumnMatcher matcher = new ColumnMatcher(col);
    return expression.accept(matcher, null);
  }

  @Override
  protected Boolean process(Expression node, Void context) {
    return false;
  }

  @Override
  protected Boolean visit(ColumnRef node, Void context) {
    return node.matchName(columnRef.getName());
  }
}
@@ -0,0 +1,77 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.expression.visitor;

import org.tikv.common.expression.*;

public class DefaultVisitor<R, C> extends Visitor<R, C> {
  protected R process(Expression node, C context) {
    for (Expression expr : node.getChildren()) {
      expr.accept(this, context);
    }
    return null;
  }

  @Override
  protected R visit(ColumnRef node, C context) {
    return process(node, context);
  }

  @Override
  protected R visit(ComparisonBinaryExpression node, C context) {
    return process(node, context);
  }

  @Override
  protected R visit(StringRegExpression node, C context) {
    return process(node, context);
  }

  @Override
  protected R visit(ArithmeticBinaryExpression node, C context) {
    return process(node, context);
  }

  @Override
  protected R visit(LogicalBinaryExpression node, C context) {
    return process(node, context);
  }

  @Override
  protected R visit(Constant node, C context) {
    return process(node, context);
  }

  @Override
  protected R visit(AggregateFunction node, C context) {
    return process(node, context);
  }

  @Override
  protected R visit(IsNull node, C context) {
    return process(node, context);
  }

  @Override
  protected R visit(Not node, C context) {
    return process(node, context);
  }

  @Override
  protected R visit(FuncCallExpr node, C context) {
    return process(node, context);
  }
}
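As a quick illustration of how `Visitor` and `DefaultVisitor` are meant to be used together (this sketch is not part of the patch; the collecting visitor is hypothetical): a concrete traversal overrides only the node types it cares about and lets `process` recurse through everything else.

```java
package org.tikv.common.expression.visitor;

import java.util.HashSet;
import java.util.Set;
import org.tikv.common.expression.ColumnRef;
import org.tikv.common.expression.Expression;

/** Hypothetical example: collect the names of all columns referenced by an expression tree. */
public class ColumnNameCollector extends DefaultVisitor<Void, Set<String>> {

  /** Usage sketch: Set<String> names = ColumnNameCollector.collect(filterExpression); */
  public static Set<String> collect(Expression expression) {
    Set<String> names = new HashSet<>();
    expression.accept(new ColumnNameCollector(), names);
    return names;
  }

  @Override
  protected Void visit(ColumnRef node, Set<String> context) {
    // Record the column name, then fall back to the default recursion over the children.
    context.add(node.getName());
    return process(node, context);
  }
}
```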
@@ -0,0 +1,111 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.expression.visitor;

import static java.util.Objects.requireNonNull;

import org.tikv.common.expression.*;
import org.tikv.common.expression.ComparisonBinaryExpression.NormalizedPredicate;
import org.tikv.common.meta.TiIndexColumn;

/**
 * Tests whether a predicate matches an index column entirely and can be converted to index-related
 * ranges. If a predicate matches only partially, it returns false.
 */
public class IndexMatcher extends DefaultVisitor<Boolean, Void> {
  private final boolean matchEqualTestOnly;
  private final TiIndexColumn indexColumn;

  private IndexMatcher(TiIndexColumn indexColumn, boolean matchEqualTestOnly) {
    this.matchEqualTestOnly = matchEqualTestOnly;
    this.indexColumn = requireNonNull(indexColumn, "index column is null");
  }

  public static IndexMatcher equalOnlyMatcher(TiIndexColumn indexColumn) {
    return new IndexMatcher(indexColumn, true);
  }

  public static IndexMatcher matcher(TiIndexColumn indexColumn) {
    return new IndexMatcher(indexColumn, false);
  }

  public boolean match(Expression expression) {
    return expression.accept(this, null);
  }

  @Override
  protected Boolean process(Expression node, Void context) {
    return false;
  }

  @Override
  protected Boolean visit(ColumnRef node, Void context) {
    String indexColumnName = indexColumn.getName();
    return node.matchName(indexColumnName);
  }

  @Override
  protected Boolean visit(ComparisonBinaryExpression node, Void context) {
    switch (node.getComparisonType()) {
      case LESS_THAN:
      case LESS_EQUAL:
      case GREATER_THAN:
      case GREATER_EQUAL:
      case NOT_EQUAL:
        if (matchEqualTestOnly) {
          return false;
        }
      case EQUAL:
        NormalizedPredicate predicate = node.normalize();
        if (predicate == null) {
          return false;
        }
        return predicate.getColumnRef().accept(this, context);
      default:
        return false;
    }
  }

  @Override
  protected Boolean visit(StringRegExpression node, Void context) {
    switch (node.getRegType()) {
        // If the predicate is StartsWith(col, 'a'), this predicate
        // indicates a range of ['a', +∞) which can be used by an index scan.
      case STARTS_WITH:
        if (matchEqualTestOnly) {
          return false;
        }
        return node.getLeft().accept(this, context);
      default:
        return false;
    }
  }

  @Override
  protected Boolean visit(LogicalBinaryExpression node, Void context) {
    switch (node.getCompType()) {
      case AND:
        if (matchEqualTestOnly) {
          return false;
        }
      case OR:
      case XOR:
        return node.getLeft().accept(this, context) && node.getRight().accept(this, context);
      default:
        return false;
    }
  }
}
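Worked example (not from the patch): with an index column `c1`, `matcher(c1Index)` accepts `c1 > 3`, `startsWith(c1, 'ab')`, and `c1 = 1 OR c1 = 2`, because every branch resolves to the indexed column; it rejects `c1 = 1 OR c2 = 2` and any predicate that `normalize()` cannot turn into column-versus-constant form. `equalOnlyMatcher` additionally rejects everything except plain equality and OR/XOR combinations of equalities.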
@@ -0,0 +1,108 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.expression.visitor;

import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;
import java.util.HashMap;
import java.util.Map;
import org.tikv.common.expression.ColumnRef;
import org.tikv.common.expression.ComparisonBinaryExpression;
import org.tikv.common.expression.ComparisonBinaryExpression.NormalizedPredicate;
import org.tikv.common.expression.StringRegExpression;
import org.tikv.common.key.TypedKey;
import org.tikv.common.meta.TiIndexColumn;
import org.tikv.common.meta.TiIndexInfo;
import org.tikv.common.meta.TiTableInfo;
import org.tikv.common.types.DataType;

public class IndexRangeSetBuilder extends RangeSetBuilder<TypedKey> {

  private final Map<ColumnRef, Integer> lengths; // prefix length of the corresponding ColumnRef

  public IndexRangeSetBuilder(TiTableInfo table, TiIndexInfo index) {
    Map<ColumnRef, Integer> result = new HashMap<>();
    if (table != null && index != null) {
      for (TiIndexColumn indexColumn : index.getIndexColumns()) {
        ColumnRef columnRef = ColumnRef.create(indexColumn.getName(), table);
        result.put(columnRef, (int) indexColumn.getLength());
      }
    }
    this.lengths = result;
  }

  @Override
  protected RangeSet<TypedKey> visit(ComparisonBinaryExpression node, Void context) {
    NormalizedPredicate predicate = node.normalize();
    if (predicate == null) {
      throwOnError(node);
    }
    // In order to match a prefix index, we have to cut the literal by the prefix length.
    // e.g., for table t:
    // CREATE TABLE `t` {
    //   `b` VARCHAR(10) DEFAULT NULL,
    //   KEY `prefix_index` (`b`(2))
    // }
    //
    // b(2) > "bbc"  -> ["bb", +∞)
    // b(2) >= "bbc" -> ["bb", +∞)
    // b(2) < "bbc"  -> (-∞, "bb"]
    // b(2) <= "bbc" -> (-∞, "bb"]
    // b(2) = "bbc"  -> ["bb", "bb"]
    // b(2) > "b"    -> ["b", +∞)
    // b(2) >= "b"   -> ["b", +∞)
    // b(2) < "b"    -> (-∞, "b"]
    // b(2) <= "b"   -> (-∞, "b"]
    //
    // For varchar, `b`(2) takes the first two characters (bytes) as the prefix index.
    // TODO: Note that TiDB only supports UTF-8; we need to check whether prefix indexes behave
    // differently under other encodings.
    int prefixLen = lengths.getOrDefault(predicate.getColumnRef(), DataType.UNSPECIFIED_LEN);
    TypedKey literal = predicate.getTypedLiteral(prefixLen);
    boolean loose = !DataType.isLengthUnSpecified(prefixLen);
    // With a prefix length specified, the filter is loosened, and so should the ranges be.
    return visitComparisonBinaryExpr(node, context, literal, loose);
  }

  @Override
  protected RangeSet<TypedKey> visit(StringRegExpression node, Void context) {
    ColumnRef columnRef = node.getColumnRef();
    // In order to match a prefix index, we have to cut the literal by the prefix length.
    // e.g., for table t:
    // CREATE TABLE `t` {
    //   `c1` VARCHAR(10) DEFAULT NULL,
    //   KEY `prefix_index` (`c1`(2))
    // }
    // When the predicate is `c1` LIKE 'abc%', the index range should be ['ab', 'ab'].
    // When the predicate is `c1` LIKE 'a%', the index range should be ['a', 'b').
    // For varchar, `c1`(2) takes the first two characters (bytes) as the prefix index.
    // TODO: Note that TiDB only supports UTF-8; we need to check whether prefix indexes behave
    // differently under other encodings.
    int prefixLen = lengths.getOrDefault(columnRef, DataType.UNSPECIFIED_LEN);
    TypedKey literal = node.getTypedLiteral(prefixLen);
    RangeSet<TypedKey> ranges = TreeRangeSet.create();

    switch (node.getRegType()) {
      case STARTS_WITH:
        ranges.add(Range.atLeast(literal).intersection(Range.lessThan(literal.next())));
        break;
      default:
        throwOnError(node);
    }
    return ranges;
  }
}
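The range arithmetic above is plain Guava. A minimal, self-contained sketch of the STARTS_WITH case (not from the patch; it uses strings instead of `TypedKey`, and the successor value stands in for `literal.next()`, whose exact semantics belong to `TypedKey`):

```java
import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;

public class StartsWithRangeSketch {
  public static void main(String[] args) {
    // For a predicate like startsWith(c1, "ab"), the builder produces the range
    // [literal, next(literal)), i.e. every key that begins with the prefix.
    String literal = "ab";
    String next = "ac"; // stand-in for literal.next(): the first key past the prefix

    RangeSet<String> ranges = TreeRangeSet.create();
    ranges.add(Range.atLeast(literal).intersection(Range.lessThan(next)));

    System.out.println(ranges.contains("abc")); // true  -> covered by the index scan
    System.out.println(ranges.contains("ad"));  // false -> pruned
  }
}
```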
@@ -0,0 +1,78 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.expression.visitor;

import java.util.List;
import java.util.Objects;
import org.tikv.common.expression.ColumnRef;
import org.tikv.common.expression.ComparisonBinaryExpression;
import org.tikv.common.expression.ComparisonBinaryExpression.NormalizedPredicate;
import org.tikv.common.expression.Expression;
import org.tikv.common.meta.TiTableInfo;

public class MetaResolver extends DefaultVisitor<Void, Expression> {
  private final TiTableInfo table;

  public MetaResolver(TiTableInfo table) {
    this.table = table;
  }

  public static void resolve(Expression expression, TiTableInfo table) {
    MetaResolver resolver = new MetaResolver(table);
    resolver.resolve(expression);
  }

  public static void resolve(List<? extends Expression> expressions, TiTableInfo table) {
    MetaResolver resolver = new MetaResolver(table);
    resolver.resolve(expressions);
  }

  public void resolve(List<? extends Expression> expressions) {
    expressions.forEach(expression -> expression.accept(this, null));
  }

  public void resolve(Expression expression) {
    Objects.requireNonNull(expression, "expression is null");
    expression.accept(this, null);
  }

  @Override
  protected Void visit(ComparisonBinaryExpression node, Expression parent) {
    NormalizedPredicate predicate = node.normalize();
    // TODO(Zhexuan Yang): fix this if we have complex ComparisonBinaryExpression.
    // We may need to add an expressionRewriter to address this.
    if (predicate != null) {
      visit(predicate.getColumnRef(), node);
      // Do not set the constant's data type to the column ref's data type if they are in the
      // same type catalog, because that may narrow the constant type and cause a wrong result.
      // For example, when the filter is `bit_col op long_constant`, setting long_constant to
      // bit type would truncate it and may produce a wrong result.
      if (predicate.getValue().getDataType() == null
          || !predicate
              .getValue()
              .getDataType()
              .isSameCatalog(predicate.getColumnRef().getDataType()))
        predicate.getValue().setDataType(predicate.getColumnRef().getDataType());
    }
    return null;
  }

  @Override
  protected Void visit(ColumnRef node, Expression parent) {
    node.resolve(table);
    return null;
  }
}
@@ -0,0 +1,116 @@
/*
 * Copyright 2020 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.expression.visitor;

import java.util.Objects;
import java.util.Set;
import org.tikv.common.expression.*;
import org.tikv.common.expression.ComparisonBinaryExpression.NormalizedPredicate;
import org.tikv.common.expression.FuncCallExpr.Type;
import org.tikv.common.predicates.PredicateUtils;

/**
 * PartAndFilterExprRewriter takes a partition expression as input. The rewriting rule depends on
 * the type of the partition expression:
 *
 * 1. If the partition expression is a columnRef, no rewriting is performed.
 * 2. If the partition expression is year() and the expression to be rewritten has the form
 *    y < '1995-10-10', then its right-hand child is replaced with "1995".
 * 3. If the partition expression is year() and the expression to be rewritten has the form
 *    year(y) < '1995', then its left-hand child is replaced with y.
 */
public class PartAndFilterExprRewriter extends DefaultVisitor<Expression, Void> {
  private final Expression partExpr;
  private final Set<ColumnRef> columnRefs;

  private boolean unsupportedPartFnFound;

  public PartAndFilterExprRewriter(Expression partExpr) {
    Objects.requireNonNull(partExpr, "partition expression cannot be null");
    this.partExpr = partExpr;
    this.columnRefs = PredicateUtils.extractColumnRefFromExpression(partExpr);
  }

  private boolean isYear() {
    return partExpr instanceof FuncCallExpr && ((FuncCallExpr) partExpr).getFuncTp() == Type.YEAR;
  }

  private boolean isColumnRef() {
    return partExpr instanceof ColumnRef;
  }

  @Override
  protected Expression process(Expression node, Void context) {
    for (Expression expr : node.getChildren()) {
      expr.accept(this, context);
    }
    return node;
  }

  @Override
  public Expression visit(LogicalBinaryExpression node, Void context) {
    Expression left = node.getLeft().accept(this, null);
    Expression right = node.getRight().accept(this, null);
    return new LogicalBinaryExpression(node.getCompType(), left, right);
  }

  @Override
  public Expression visit(FuncCallExpr node, Void context) {
    if (node.getFuncTp() == Type.YEAR) {
      return node.getExpression();
    }
    // Others are not supported right now.
    // TODO: when adding a new type to FuncCallExpr, please also modify here accordingly.
    return node;
  }

  @Override
  public Expression visit(Constant node, Void context) {
    return node;
  }

  @Override
  public Expression visit(ComparisonBinaryExpression node, Void context) {
    NormalizedPredicate predicate = node.normalize();
    // predicate may be null if node's left or right is not a column ref or a constant.
    if (predicate != null) {
      if (!columnRefs.contains(predicate.getColumnRef())) {
        return node;
      }
      // We only support year() for now.
      if (isYear()) {
        FuncCallExpr year = new FuncCallExpr(predicate.getValue(), Type.YEAR);
        Constant newLiteral = year.eval(predicate.getValue());
        return new ComparisonBinaryExpression(node.getComparisonType(), node.getLeft(), newLiteral);
      } else if (isColumnRef()) {
        return node;
      }
      unsupportedPartFnFound = true;
      return null;
    }

    // When we find a node of the form [year(y) < 1995], we need to rewrite the left child.
    Expression left = node.getLeft().accept(this, null);
    Expression right = node.getRight().accept(this, null);
    return new ComparisonBinaryExpression(node.getComparisonType(), left, right);
  }

  public Expression rewrite(Expression target) {
    return target.accept(this, null);
  }

  public boolean isUnsupportedPartFnFound() {
    return unsupportedPartFnFound;
  }
}
@@ -0,0 +1,375 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.expression.visitor;

import static java.util.Objects.requireNonNull;

import com.google.common.collect.ImmutableMap;
import com.pingcap.tidb.tipb.Expr;
import com.pingcap.tidb.tipb.ExprType;
import com.pingcap.tidb.tipb.FieldType;
import com.pingcap.tidb.tipb.ScalarFuncSig;
import java.util.Map;
import java.util.Objects;
import org.tikv.common.codec.Codec.IntegerCodec;
import org.tikv.common.codec.CodecDataOutput;
import org.tikv.common.exception.TiExpressionException;
import org.tikv.common.expression.*;
import org.tikv.common.expression.AggregateFunction.FunctionType;
import org.tikv.common.expression.ComparisonBinaryExpression.NormalizedPredicate;
import org.tikv.common.types.*;
import org.tikv.common.types.DataType.EncodeType;

public class ProtoConverter extends Visitor<Expr, Object> {
  // All concrete data types should be hooked to a type name
  private static final Map<Class<? extends DataType>, String> SCALAR_SIG_MAP =
      ImmutableMap.<Class<? extends DataType>, String>builder()
          .put(IntegerType.class, "Int")
          .put(BitType.class, "Int")
          .put(DecimalType.class, "Decimal")
          .put(RealType.class, "Real")
          .put(DateTimeType.class, "Time")
          .put(DateType.class, "Time")
          .put(TimestampType.class, "Time")
          .put(BytesType.class, "String")
          .put(StringType.class, "String")
          .put(TimeType.class, "Duration")
          .build();

  private final boolean validateColPosition;

  public ProtoConverter() {
    this(true);
  }

  /**
   * Instantiates a {@code ProtoConverter}.
   *
   * @param validateColPosition whether to validate column positions in this converter. By default,
   *     a {@code TiDAGRequest} should check whether a {@code ColumnRef}'s position is correct in
   *     its executors. This validation can be skipped by setting {@code validateColPosition} to
   *     false.
   */
  public ProtoConverter(boolean validateColPosition) {
    this.validateColPosition = validateColPosition;
  }

  public static Expr toProto(Expression expression) {
    return toProto(expression, null);
  }

  public static Expr toProto(Expression expression, Object context) {
    ProtoConverter converter = new ProtoConverter();
    return expression.accept(converter, context);
  }

  private DataType getType(Expression expression) {
    DataType type = expression.getDataType();

    if (type == null) {
      throw new TiExpressionException(String.format("Expression %s type unknown", expression));
    }
    // For the timestamp type, the coprocessor uses datetime to do the calculation.
    if (type instanceof TimestampType) {
      return DateTimeType.DATETIME;
    }
    return type;
  }

  private String getTypeSignature(Expression expression) {
    DataType type = getType(expression);
    String typeSignature = SCALAR_SIG_MAP.get(type.getClass());
    if (typeSignature == null) {
      throw new TiExpressionException(String.format("Type %s signature unknown", type));
    }
    return typeSignature;
  }

  private FieldType toPBFieldType(DataType fieldType) {
    return FieldType.newBuilder()
        .setTp(fieldType.getTypeCode())
        .setFlag(fieldType.getFlag())
        .setFlen((int) fieldType.getLength())
        .setDecimal(fieldType.getDecimal())
        .setCharset(fieldType.getCharset())
        .setCollate(fieldType.getCollationCode())
        .build();
  }

  // Generates a protobuf builder with partial data encoded.
  // The scalar signature is left for the caller to set.
  private Expr.Builder scalarToPartialProto(Expression node, Object context) {
    Expr.Builder builder = Expr.newBuilder();
    // Scalar function type
    builder.setTp(ExprType.ScalarFunc);

    // Return type
    builder.setFieldType(toPBFieldType(getType(node)));

    for (Expression child : node.getChildren()) {
      Expr exprProto = child.accept(this, context);
      builder.addChildren(exprProto);
    }

    return builder;
  }

  @Override
  protected Expr visit(LogicalBinaryExpression node, Object context) {
    ScalarFuncSig protoSig;
    switch (node.getCompType()) {
      case AND:
        protoSig = ScalarFuncSig.LogicalAnd;
        break;
      case OR:
        protoSig = ScalarFuncSig.LogicalOr;
        break;
      case XOR:
        protoSig = ScalarFuncSig.LogicalXor;
        break;
      default:
        throw new TiExpressionException(
            String.format("Unknown comparison type %s", node.getCompType()));
    }
    Expr.Builder builder = scalarToPartialProto(node, context);
    builder.setSig(protoSig);

    builder.setFieldType(toPBFieldType(getType(node)));
    return builder.build();
  }

  @Override
  protected Expr visit(ArithmeticBinaryExpression node, Object context) {
    // Assume that after type coercion, children are compatible.
    Expression child = node.getLeft();
    String typeSignature = getTypeSignature(child);
    ScalarFuncSig protoSig;
    switch (node.getCompType()) {
        // TODO: Add test for bitwise push down
      case BIT_AND:
        protoSig = ScalarFuncSig.BitAndSig;
        break;
      case BIT_OR:
        protoSig = ScalarFuncSig.BitOrSig;
        break;
      case BIT_XOR:
        protoSig = ScalarFuncSig.BitXorSig;
        break;
      case DIVIDE:
        protoSig = ScalarFuncSig.valueOf("Divide" + typeSignature);
        break;
      case MINUS:
        protoSig = ScalarFuncSig.valueOf("Minus" + typeSignature);
        break;
      case MULTIPLY:
        protoSig = ScalarFuncSig.valueOf("Multiply" + typeSignature);
        break;
      case PLUS:
        protoSig = ScalarFuncSig.valueOf("Plus" + typeSignature);
        break;
      default:
        throw new TiExpressionException(
            String.format("Unknown comparison type %s", node.getCompType()));
    }
    Expr.Builder builder = scalarToPartialProto(node, context);
    builder.setSig(protoSig);
    builder.setFieldType(toPBFieldType(getType(node)));
    return builder.build();
  }

  @Override
  protected Expr visit(ComparisonBinaryExpression node, Object context) {
    NormalizedPredicate predicate = node.normalize();
    if (predicate.getValue().isOverflowed()) {
      throw new UnsupportedOperationException(
          "overflowed ComparisonBinaryExpression cannot be pushed down");
    }
    Expression child = node.getLeft();
    String typeSignature = getTypeSignature(child);
    ScalarFuncSig protoSig;
    switch (node.getComparisonType()) {
      case EQUAL:
        protoSig = ScalarFuncSig.valueOf("EQ" + typeSignature);
        break;
      case GREATER_EQUAL:
        protoSig = ScalarFuncSig.valueOf("GE" + typeSignature);
        break;
      case GREATER_THAN:
        protoSig = ScalarFuncSig.valueOf("GT" + typeSignature);
        break;
      case LESS_EQUAL:
        protoSig = ScalarFuncSig.valueOf("LE" + typeSignature);
        break;
      case LESS_THAN:
        protoSig = ScalarFuncSig.valueOf("LT" + typeSignature);
        break;
      case NOT_EQUAL:
        protoSig = ScalarFuncSig.valueOf("NE" + typeSignature);
        break;
      default:
        throw new TiExpressionException(
            String.format("Unknown comparison type %s", node.getComparisonType()));
    }
    Expr.Builder builder = scalarToPartialProto(node, context);
    builder.setSig(protoSig);
    builder.setFieldType(toPBFieldType(getType(node)));
    return builder.build();
  }

  @Override
  protected Expr visit(StringRegExpression node, Object context) {
    // Assume that after type coercion, children are compatible.
    ScalarFuncSig protoSig;
    switch (node.getRegType()) {
      case STARTS_WITH:
      case CONTAINS:
      case ENDS_WITH:
      case LIKE:
        protoSig = ScalarFuncSig.LikeSig;
        break;
      default:
        throw new TiExpressionException(String.format("Unknown reg type %s", node.getRegType()));
    }
    Expr.Builder builder = scalarToPartialProto(node, context);
    builder.setSig(protoSig);
    return builder.build();
  }

  @Override
  @SuppressWarnings("unchecked")
  protected Expr visit(ColumnRef node, Object context) {
    long position = 0;
    if (validateColPosition) {
      requireNonNull(context, "Context of a ColumnRef should not be null");
      Map<String, Integer> colIdOffsetMap = (Map<String, Integer>) context;
      position =
          requireNonNull(
              colIdOffsetMap.get(node.getName()),
              "Required column position info " + node.getName() + " is not in a valid context.");
    }
    Expr.Builder builder = Expr.newBuilder();
    builder.setTp(ExprType.ColumnRef);
    CodecDataOutput cdo = new CodecDataOutput();
    // After switching to DAG request mode, the expression value
    // should be the index of the table columns we provided in
    // the first executor of a DAG request.
    IntegerCodec.writeLong(cdo, position);
    builder.setVal(cdo.toByteString());
    builder.setFieldType(toPBFieldType(getType(node)));
    return builder.build();
  }

  @Override
  protected Expr visit(Constant node, Object context) {
    Expr.Builder builder = Expr.newBuilder();
    DataType type = node.getDataType();
    if (node.getValue() == null) {
      builder.setTp(ExprType.Null);
    } else {
      // This is useful since SupportedExpressionValidator will catch this exception
      // and mark the expression as not pushable to the coprocessor.
      if (node.isOverflowed()) {
        throw new UnsupportedOperationException(
            "overflowed value cannot be pushed down to coprocessor");
      }
      builder.setTp(type.getProtoExprType());
      CodecDataOutput cdo = new CodecDataOutput();
      type.encode(cdo, EncodeType.PROTO, node.getValue());
      builder.setVal(cdo.toByteString());
      builder.setFieldType(toPBFieldType(getType(node)));
    }
    return builder.build();
  }

  @Override
  protected Expr visit(AggregateFunction node, Object context) {
    Expr.Builder builder = Expr.newBuilder();

    FunctionType type = node.getType();
    switch (type) {
      case Max:
        builder.setTp(ExprType.Max);
        break;
      case Sum:
        builder.setTp(ExprType.Sum);
        break;
      case Min:
        builder.setTp(ExprType.Min);
        break;
      case First:
        builder.setTp(ExprType.First);
        break;
      case Count:
        builder.setTp(ExprType.Count);
        break;
    }

    for (Expression arg : node.getChildren()) {
      Expr exprProto = arg.accept(this, context);
      builder.addChildren(exprProto);
    }

    builder.setFieldType(toPBFieldType(getType(node)));
    return builder.build();
  }

  @Override
  protected Expr visit(IsNull node, Object context) {
    String typeSignature = getTypeSignature(node.getExpression());
    ScalarFuncSig protoSig = ScalarFuncSig.valueOf(typeSignature + "IsNull");
    Expr.Builder builder = scalarToPartialProto(node, context);
    builder.setSig(protoSig);
    builder.setFieldType(toPBFieldType(getType(node)));
    return builder.build();
  }

  @Override
  protected Expr visit(Not node, Object context) {
    ScalarFuncSig protoSig = null;
    DataType dataType = getType(node);
    switch (dataType.getType()) {
      case TypeDecimal:
        protoSig = ScalarFuncSig.UnaryNotDecimal;
        break;
      case TypeDouble:
      case TypeFloat:
        protoSig = ScalarFuncSig.UnaryNotReal;
        break;
      case TypeInt24:
      case TypeLong:
      case TypeShort:
      case TypeLonglong:
      case TypeTiny:
        protoSig = ScalarFuncSig.UnaryNotInt;
        break;
      default:
    }

    Objects.requireNonNull(protoSig, "unary not can not find proper proto signature.");
    Expr.Builder builder = scalarToPartialProto(node, context);
    builder.setSig(protoSig);
    builder.setFieldType(toPBFieldType(getType(node)));
    return builder.build();
  }

  @Override
  protected Expr visit(FuncCallExpr node, Object context) {
    ScalarFuncSig protoSig = ScalarFuncSig.Year;
    Expr.Builder builder = scalarToPartialProto(node, context);
    builder.setSig(protoSig);
    builder.setFieldType(toPBFieldType(getType(node)));
    return builder.build();
  }
}
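A minimal usage sketch (not part of the patch; it only relies on calls that appear in the classes above and should be read as an illustration rather than a guaranteed-working snippet): converting a bare constant needs no context, while converting an expression that contains a `ColumnRef` needs the column-name-to-offset map that a DAG request builds for its first executor.

```java
import com.google.common.collect.ImmutableMap;
import com.pingcap.tidb.tipb.Expr;
import java.util.Map;
import org.tikv.common.expression.Constant;
import org.tikv.common.expression.Expression;
import org.tikv.common.expression.visitor.ProtoConverter;
import org.tikv.common.types.IntegerType;

public class ProtoConverterSketch {
  public static void main(String[] args) {
    // A bare constant can be converted without any context.
    Expression zero = Constant.create(0, IntegerType.BIGINT);
    Expr constProto = ProtoConverter.toProto(zero);
    System.out.println(constProto.getTp()); // the proto ExprType chosen by IntegerType

    // An expression containing ColumnRefs needs the offset map as context,
    // e.g. for a hypothetical filter over column "c1" at offset 0:
    Map<String, Integer> colOffsets = ImmutableMap.of("c1", 0);
    // Expr filterProto = ProtoConverter.toProto(filterExpression, colOffsets);
  }
}
```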
@@ -0,0 +1,73 @@
/*
 * Copyright 2019 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.expression.visitor;

import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;
import java.util.Set;
import org.tikv.common.expression.ColumnRef;
import org.tikv.common.expression.ComparisonBinaryExpression;
import org.tikv.common.expression.ComparisonBinaryExpression.NormalizedPredicate;
import org.tikv.common.expression.Constant;
import org.tikv.common.expression.Expression;
import org.tikv.common.key.TypedKey;

/**
 * Applies the partition pruning rule to a filter condition. Partition pruning is based on a simple
 * idea and can be described as "do not scan partitions where there can be no matching values".
 * Currently only range partition pruning is supported (range columns over multiple columns are not
 * supported on the TiDB side, so we cannot optimize that yet).
 */
@SuppressWarnings("UnstableApiUsage")
public class PrunedPartitionBuilder extends RangeSetBuilder<TypedKey> {
  private final Set<ColumnRef> partExprColRefs;

  public PrunedPartitionBuilder(Set<ColumnRef> partExprColRefs) {
    this.partExprColRefs = partExprColRefs;
  }

  protected RangeSet<TypedKey> process(Expression node, Void context) {
    throwOnError(node);
    return null;
  }

  @Override
  // This handles the case where a partition definition's `LESS THAN` value is `MAXVALUE`.
  protected RangeSet<TypedKey> visit(Constant node, Void context) {
    RangeSet<TypedKey> ranges = TreeRangeSet.create();
    if (node.getValue() instanceof Number) {
      long val = ((Number) node.getValue()).longValue();
      if (val == 1) {
        return ranges.complement();
      }
      return ranges;
    }
    return ranges;
  }

  @Override
  protected RangeSet<TypedKey> visit(ComparisonBinaryExpression node, Void context) {
    NormalizedPredicate predicate = node.normalize();
    // When we meet a comparison binary expression that cannot be normalized,
    // it cannot be used for pruning, e.g. a > b + 1.
    if (predicate == null) return TreeRangeSet.<TypedKey>create().complement();
    if (!partExprColRefs.contains(predicate.getColumnRef()))
      return TreeRangeSet.<TypedKey>create().complement();
    TypedKey literal = predicate.getTypedLiteral(-1);
    return visitComparisonBinaryExpr(node, context, literal, false);
  }
}
@@ -0,0 +1,65 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.expression.visitor;

import org.tikv.common.expression.ComparisonBinaryExpression;
import org.tikv.common.expression.Expression;
import org.tikv.common.expression.LogicalBinaryExpression;

public class PseudoCostCalculator extends DefaultVisitor<Double, Void> {
  public static double calculateCost(Expression expr) {
    PseudoCostCalculator calc = new PseudoCostCalculator();
    return expr.accept(calc, null);
  }

  @Override
  protected Double process(Expression node, Void context) {
    return 1.0;
  }

  @Override
  protected Double visit(LogicalBinaryExpression node, Void context) {
    double leftCost = node.getLeft().accept(this, context);
    double rightCost = node.getRight().accept(this, context);
    switch (node.getCompType()) {
      case AND:
        return leftCost * rightCost;
      case OR:
      case XOR:
        return leftCost + rightCost;
      default:
        return 1.0;
    }
  }

  @Override
  protected Double visit(ComparisonBinaryExpression node, Void context) {
    switch (node.getComparisonType()) {
      case EQUAL:
        return 0.01;
      case GREATER_EQUAL:
      case GREATER_THAN:
      case LESS_EQUAL:
      case LESS_THAN:
        // magic number for testing
        return 0.3;
      case NOT_EQUAL:
        return 0.99;
      default:
        return 1.0;
    }
  }
}
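Worked example (not in the patch): treating these constants as selectivities, a filter like `a = 1 AND b < 5` costs 0.01 × 0.3 = 0.003, while `a = 1 OR b <> 5` costs 0.01 + 0.99 = 1.0, so the predicate-selection logic prefers the more selective conjunctive filter.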
@@ -0,0 +1,143 @@
/*
 * Copyright 2019 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.expression.visitor;

import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;
import java.util.Objects;
import org.tikv.common.exception.TiExpressionException;
import org.tikv.common.expression.ComparisonBinaryExpression;
import org.tikv.common.expression.ComparisonBinaryExpression.NormalizedPredicate;
import org.tikv.common.expression.Expression;
import org.tikv.common.expression.LogicalBinaryExpression;

/**
 * A builder that builds a range set of type {@code C}. It extends {@code DefaultVisitor} and
 * overrides the {@code LogicalBinaryExpression} visit. The {@code ComparisonBinaryExpression}
 * visit cannot simply be overridden here, because subclasses such as {@code IndexRangeSetBuilder}
 * and {@code PrunedPartitionBuilder} need different behavior; instead, a method
 * {@code visitComparisonBinaryExpr} with an extra boolean flag controls the behavior.
 */
@SuppressWarnings("UnstableApiUsage")
public class RangeSetBuilder<C extends Comparable> extends DefaultVisitor<RangeSet<C>, Void> {

  static void throwOnError(Expression node) {
    final String errorFormat = "Unsupported conversion to Range: %s";
    throw new TiExpressionException(String.format(errorFormat, node));
  }

  /**
   * Visits a {@code ComparisonBinaryExpression} and constructs a range set.
   *
   * @param node represents a {@code ComparisonBinaryExpression}.
   * @param context represents a context during the visiting process. It is not used in this
   *     method.
   * @param literal represents a comparable value.
   * @param loose if a prefix length is specified, the filter is loose, and so is the range.
   * @return a range set.
   */
  RangeSet<C> visitComparisonBinaryExpr(
      ComparisonBinaryExpression node, Void context, C literal, boolean loose) {
    NormalizedPredicate predicate = node.normalize();
    RangeSet<C> ranges = TreeRangeSet.create();
    if (loose) {
      switch (predicate.getType()) {
        case GREATER_THAN:
        case GREATER_EQUAL:
          ranges.add(Range.atLeast(literal));
          break;
        case LESS_THAN:
        case LESS_EQUAL:
          ranges.add(Range.atMost(literal));
          break;
        case EQUAL:
          ranges.add(Range.singleton(literal));
          break;
        case NOT_EQUAL:
          // Should return the full range, because a prefix index predicate for NOT_EQUAL
          // will be split into a NOT_EQUAL filter and a full range scan.
          ranges.add(Range.all());
          break;
        default:
          throwOnError(node);
      }
    } else {
      switch (predicate.getType()) {
        case GREATER_THAN:
          ranges.add(Range.greaterThan(literal));
          break;
        case GREATER_EQUAL:
          ranges.add(Range.atLeast(literal));
          break;
        case LESS_THAN:
          ranges.add(Range.lessThan(literal));
          break;
        case LESS_EQUAL:
          ranges.add(Range.atMost(literal));
          break;
        case EQUAL:
          ranges.add(Range.singleton(literal));
          break;
        case NOT_EQUAL:
          ranges.add(Range.lessThan(literal));
          ranges.add(Range.greaterThan(literal));
          break;
        default:
          throwOnError(node);
      }
    }

    return ranges;
  }

  @Override
  protected RangeSet<C> visit(LogicalBinaryExpression node, Void context) {
    RangeSet<C> leftRanges = node.getLeft().accept(this, context);
    RangeSet<C> rightRanges = node.getRight().accept(this, context);
    switch (node.getCompType()) {
      case AND:
        rightRanges.removeAll(leftRanges.complement());
        break;
      case OR:
        rightRanges.addAll(leftRanges);
        break;
      case XOR:
        // AND part: the intersection of both sides.
        // We need to make a copy of rightRanges rather than assigning the reference to
        // intersection, since we modify intersection later.
        RangeSet<C> intersection = TreeRangeSet.create(rightRanges);
        intersection.removeAll(leftRanges.complement());
        // OR part: the union of both sides, minus the intersection.
        rightRanges.addAll(leftRanges);
        rightRanges.removeAll(intersection);
        break;
      default:
        throwOnError(node);
    }
    return rightRanges;
  }

  public RangeSet<C> buildRange(Expression predicate) {
    Objects.requireNonNull(predicate, "predicate is null");
    return predicate.accept(this, null);
  }

  protected RangeSet<C> process(Expression node, Void context) {
    throwOnError(node);
    return null;
  }
}
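The AND/OR/XOR combination above is ordinary Guava range-set algebra. A self-contained sketch (not from the patch; it uses integers instead of `TypedKey`) of the same operations:

```java
import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;

public class RangeCombineSketch {
  public static void main(String[] args) {
    // left  ~ "a > 10"  -> (10, +inf)
    // right ~ "a <= 20" -> (-inf, 20]
    RangeSet<Integer> left = TreeRangeSet.create();
    left.add(Range.greaterThan(10));
    RangeSet<Integer> right = TreeRangeSet.create();
    right.add(Range.atMost(20));

    // AND: keep only what both sides allow, i.e. right minus the complement of left -> (10, 20]
    RangeSet<Integer> and = TreeRangeSet.create(right);
    and.removeAll(left.complement());
    System.out.println(and);

    // XOR: (left union right) minus (left intersect right) -> (-inf, 10] and (20, +inf)
    RangeSet<Integer> xor = TreeRangeSet.create(right);
    xor.addAll(left);
    xor.removeAll(and);
    System.out.println(xor);
  }
}
```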
@@ -0,0 +1,51 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.expression.visitor;

import org.tikv.common.expression.Expression;
import org.tikv.common.expression.ExpressionBlocklist;

public class SupportedExpressionValidator extends DefaultVisitor<Boolean, ExpressionBlocklist> {
  private static final SupportedExpressionValidator validator = new SupportedExpressionValidator();

  public static boolean isSupportedExpression(Expression node, ExpressionBlocklist blocklist) {
    if (!node.accept(validator, blocklist)) {
      return false;
    }
    try {
      ProtoConverter protoConverter = new ProtoConverter(false);
      if (node.accept(protoConverter, null) == null) {
        return false;
      }
    } catch (Exception e) {
      return false;
    }
    return true;
  }

  @Override
  protected Boolean process(Expression node, ExpressionBlocklist blocklist) {
    if (blocklist != null && blocklist.isUnsupportedPushDownExpr(getClass())) {
      return false;
    }
    for (Expression expr : node.getChildren()) {
      if (!expr.accept(this, blocklist)) {
        return false;
      }
    }
    return true;
  }
}
@@ -0,0 +1,78 @@
/*
 * Copyright 2017 PingCAP, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.tikv.common.key;

import com.google.common.base.Joiner;
import java.util.ArrayList;
import java.util.List;
import org.tikv.common.codec.CodecDataOutput;

public class CompoundKey extends Key {

  private final List<Key> keys;

  protected CompoundKey(List<Key> keys, byte[] value) {
    super(value);
    this.keys = keys;
  }

  public static CompoundKey concat(Key lKey, Key rKey) {
    Builder builder = newBuilder();
    builder.append(lKey).append(rKey);
    return builder.build();
  }

  public static Builder newBuilder() {
    return new Builder();
  }

  public List<Key> getKeys() {
    return keys;
  }

  @Override
  public String toString() {
    return String.format("[%s]", Joiner.on(",").useForNull("Null").join(keys));
  }

  public static class Builder {
    private final List<Key> keys = new ArrayList<>();

    public Builder append(Key key) {
      if (key instanceof CompoundKey) {
        CompoundKey compKey = (CompoundKey) key;
        for (Key child : compKey.getKeys()) {
          append(child);
        }
      } else {
        keys.add(key);
      }
      return this;
    }

    public CompoundKey build() {
      int totalLen = 0;
      for (Key key : keys) {
        totalLen += key.getBytes().length;
      }
      CodecDataOutput cdo = new CodecDataOutput(totalLen);
      for (Key key : keys) {
        cdo.write(key.getBytes());
      }
      return new CompoundKey(keys, cdo.toBytes());
    }
  }
}
Some files were not shown because too many files have changed in this diff.