Cassandra instrumentations should store normalised CQL queries as db.statement (#1427)

* Move `DbSystem` to package `...instrumentation.api.db`
* Move `SqlNormalizer` to the `javaagent-api` package `...api.db.normalizer`
* Refactor Cassandra tests so that they use testcontainers (and run on Java 11)
* Implement Cassandra statement normalization
This commit is contained in:
Mateusz Rzeszutek 2020-10-22 17:56:06 +02:00 committed by GitHub
parent 9b91236775
commit 50990a7c17
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
32 changed files with 212 additions and 173 deletions

View File

@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<FindBugsFilter>
<Match>
<Class name="io.opentelemetry.javaagent.instrumentation.jdbc.normalizer.SimpleCharStream"/>
<Class name="io.opentelemetry.javaagent.instrumentation.api.db.normalizer.SimpleCharStream"/>
<Bug pattern="DM_DEFAULT_ENCODING"/> <!-- ignore default encoding for auto-genned class -->
</Match>

View File

@ -1,8 +1,5 @@
// Set properties before any plugins get loaded
ext {
// TODO switch to container-based tests (away from cassandraunit)
// Tests use cassandraunit, which runs embedded Cassandra 3, which is currently incompatible with Java 9.
maxJavaVersionForTests = JavaVersion.VERSION_1_8
cassandraDriverTestVersions = "[3.0,4.0)"
}
@ -40,7 +37,6 @@ dependencies {
library group: 'com.datastax.cassandra', name: 'cassandra-driver-core', version: '3.0.0'
testLibrary group: 'com.datastax.cassandra', name: 'cassandra-driver-core', version: '3.2.0'
testImplementation group: 'org.cassandraunit', name: 'cassandra-unit', version: '3.1.3.2'
testImplementation project(':instrumentation:guava-10.0')
latestDepTestLibrary group: 'com.datastax.cassandra', name: 'cassandra-driver-core', version: '3.+'

View File

@ -37,6 +37,7 @@ public class CassandraClientInstrumentation extends Instrumenter.Default {
@Override
public String[] helperClassNames() {
return new String[] {
packageName + ".CassandraQueryNormalizer",
packageName + ".CassandraDatabaseClientTracer",
packageName + ".TracingSession",
packageName + ".TracingSession$1",

View File

@ -10,7 +10,7 @@ import com.datastax.driver.core.Host;
import com.datastax.driver.core.Session;
import io.opentelemetry.instrumentation.api.tracer.DatabaseClientTracer;
import io.opentelemetry.instrumentation.api.tracer.utils.NetPeerUtils;
import io.opentelemetry.javaagent.instrumentation.api.jdbc.DbSystem;
import io.opentelemetry.javaagent.instrumentation.api.db.DbSystem;
import io.opentelemetry.trace.Span;
import io.opentelemetry.trace.attributes.SemanticAttributes;
import java.net.InetSocketAddress;
@ -25,7 +25,7 @@ public class CassandraDatabaseClientTracer extends DatabaseClientTracer<Session,
@Override
protected String normalizeQuery(String query) {
return query;
return CassandraQueryNormalizer.normalize(query);
}
@Override

View File

@ -0,0 +1,26 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.javaagent.instrumentation.cassandra.v3_0;
import io.opentelemetry.javaagent.instrumentation.api.db.normalizer.ParseException;
import io.opentelemetry.javaagent.instrumentation.api.db.normalizer.SqlNormalizer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Normalizes CQL statements before they are recorded as the {@code db.statement} span attribute,
 * delegating to the shared {@code SqlNormalizer} to replace literal values with {@code ?}
 * placeholders.
 */
public final class CassandraQueryNormalizer {
  private static final Logger logger = LoggerFactory.getLogger(CassandraQueryNormalizer.class);

  private CassandraQueryNormalizer() {}

  /**
   * Returns the normalized form of {@code query}, or {@code null} when the statement cannot be
   * parsed.
   *
   * <p>NOTE(review): returning {@code null} on parse failure presumably avoids recording a raw
   * statement that may contain sensitive literals — confirm against the tracer's null handling.
   */
  public static String normalize(String query) {
    try {
      return SqlNormalizer.normalize(query);
    } catch (ParseException e) {
      logger.debug("Could not normalize Cassandra query", e);
      return null;
    }
  }
}

View File

@ -13,39 +13,47 @@ import io.opentelemetry.instrumentation.test.AgentTestRunner
import io.opentelemetry.instrumentation.test.asserts.TraceAssert
import io.opentelemetry.sdk.trace.data.SpanData
import io.opentelemetry.trace.attributes.SemanticAttributes
import java.time.Duration
import java.util.concurrent.Executors
import java.util.concurrent.atomic.AtomicBoolean
import org.cassandraunit.utils.EmbeddedCassandraServerHelper
import org.slf4j.Logger
import org.slf4j.LoggerFactory
import org.testcontainers.containers.GenericContainer
import org.testcontainers.containers.output.Slf4jLogConsumer
import spock.lang.Requires
import spock.lang.Shared
@Requires({ "true" != System.getenv("CIRCLECI") })
class CassandraClientTest extends AgentTestRunner {
@Shared
Cluster cluster
private static final Logger log = LoggerFactory.getLogger(CassandraClientTest)
@Shared
def executor = Executors.newCachedThreadPool()
@Shared
GenericContainer cassandra
@Shared
int cassandraPort
@Shared
Cluster cluster
def setupSpec() {
/*
This timeout seems excessive but we've seen tests fail with timeout of 40s.
TODO: if we continue to see failures we may want to consider using 'real' Cassandra
started in container like we do for memcached. Note: this will complicate things because
tests would have to assume they run under shared Cassandra and act accordingly.
*/
EmbeddedCassandraServerHelper.startEmbeddedCassandra(EmbeddedCassandraServerHelper.CASSANDRA_RNDPORT_YML_FILE, 120000L)
cassandra = new GenericContainer("cassandra:3")
.withExposedPorts(9042)
.withLogConsumer(new Slf4jLogConsumer(log))
.withStartupTimeout(Duration.ofSeconds(120))
cassandra.start()
cluster = EmbeddedCassandraServerHelper.getCluster()
cassandraPort = cassandra.getMappedPort(9042)
/*
Looks like sometimes our requests fail because Cassandra takes too long to respond,
Increase this timeout as well to try to cope with this.
*/
cluster.getConfiguration().getSocketOptions().setReadTimeoutMillis(120000)
cluster = Cluster.builder()
.addContactPointsWithPorts(new InetSocketAddress("localhost", cassandraPort))
.build()
}
def cleanupSpec() {
EmbeddedCassandraServerHelper.cleanEmbeddedCassandra()
cluster.close()
cassandra.stop()
}
def "test sync"() {
@ -62,7 +70,7 @@ class CassandraClientTest extends AgentTestRunner {
}
}
trace(keyspace ? 1 : 0, 1) {
cassandraSpan(it, 0, statement, keyspace)
cassandraSpan(it, 0, expectedStatement, keyspace)
}
}
@ -70,12 +78,12 @@ class CassandraClientTest extends AgentTestRunner {
session.close()
where:
statement | keyspace
"DROP KEYSPACE IF EXISTS sync_test" | null
"CREATE KEYSPACE sync_test WITH REPLICATION = {'class':'SimpleStrategy', 'replication_factor':3}" | null
"CREATE TABLE sync_test.users ( id UUID PRIMARY KEY, name text )" | "sync_test"
"INSERT INTO sync_test.users (id, name) values (uuid(), 'alice')" | "sync_test"
"SELECT * FROM users where name = 'alice' ALLOW FILTERING" | "sync_test"
keyspace | statement | expectedStatement
null | "DROP KEYSPACE IF EXISTS sync_test" | "DROP KEYSPACE IF EXISTS sync_test"
null | "CREATE KEYSPACE sync_test WITH REPLICATION = {'class':'SimpleStrategy', 'replication_factor':3}" | "CREATE KEYSPACE sync_test WITH REPLICATION = {?:?, ?:?}"
"sync_test" | "CREATE TABLE sync_test.users ( id UUID PRIMARY KEY, name text )" | "CREATE TABLE sync_test.users ( id UUID PRIMARY KEY, name text )"
"sync_test" | "INSERT INTO sync_test.users (id, name) values (uuid(), 'alice')" | "INSERT INTO sync_test.users (id, name) values (uuid(), ?)"
"sync_test" | "SELECT * FROM users where name = 'alice' ALLOW FILTERING" | "SELECT * FROM users where name = ? ALLOW FILTERING"
}
def "test async"() {
@ -100,7 +108,7 @@ class CassandraClientTest extends AgentTestRunner {
}
trace(keyspace ? 1 : 0, 3) {
basicSpan(it, 0, "parent")
cassandraSpan(it, 1, statement, keyspace, span(0))
cassandraSpan(it, 1, expectedStatement, keyspace, span(0))
basicSpan(it, 2, "callbackListener", span(0))
}
}
@ -109,12 +117,12 @@ class CassandraClientTest extends AgentTestRunner {
session.close()
where:
statement | keyspace
"DROP KEYSPACE IF EXISTS async_test" | null
"CREATE KEYSPACE async_test WITH REPLICATION = {'class':'SimpleStrategy', 'replication_factor':3}" | null
"CREATE TABLE async_test.users ( id UUID PRIMARY KEY, name text )" | "async_test"
"INSERT INTO async_test.users (id, name) values (uuid(), 'alice')" | "async_test"
"SELECT * FROM users where name = 'alice' ALLOW FILTERING" | "async_test"
keyspace | statement | expectedStatement
null | "DROP KEYSPACE IF EXISTS async_test" | "DROP KEYSPACE IF EXISTS async_test"
null | "CREATE KEYSPACE async_test WITH REPLICATION = {'class':'SimpleStrategy', 'replication_factor':3}" | "CREATE KEYSPACE async_test WITH REPLICATION = {?:?, ?:?}"
"async_test" | "CREATE TABLE async_test.users ( id UUID PRIMARY KEY, name text )" | "CREATE TABLE async_test.users ( id UUID PRIMARY KEY, name text )"
"async_test" | "INSERT INTO async_test.users (id, name) values (uuid(), 'alice')" | "INSERT INTO async_test.users (id, name) values (uuid(), ?)"
"async_test" | "SELECT * FROM users where name = 'alice' ALLOW FILTERING" | "SELECT * FROM users where name = ? ALLOW FILTERING"
}
def cassandraSpan(TraceAssert trace, int index, String statement, String keyspace, Object parentSpan = null, Throwable exception = null) {
@ -127,15 +135,14 @@ class CassandraClientTest extends AgentTestRunner {
childOf((SpanData) parentSpan)
}
attributes {
"${SemanticAttributes.NET_PEER_NAME.key()}" "localhost"
"${SemanticAttributes.NET_PEER_IP.key()}" "127.0.0.1"
"${SemanticAttributes.NET_PEER_PORT.key()}" EmbeddedCassandraServerHelper.getNativeTransportPort()
"${SemanticAttributes.DB_SYSTEM.key()}" "cassandra"
"${SemanticAttributes.DB_NAME.key()}" keyspace
"${SemanticAttributes.DB_STATEMENT.key()}" statement
"${SemanticAttributes.CASSANDRA_KEYSPACE.key()}" keyspace
"$SemanticAttributes.NET_PEER_NAME.key" "localhost"
"$SemanticAttributes.NET_PEER_IP.key" "127.0.0.1"
"$SemanticAttributes.NET_PEER_PORT.key" cassandraPort
"$SemanticAttributes.DB_SYSTEM.key" "cassandra"
"$SemanticAttributes.DB_NAME.key" keyspace
"$SemanticAttributes.DB_STATEMENT.key" statement
"$SemanticAttributes.CASSANDRA_KEYSPACE.key" keyspace
}
}
}
}

View File

@ -1,16 +1,10 @@
// Set properties before any plugins get loaded
ext {
maxJavaVersionForTests = JavaVersion.VERSION_1_8
cassandraDriverTestVersions = "[4.0,)"
}
apply from: "$rootDir/gradle/instrumentation.gradle"
muzzle {
pass {
group = "com.datastax.oss"
module = "java-driver-core"
versions = cassandraDriverTestVersions
versions = "[4.0,)"
assertInverse = true
}
}
@ -18,7 +12,5 @@ muzzle {
dependencies {
library group: 'com.datastax.oss', name: 'java-driver-core', version: '4.0.0'
testImplementation group: 'org.cassandraunit', name: 'cassandra-unit', version: '4.3.1.0'
latestDepTestLibrary group: 'com.datastax.oss', name: 'java-driver-core', version: '4.+'
}

View File

@ -35,6 +35,7 @@ public class CassandraClientInstrumentation extends Instrumenter.Default {
@Override
public String[] helperClassNames() {
return new String[] {
packageName + ".CassandraQueryNormalizer",
packageName + ".CassandraDatabaseClientTracer",
packageName + ".TracingCqlSession",
packageName + ".CompletionStageFunction"

View File

@ -11,7 +11,7 @@ import com.datastax.oss.driver.api.core.cql.ExecutionInfo;
import com.datastax.oss.driver.api.core.metadata.Node;
import io.opentelemetry.instrumentation.api.tracer.DatabaseClientTracer;
import io.opentelemetry.instrumentation.api.tracer.utils.NetPeerUtils;
import io.opentelemetry.javaagent.instrumentation.api.jdbc.DbSystem;
import io.opentelemetry.javaagent.instrumentation.api.db.DbSystem;
import io.opentelemetry.trace.Span;
import java.net.InetSocketAddress;
import java.util.Optional;
@ -26,7 +26,7 @@ public class CassandraDatabaseClientTracer extends DatabaseClientTracer<CqlSessi
@Override
protected String normalizeQuery(String query) {
return query;
return CassandraQueryNormalizer.normalize(query);
}
@Override

View File

@ -0,0 +1,26 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.javaagent.instrumentation.cassandra.v4_0;
import io.opentelemetry.javaagent.instrumentation.api.db.normalizer.ParseException;
import io.opentelemetry.javaagent.instrumentation.api.db.normalizer.SqlNormalizer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Normalizes CQL statements before they are recorded as the {@code db.statement} span attribute,
 * delegating to the shared {@code SqlNormalizer} to replace literal values with {@code ?}
 * placeholders.
 */
public final class CassandraQueryNormalizer {
  private static final Logger logger = LoggerFactory.getLogger(CassandraQueryNormalizer.class);

  private CassandraQueryNormalizer() {}

  /**
   * Returns the normalized form of {@code query}, or {@code null} when the statement cannot be
   * parsed.
   *
   * <p>NOTE(review): returning {@code null} on parse failure presumably avoids recording a raw
   * statement that may contain sensitive literals — confirm against the tracer's null handling.
   */
  public static String normalize(String query) {
    try {
      return SqlNormalizer.normalize(query);
    } catch (ParseException e) {
      logger.debug("Could not normalize Cassandra query", e);
      return null;
    }
  }
}

View File

@ -16,22 +16,34 @@ import io.opentelemetry.instrumentation.test.asserts.TraceAssert
import io.opentelemetry.sdk.trace.data.SpanData
import io.opentelemetry.trace.attributes.SemanticAttributes
import java.time.Duration
import org.cassandraunit.utils.EmbeddedCassandraServerHelper
import org.slf4j.Logger
import org.slf4j.LoggerFactory
import org.testcontainers.containers.GenericContainer
import org.testcontainers.containers.output.Slf4jLogConsumer
import spock.lang.Requires
import spock.lang.Shared
@Requires({ "true" != System.getenv("CIRCLECI") })
class CassandraClientTest extends AgentTestRunner {
private static final Logger log = LoggerFactory.getLogger(CassandraClientTest)
@Shared
GenericContainer cassandra
@Shared
int cassandraPort
def setupSpec() {
/*
This timeout seems excessive but we've seen tests fail with timeout of 40s.
TODO: if we continue to see failures we may want to consider using 'real' Cassandra
started in container like we do for memcached. Note: this will complicate things because
tests would have to assume they run under shared Cassandra and act accordingly.
*/
EmbeddedCassandraServerHelper.startEmbeddedCassandra(EmbeddedCassandraServerHelper.CASSANDRA_RNDPORT_YML_FILE, 120000L)
cassandra = new GenericContainer("cassandra:4.0")
.withExposedPorts(9042)
.withLogConsumer(new Slf4jLogConsumer(log))
.withStartupTimeout(Duration.ofSeconds(120))
cassandra.start()
cassandraPort = cassandra.getMappedPort(9042)
}
def cleanupSpec() {
EmbeddedCassandraServerHelper.cleanEmbeddedCassandra()
cassandra.stop()
}
def "test sync"() {
@ -43,7 +55,7 @@ class CassandraClientTest extends AgentTestRunner {
expect:
assertTraces(1) {
trace(0, 1) {
cassandraSpan(it, 0, statement, keyspace)
cassandraSpan(it, 0, expectedStatement, keyspace)
}
}
@ -51,12 +63,12 @@ class CassandraClientTest extends AgentTestRunner {
session.close()
where:
statement | keyspace
"DROP KEYSPACE IF EXISTS sync_test" | null
"CREATE KEYSPACE sync_test WITH REPLICATION = {'class':'SimpleStrategy', 'replication_factor':3}" | null
"CREATE TABLE sync_test.users ( id UUID PRIMARY KEY, name text )" | "sync_test"
"INSERT INTO sync_test.users (id, name) values (uuid(), 'alice')" | "sync_test"
"SELECT * FROM users where name = 'alice' ALLOW FILTERING" | "sync_test"
keyspace | statement | expectedStatement
null | "DROP KEYSPACE IF EXISTS sync_test" | "DROP KEYSPACE IF EXISTS sync_test"
null | "CREATE KEYSPACE sync_test WITH REPLICATION = {'class':'SimpleStrategy', 'replication_factor':3}" | "CREATE KEYSPACE sync_test WITH REPLICATION = {?:?, ?:?}"
"sync_test" | "CREATE TABLE sync_test.users ( id UUID PRIMARY KEY, name text )" | "CREATE TABLE sync_test.users ( id UUID PRIMARY KEY, name text )"
"sync_test" | "INSERT INTO sync_test.users (id, name) values (uuid(), 'alice')" | "INSERT INTO sync_test.users (id, name) values (uuid(), ?)"
"sync_test" | "SELECT * FROM users where name = 'alice' ALLOW FILTERING" | "SELECT * FROM users where name = ? ALLOW FILTERING"
}
def "test async"() {
@ -71,7 +83,7 @@ class CassandraClientTest extends AgentTestRunner {
assertTraces(1) {
trace(0, 2) {
basicSpan(it, 0, "parent")
cassandraSpan(it, 1, statement, keyspace, span(0))
cassandraSpan(it, 1, expectedStatement, keyspace, span(0))
}
}
@ -79,12 +91,12 @@ class CassandraClientTest extends AgentTestRunner {
session.close()
where:
statement | keyspace
"DROP KEYSPACE IF EXISTS async_test" | null
"CREATE KEYSPACE async_test WITH REPLICATION = {'class':'SimpleStrategy', 'replication_factor':3}" | null
"CREATE TABLE async_test.users ( id UUID PRIMARY KEY, name text )" | "async_test"
"INSERT INTO async_test.users (id, name) values (uuid(), 'alice')" | "async_test"
"SELECT * FROM users where name = 'alice' ALLOW FILTERING" | "async_test"
keyspace | statement | expectedStatement
null | "DROP KEYSPACE IF EXISTS async_test" | "DROP KEYSPACE IF EXISTS async_test"
null | "CREATE KEYSPACE async_test WITH REPLICATION = {'class':'SimpleStrategy', 'replication_factor':3}" | "CREATE KEYSPACE async_test WITH REPLICATION = {?:?, ?:?}"
"async_test" | "CREATE TABLE async_test.users ( id UUID PRIMARY KEY, name text )" | "CREATE TABLE async_test.users ( id UUID PRIMARY KEY, name text )"
"async_test" | "INSERT INTO async_test.users (id, name) values (uuid(), 'alice')" | "INSERT INTO async_test.users (id, name) values (uuid(), ?)"
"async_test" | "SELECT * FROM users where name = 'alice' ALLOW FILTERING" | "SELECT * FROM users where name = ? ALLOW FILTERING"
}
def cassandraSpan(TraceAssert trace, int index, String statement, String keyspace, Object parentSpan = null, Throwable exception = null) {
@ -97,12 +109,12 @@ class CassandraClientTest extends AgentTestRunner {
childOf((SpanData) parentSpan)
}
attributes {
"${SemanticAttributes.NET_PEER_NAME.key()}" "localhost"
"${SemanticAttributes.NET_PEER_IP.key()}" "127.0.0.1"
"${SemanticAttributes.NET_PEER_PORT.key()}" EmbeddedCassandraServerHelper.getNativeTransportPort()
"${SemanticAttributes.DB_SYSTEM.key()}" "cassandra"
"${SemanticAttributes.DB_NAME.key()}" keyspace
"${SemanticAttributes.DB_STATEMENT.key()}" statement
"$SemanticAttributes.NET_PEER_NAME.key" "localhost"
"$SemanticAttributes.NET_PEER_IP.key" "127.0.0.1"
"$SemanticAttributes.NET_PEER_PORT.key" cassandraPort
"$SemanticAttributes.DB_SYSTEM.key" "cassandra"
"$SemanticAttributes.DB_NAME.key" keyspace
"$SemanticAttributes.DB_STATEMENT.key" statement
}
}
}
@ -112,11 +124,10 @@ class CassandraClientTest extends AgentTestRunner {
.withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(0))
.build()
return CqlSession.builder()
.addContactPoint(new InetSocketAddress(EmbeddedCassandraServerHelper.getHost(), EmbeddedCassandraServerHelper.getNativeTransportPort()))
.addContactPoint(new InetSocketAddress("localhost", cassandraPort))
.withConfigLoader(configLoader)
.withLocalDatacenter("datacenter1")
.withKeyspace((String) keyspace)
.build()
}
}

View File

@ -6,7 +6,7 @@
package io.opentelemetry.javaagent.instrumentation.couchbase.v2_0;
import io.opentelemetry.instrumentation.api.tracer.DatabaseClientTracer;
import io.opentelemetry.javaagent.instrumentation.api.jdbc.DbSystem;
import io.opentelemetry.javaagent.instrumentation.api.db.DbSystem;
import java.lang.reflect.Method;
import java.net.InetSocketAddress;

View File

@ -8,7 +8,7 @@ package io.opentelemetry.javaagent.instrumentation.couchbase.v2_0;
import static io.opentelemetry.javaagent.instrumentation.couchbase.v2_0.CouchbaseClientTracer.TRACER;
import static io.opentelemetry.trace.Span.Kind.CLIENT;
import io.opentelemetry.javaagent.instrumentation.api.jdbc.DbSystem;
import io.opentelemetry.javaagent.instrumentation.api.db.DbSystem;
import io.opentelemetry.javaagent.instrumentation.rxjava.TracedOnSubscribe;
import io.opentelemetry.trace.Span;
import io.opentelemetry.trace.attributes.SemanticAttributes;

View File

@ -8,7 +8,7 @@ package io.opentelemetry.javaagent.instrumentation.geode;
import static io.opentelemetry.trace.Span.Kind.CLIENT;
import io.opentelemetry.instrumentation.api.tracer.DatabaseClientTracer;
import io.opentelemetry.javaagent.instrumentation.api.jdbc.DbSystem;
import io.opentelemetry.javaagent.instrumentation.api.db.DbSystem;
import io.opentelemetry.trace.Span;
import io.opentelemetry.trace.attributes.SemanticAttributes;
import java.net.InetSocketAddress;

View File

@ -1,29 +1,11 @@
plugins {
id 'com.intershop.gradle.javacc' version '4.0.0'
}
apply from: "$rootDir/gradle/instrumentation.gradle"
muzzle {
pass {
coreJdk()
}
}
javacc {
configs {
template {
inputFile = file('src/main/javacc/SqlNormalizer.jj')
packageName = 'io.opentelemetry.javaagent.instrumentation.jdbc.normalizer'
}
}
}
tasks.withType(Checkstyle).configureEach {
exclude '**/jdbc/normalizer/*.java'
}
dependencies {
// jdbc unit testing
testLibrary group: 'com.h2database', name: 'h2', version: '1.3.169'

View File

@ -43,15 +43,7 @@ public final class ConnectionInstrumentation extends Instrumenter.Default {
@Override
public String[] helperClassNames() {
return new String[] {
packageName + ".normalizer.Token",
packageName + ".normalizer.ParseException",
packageName + ".normalizer.SimpleCharStream",
packageName + ".normalizer.SqlNormalizerConstants",
packageName + ".normalizer.TokenMgrError",
packageName + ".normalizer.SqlNormalizerTokenManager",
packageName + ".normalizer.SqlNormalizer",
packageName + ".JDBCMaps",
packageName + ".JDBCUtils",
packageName + ".JDBCMaps", packageName + ".JDBCUtils",
};
}

View File

@ -8,7 +8,7 @@ package io.opentelemetry.javaagent.instrumentation.jdbc;
import static io.opentelemetry.javaagent.instrumentation.jdbc.DBInfo.DEFAULT;
import static java.util.regex.Pattern.CASE_INSENSITIVE;
import io.opentelemetry.javaagent.instrumentation.api.jdbc.DbSystem;
import io.opentelemetry.javaagent.instrumentation.api.db.DbSystem;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URLDecoder;

View File

@ -6,7 +6,7 @@
package io.opentelemetry.javaagent.instrumentation.jdbc;
import io.opentelemetry.instrumentation.api.config.Config;
import io.opentelemetry.javaagent.instrumentation.jdbc.normalizer.SqlNormalizer;
import io.opentelemetry.javaagent.instrumentation.api.db.normalizer.SqlNormalizer;
import java.lang.reflect.Field;
import java.sql.Connection;
import java.sql.Statement;

View File

@ -46,16 +46,7 @@ public final class PreparedStatementInstrumentation extends Instrumenter.Default
@Override
public String[] helperClassNames() {
return new String[] {
packageName + ".normalizer.Token",
packageName + ".normalizer.ParseException",
packageName + ".normalizer.SimpleCharStream",
packageName + ".normalizer.SqlNormalizerConstants",
packageName + ".normalizer.TokenMgrError",
packageName + ".normalizer.SqlNormalizerTokenManager",
packageName + ".normalizer.SqlNormalizer",
packageName + ".JDBCUtils",
packageName + ".JDBCMaps",
packageName + ".JdbcTracer",
packageName + ".JDBCUtils", packageName + ".JDBCMaps", packageName + ".JdbcTracer",
};
}

View File

@ -46,16 +46,7 @@ public final class StatementInstrumentation extends Instrumenter.Default {
@Override
public String[] helperClassNames() {
return new String[] {
packageName + ".normalizer.Token",
packageName + ".normalizer.ParseException",
packageName + ".normalizer.SimpleCharStream",
packageName + ".normalizer.SqlNormalizerConstants",
packageName + ".normalizer.TokenMgrError",
packageName + ".normalizer.SqlNormalizerTokenManager",
packageName + ".normalizer.SqlNormalizer",
packageName + ".JDBCMaps",
packageName + ".JDBCUtils",
packageName + ".JdbcTracer",
packageName + ".JDBCMaps", packageName + ".JDBCUtils", packageName + ".JdbcTracer",
};
}

View File

@ -6,7 +6,7 @@
package io.opentelemetry.javaagent.instrumentation.jedis.v1_4;
import io.opentelemetry.instrumentation.api.tracer.DatabaseClientTracer;
import io.opentelemetry.javaagent.instrumentation.api.jdbc.DbSystem;
import io.opentelemetry.javaagent.instrumentation.api.db.DbSystem;
import java.net.InetSocketAddress;
import redis.clients.jedis.Connection;
import redis.clients.jedis.Protocol.Command;

View File

@ -6,7 +6,7 @@
package io.opentelemetry.javaagent.instrumentation.jedis.v3_0;
import io.opentelemetry.instrumentation.api.tracer.DatabaseClientTracer;
import io.opentelemetry.javaagent.instrumentation.api.jdbc.DbSystem;
import io.opentelemetry.javaagent.instrumentation.api.db.DbSystem;
import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets;
import redis.clients.jedis.Connection;

View File

@ -7,7 +7,7 @@ package io.opentelemetry.javaagent.instrumentation.lettuce.v4_0;
import com.lambdaworks.redis.RedisURI;
import io.opentelemetry.instrumentation.api.tracer.DatabaseClientTracer;
import io.opentelemetry.javaagent.instrumentation.api.jdbc.DbSystem;
import io.opentelemetry.javaagent.instrumentation.api.db.DbSystem;
import io.opentelemetry.trace.Span;
import io.opentelemetry.trace.attributes.SemanticAttributes;
import java.net.InetSocketAddress;

View File

@ -7,7 +7,7 @@ package io.opentelemetry.javaagent.instrumentation.lettuce.v5_0;
import io.lettuce.core.RedisURI;
import io.opentelemetry.instrumentation.api.tracer.DatabaseClientTracer;
import io.opentelemetry.javaagent.instrumentation.api.jdbc.DbSystem;
import io.opentelemetry.javaagent.instrumentation.api.db.DbSystem;
import io.opentelemetry.trace.Span;
import io.opentelemetry.trace.attributes.SemanticAttributes;
import java.net.InetSocketAddress;

View File

@ -16,7 +16,7 @@ import io.lettuce.core.tracing.Tracing;
import io.opentelemetry.OpenTelemetry;
import io.opentelemetry.instrumentation.api.tracer.utils.NetPeerUtils;
import io.opentelemetry.instrumentation.api.tracer.utils.NetPeerUtils.SpanAttributeSetter;
import io.opentelemetry.javaagent.instrumentation.api.jdbc.DbSystem;
import io.opentelemetry.javaagent.instrumentation.api.db.DbSystem;
import io.opentelemetry.trace.Span;
import io.opentelemetry.trace.Span.Kind;
import io.opentelemetry.trace.StatusCanonicalCode;

View File

@ -9,7 +9,7 @@ import com.mongodb.ServerAddress;
import com.mongodb.connection.ConnectionDescription;
import com.mongodb.event.CommandStartedEvent;
import io.opentelemetry.instrumentation.api.tracer.DatabaseClientTracer;
import io.opentelemetry.javaagent.instrumentation.api.jdbc.DbSystem;
import io.opentelemetry.javaagent.instrumentation.api.db.DbSystem;
import io.opentelemetry.trace.Span;
import io.opentelemetry.trace.attributes.SemanticAttributes;
import java.net.InetSocketAddress;

View File

@ -6,7 +6,7 @@
package io.opentelemetry.javaagent.instrumentation.rediscala;
import io.opentelemetry.instrumentation.api.tracer.DatabaseClientTracer;
import io.opentelemetry.javaagent.instrumentation.api.jdbc.DbSystem;
import io.opentelemetry.javaagent.instrumentation.api.db.DbSystem;
import java.net.InetSocketAddress;
import redis.RedisCommand;

View File

@ -7,7 +7,7 @@ package io.opentelemetry.javaagent.instrumentation.redisson;
import io.netty.channel.Channel;
import io.opentelemetry.instrumentation.api.tracer.DatabaseClientTracer;
import io.opentelemetry.javaagent.instrumentation.api.jdbc.DbSystem;
import io.opentelemetry.javaagent.instrumentation.api.db.DbSystem;
import java.net.InetSocketAddress;
import java.util.List;
import org.redisson.client.RedisConnection;

View File

@ -1,3 +1,7 @@
plugins {
id 'com.intershop.gradle.javacc' version '4.0.0'
}
group = 'io.opentelemetry.javaagent'
apply from: "$rootDir/gradle/java.gradle"
@ -6,6 +10,19 @@ apply from: "$rootDir/gradle/publish.gradle"
project.ext.minimumBranchCoverage = 0.0
project.ext.minimumInstructionCoverage = 0.0
javacc {
configs {
template {
inputFile = file('src/main/javacc/SqlNormalizer.jj')
packageName = 'io.opentelemetry.javaagent.instrumentation.api.db.normalizer'
}
}
}
tasks.withType(Checkstyle).configureEach {
exclude '**/db/normalizer/*.java'
}
dependencies {
api deps.opentelemetryApi
compileOnly deps.opentelemetrySdk

View File

@ -3,7 +3,7 @@
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.javaagent.instrumentation.api.jdbc;
package io.opentelemetry.javaagent.instrumentation.api.db;
public final class DbSystem {
@ -23,4 +23,6 @@ public final class DbSystem {
public static final String OTHER_SQL = "other_sql";
public static final String POSTGRESQL = "postgresql";
public static final String REDIS = "redis";
private DbSystem() {}
}

View File

@ -1,17 +1,6 @@
/*
* Copyright The OpenTelemetry Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* SPDX-License-Identifier: Apache-2.0
*/
options {
@ -22,7 +11,7 @@ options {
PARSER_BEGIN(SqlNormalizer)
package io.opentelemetry.javaagent.instrumentation.jdbc.normalizer;
package io.opentelemetry.javaagent.instrumentation.api.db.normalizer;
import java.io.StringReader;
@ -78,6 +67,12 @@ StringBuffer Input() :
if (sb.length() > LIMIT) { return sb; }
}
|
t = <DollarQuotedStr>
{
sb.append('?');
if (sb.length() > LIMIT) { return sb; }
}
|
t = <Whitespace>
{
sb.append(' ');
@ -194,6 +189,8 @@ TOKEN :
|
< DoubleQuotedStr: "\"" ( ("\"\"")|(~["\""]) ) * "\"" >
|
< DollarQuotedStr: "$$" (~["$"]) * "$$" >
|
< Whitespace: ( [" ","\t","\n","\r"] )+ >
|
< Other: ~[] >

View File

@ -3,8 +3,8 @@
* SPDX-License-Identifier: Apache-2.0
*/
import io.opentelemetry.javaagent.instrumentation.jdbc.JDBCUtils
import io.opentelemetry.javaagent.instrumentation.jdbc.normalizer.SqlNormalizer
package io.opentelemetry.javaagent.instrumentation.api.db.normalizer
import spock.lang.Specification
import spock.lang.Timeout
@ -13,7 +13,7 @@ class SqlNormalizerTest extends Specification {
def "normalize #originalSql"() {
setup:
def actualNormalized = JDBCUtils.normalizeSql(originalSql)
def actualNormalized = SqlNormalizer.normalize(originalSql)
expect:
actualNormalized == normalizedSql
@ -57,7 +57,7 @@ class SqlNormalizerTest extends Specification {
"SELECT * FROM TABLE WHERE FIELD = ' an escaped '' quote mark inside'" | "SELECT * FROM TABLE WHERE FIELD = ?"
"SELECT * FROM TABLE WHERE FIELD = '\\\\'" | "SELECT * FROM TABLE WHERE FIELD = ?"
"SELECT * FROM TABLE WHERE FIELD = '\"inside doubles\"'" | "SELECT * FROM TABLE WHERE FIELD = ?"
"SELECT * FROM TABLE WHERE FIELD = '\"\"'" | "SELECT * FROM TABLE WHERE FIELD = ?"
"SELECT * FROM TABLE WHERE FIELD = '\"\$\$\$\$\"'" | "SELECT * FROM TABLE WHERE FIELD = ?"
"SELECT * FROM TABLE WHERE FIELD = 'a single \" doublequote inside'" | "SELECT * FROM TABLE WHERE FIELD = ?"
// Some databases support/encourage " instead of ' with same escape rules
@ -66,9 +66,16 @@ class SqlNormalizerTest extends Specification {
"SELECT * FROM TABLE WHERE FIELD = \" an escaped \"\" quote mark inside\"" | "SELECT * FROM TABLE WHERE FIELD = ?"
"SELECT * FROM TABLE WHERE FIELD = \"\\\\\"" | "SELECT * FROM TABLE WHERE FIELD = ?"
"SELECT * FROM TABLE WHERE FIELD = \"'inside singles'\"" | "SELECT * FROM TABLE WHERE FIELD = ?"
"SELECT * FROM TABLE WHERE FIELD = \"''\"" | "SELECT * FROM TABLE WHERE FIELD = ?"
"SELECT * FROM TABLE WHERE FIELD = \"'\$\$\$\$'\"" | "SELECT * FROM TABLE WHERE FIELD = ?"
"SELECT * FROM TABLE WHERE FIELD = \"a single ' singlequote inside\"" | "SELECT * FROM TABLE WHERE FIELD = ?"
// Some databases allow using dollar-quoted strings
"SELECT * FROM TABLE WHERE FIELD = \$\$\$\$" | "SELECT * FROM TABLE WHERE FIELD = ?"
"SELECT * FROM TABLE WHERE FIELD = \$\$words and spaces\$\$" | "SELECT * FROM TABLE WHERE FIELD = ?"
"SELECT * FROM TABLE WHERE FIELD = \$\$quotes '\" inside\$\$" | "SELECT * FROM TABLE WHERE FIELD = ?"
"SELECT * FROM TABLE WHERE FIELD = \$\$\"''\"\$\$" | "SELECT * FROM TABLE WHERE FIELD = ?"
"SELECT * FROM TABLE WHERE FIELD = \$\$\\\\\$\$" | "SELECT * FROM TABLE WHERE FIELD = ?"
// Unicode, including a unicode identifier with a trailing number
"SELECT * FROM TABLE\u09137 WHERE FIELD = '\u0194'" | "SELECT * FROM TABLE\u09137 WHERE FIELD = ?"
@ -80,7 +87,7 @@ class SqlNormalizerTest extends Specification {
setup:
String s = "'"
for (int i = 0; i < 10000; i++) {
assert JDBCUtils.normalizeSql(s) != null
assert SqlNormalizer.normalize(s) != null
s += "'"
}
}
@ -91,7 +98,7 @@ class SqlNormalizerTest extends Specification {
for (int i = 0; i < 10000; i++) {
s += String.valueOf(i)
}
assert "?" == JDBCUtils.normalizeSql(s)
assert "?" == SqlNormalizer.normalize(s)
}
def "very long numbers at end of table name don't cause problem"() {
@ -100,7 +107,7 @@ class SqlNormalizerTest extends Specification {
for (int i = 0; i < 10000; i++) {
s += String.valueOf(i)
}
assert s.substring(0, SqlNormalizer.LIMIT) == JDBCUtils.normalizeSql(s)
assert s.substring(0, SqlNormalizer.LIMIT) == SqlNormalizer.normalize(s)
}
def "test 32k truncation"() {
@ -109,7 +116,7 @@ class SqlNormalizerTest extends Specification {
for (int i = 0; i < 10000; i++) {
s.append("SELECT * FROM TABLE WHERE FIELD = 1234 AND ")
}
String normalized = JDBCUtils.normalizeSql(s.toString())
String normalized = SqlNormalizer.normalize(s.toString())
System.out.println(normalized.length())
assert normalized.length() <= SqlNormalizer.LIMIT
assert !normalized.contains("1234")
@ -123,7 +130,7 @@ class SqlNormalizerTest extends Specification {
for (int c = 0; c < 1000; c++) {
sb.append((char) r.nextInt((int) Character.MAX_VALUE))
}
JDBCUtils.normalizeSql(sb.toString())
SqlNormalizer.normalize(sb.toString())
}
}
}