Merge pull request #1016 from DataDog/tyler/parse-optimization
Minor optimization to avoid using BigInteger if possible
commit 91cbb67f6e
@@ -417,6 +417,7 @@ public class DDSpan implements Span, MutableSpan {
   }
 
   protected static class UInt64IDStringSerializer extends StdSerializer<String> {
+    private static final int LONG_PARSE_LIMIT = String.valueOf(Long.MAX_VALUE).length();
 
     public UInt64IDStringSerializer() {
       this(null);
@@ -430,7 +431,15 @@ public class DDSpan implements Span, MutableSpan {
     public void serialize(
         final String value, final JsonGenerator gen, final SerializerProvider provider)
         throws IOException {
-      gen.writeNumber(new BigInteger(value));
+      final int length = value.length();
+      // BigIntegers are expensive, so let's try to avoid using them if possible.
+      // This is a rough approximation: some values that pass this test could still
+      // be parsed with Long.parseLong.
+      if (length > LONG_PARSE_LIMIT || (length == LONG_PARSE_LIMIT && value.startsWith("9"))) {
+        gen.writeNumber(new BigInteger(value));
+      } else {
+        gen.writeNumber(Long.parseLong(value));
+      }
     }
   }
 }
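Why the length check is safe: Long.MAX_VALUE (9223372036854775807) has 19 decimal digits, so any shorter ID string is guaranteed to fit in a signed long, and a 19-digit string that does not start with '9' is at most 8999999999999999999, which is also below the limit. Only 19-digit strings starting with '9' and longer strings (unsigned 64-bit IDs run up to 2^64 - 1, which has 20 digits) keep the BigInteger path. A few of those, such as Long.MAX_VALUE - 1, would in fact fit in a long, but the check deliberately stays conservative instead of exact. Below is a minimal standalone sketch of the same heuristic; the class name and the needsBigInteger helper are illustrative and not part of this diff.

import java.math.BigInteger;

public class ParseLimitSketch {
  // Mirrors the new constant in the PR: the number of digits in Long.MAX_VALUE (19).
  private static final int LONG_PARSE_LIMIT = String.valueOf(Long.MAX_VALUE).length();

  // Same check as the new serialize() body: anything shorter than 19 digits, or exactly
  // 19 digits and not starting with '9', is guaranteed to fit in a signed long.
  static boolean needsBigInteger(final String value) {
    final int length = value.length();
    return length > LONG_PARSE_LIMIT || (length == LONG_PARSE_LIMIT && value.startsWith("9"));
  }

  public static void main(String[] args) {
    final String[] ids = {
      "0",
      "1",
      "8223372036854775807",  // 19 digits, starts with '8' -> long path
      "9223372036854775806",  // Long.MAX_VALUE - 1: fits in a long, but the rough check
                              // still sends it to BigInteger (a harmless false positive)
      "9223372036854775808",  // Long.MAX_VALUE + 1 -> must use BigInteger
      "18446744073709551615", // 2^64 - 1, the largest unsigned 64-bit ID -> BigInteger
    };
    for (final String id : ids) {
      if (needsBigInteger(id)) {
        System.out.println(id + " -> new BigInteger(...) = " + new BigInteger(id));
      } else {
        System.out.println(id + " -> Long.parseLong(...) = " + Long.parseLong(id));
      }
    }
  }
}

Running it, the first three values take the Long.parseLong branch and the last three fall back to BigInteger.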
@@ -1,6 +1,5 @@
 package datadog.opentracing
 
 import com.fasterxml.jackson.databind.ObjectMapper
 import com.google.common.collect.Maps
 import datadog.trace.agent.test.utils.ConfigUtils
@@ -128,7 +127,9 @@ class DDSpanSerializationTest extends Specification {
     value                                                        | _
     BigInteger.ZERO                                              | _
     BigInteger.ONE                                               | _
     8223372036854775807G                                         | _
     BigInteger.valueOf(Long.MAX_VALUE).subtract(BigInteger.ONE)  | _
     BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE)       | _
     BigInteger.valueOf(2).pow(64).subtract(BigInteger.ONE)       | _
   }
 }
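The data table above pins the boundary cases on both sides of the heuristic: 8223372036854775807G (19 digits, not starting with '9') takes the new Long.parseLong path, while Long.MAX_VALUE - 1, Long.MAX_VALUE + 1, and 2^64 - 1 stay on the BigInteger path. For any value that switches paths the serialized JSON must not change. The standalone check below (illustrative only, not part of the test suite) shows that Jackson's JsonGenerator emits the same number token from either branch.

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import java.io.StringWriter;
import java.math.BigInteger;

public class WriteNumberEquivalence {
  public static void main(String[] args) throws Exception {
    // A value that the new heuristic routes to the long branch (19 digits, starts with '8').
    final String id = "8223372036854775807";

    // New fast path: write the ID as a primitive long.
    final StringWriter viaLong = new StringWriter();
    try (JsonGenerator gen = new JsonFactory().createGenerator(viaLong)) {
      gen.writeNumber(Long.parseLong(id));
    }

    // Old path: write the ID as a BigInteger.
    final StringWriter viaBigInteger = new StringWriter();
    try (JsonGenerator gen = new JsonFactory().createGenerator(viaBigInteger)) {
      gen.writeNumber(new BigInteger(id));
    }

    // Both branches must produce the identical JSON number token.
    System.out.println(viaLong + " == " + viaBigInteger + " : "
        + viaLong.toString().equals(viaBigInteger.toString()));
  }
}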