mirror of https://github.com/grpc/grpc-java.git

context: add basic context profiling

parent 6ee6eae5a0
commit 6ea8fffa2b
io/grpc/Context.java

@@ -17,13 +17,16 @@
 package io.grpc;
 
 import java.io.Closeable;
+import java.lang.reflect.Method;
 import java.util.ArrayList;
+import java.util.Random;
 import java.util.concurrent.Callable;
 import java.util.concurrent.Executor;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicLongArray;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.logging.Level;
 import java.util.logging.Logger;
@@ -100,6 +103,55 @@ public class Context {
   private static final PersistentHashArrayMappedTrie<Key<?>, Object> EMPTY_ENTRIES =
       new PersistentHashArrayMappedTrie<Key<?>, Object>();
 
+  static final AtomicLongArray withValueCounts;
+  /**
+   * Counts how many times a unique value is added to the context.
+   */
+  static final AtomicLongArray withValueUniqueCounts;
+  static final AtomicLongArray getCounts;
+  private static final Method threadLocalRandomCurrent;
+
+  static {
+    Method localThreadLocalRandomCurrent = null;
+    try {
+      Class<?> cls = Class.forName("java.util.concurrent.ThreadLocalRandom");
+      localThreadLocalRandomCurrent = cls.getMethod("current");
+      // Call it once just to check that it works.
+      localThreadLocalRandomCurrent.invoke(null);
+    } catch (Throwable t) {
+      log.log(Level.FINE, "Can't find TLR, skipping", t);
+      localThreadLocalRandomCurrent = null;
+    }
+    if (localThreadLocalRandomCurrent != null) {
+      withValueCounts = new AtomicLongArray(100);
+      withValueUniqueCounts = new AtomicLongArray(100);
+      getCounts = new AtomicLongArray(100);
+      threadLocalRandomCurrent = localThreadLocalRandomCurrent;
+    } else {
+      withValueCounts = new AtomicLongArray(0);
+      withValueUniqueCounts = new AtomicLongArray(0);
+      getCounts = new AtomicLongArray(0);
+      threadLocalRandomCurrent = null;
+    }
+  }
+
+  private static boolean shouldSample() {
+    if (threadLocalRandomCurrent == null) {
+      return false;
+    }
+    Random r;
+    try {
+      r = (Random) threadLocalRandomCurrent.invoke(null);
+    } catch (Exception e) {
+      log.log(Level.FINE, "Can't get TLR", e);
+      return false;
+    }
+    if (r.nextInt(256) != 0) {
+      return false;
+    }
+    return true;
+  }
+
   // Long chains of contexts are suspicious and usually indicate a misuse of Context.
   // The threshold is arbitrarily chosen.
   // VisibleForTesting
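Note: the reflective lookup above is presumably there so the class still initializes on runtimes without java.util.concurrent.ThreadLocalRandom, which was added in Java 7; on such runtimes the arrays are zero-length and shouldSample() always returns false, so profiling silently disables itself. For comparison, a minimal sketch of the same 1-in-256 sampling on Java 7+ without reflection; the class name here is ours, not part of the commit:

    import java.util.concurrent.ThreadLocalRandom;

    final class SamplingSketch {
      // True for roughly 1 in 256 calls; ThreadLocalRandom avoids the
      // cross-thread contention a shared java.util.Random would suffer.
      static boolean shouldSample() {
        return ThreadLocalRandom.current().nextInt(256) == 0;
      }
    }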
@@ -316,7 +368,15 @@ public class Context {
    *
    */
   public <V> Context withValue(Key<V> k1, V v1) {
-    return new Context(this, keyValueEntries.put(k1, v1));
+    PersistentHashArrayMappedTrie<Key<?>, Object> newKeyValueEntries = keyValueEntries.put(k1, v1);
+    if (shouldSample()) {
+      withValueUniqueCounts.addAndGet(
+          Math.min(keyValueEntries.size(), withValueUniqueCounts.length() - 1),
+          newKeyValueEntries.size() - keyValueEntries.size());
+      withValueCounts.incrementAndGet(
+          Math.min(keyValueEntries.size(), withValueCounts.length() - 1));
+    }
+    return new Context(this, newKeyValueEntries);
   }
 
   /**
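Note the bucket scheme shared by all the counters: the index is the parent context's key count, clamped by Math.min into the last slot, so each array is a histogram over context sizes whose final bucket absorbs every context holding 99 or more keys. The delta added to withValueUniqueCounts is the change in trie size: 1 when k1 is new, 0 when the put merely replaced an existing key's value. For illustration (our arithmetic, not the diff's): a parent holding 3 keys that adds a fresh key bumps withValueCounts[3] and withValueUniqueCounts[3] by 1 each, while re-setting an existing key moves only withValueCounts[3].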
@@ -324,7 +384,17 @@ public class Context {
    * from its parent.
    */
   public <V1, V2> Context withValues(Key<V1> k1, V1 v1, Key<V2> k2, V2 v2) {
-    return new Context(this, keyValueEntries.put(k1, v1).put(k2, v2));
+    PersistentHashArrayMappedTrie<Key<?>, Object> newKeyValueEntries =
+        keyValueEntries.put(k1, v1).put(k2, v2);
+    if (shouldSample()) {
+      withValueUniqueCounts.addAndGet(
+          Math.min(keyValueEntries.size(), withValueUniqueCounts.length() - 1),
+          newKeyValueEntries.size() - keyValueEntries.size());
+      withValueCounts.addAndGet(
+          Math.min(keyValueEntries.size(), withValueCounts.length() - 1),
+          2);
+    }
+    return new Context(this, newKeyValueEntries);
   }
 
   /**
@@ -332,7 +402,17 @@ public class Context {
    * from its parent.
    */
   public <V1, V2, V3> Context withValues(Key<V1> k1, V1 v1, Key<V2> k2, V2 v2, Key<V3> k3, V3 v3) {
-    return new Context(this, keyValueEntries.put(k1, v1).put(k2, v2).put(k3, v3));
+    PersistentHashArrayMappedTrie<Key<?>, Object> newKeyValueEntries =
+        keyValueEntries.put(k1, v1).put(k2, v2).put(k3, v3);
+    if (shouldSample()) {
+      withValueUniqueCounts.addAndGet(
+          Math.min(keyValueEntries.size(), withValueUniqueCounts.length() - 1),
+          newKeyValueEntries.size() - keyValueEntries.size());
+      withValueCounts.addAndGet(
+          Math.min(keyValueEntries.size(), withValueCounts.length() - 1),
+          3);
+    }
+    return new Context(this, newKeyValueEntries);
   }
 
   /**
@@ -341,7 +421,17 @@ public class Context {
    */
   public <V1, V2, V3, V4> Context withValues(Key<V1> k1, V1 v1, Key<V2> k2, V2 v2,
       Key<V3> k3, V3 v3, Key<V4> k4, V4 v4) {
-    return new Context(this, keyValueEntries.put(k1, v1).put(k2, v2).put(k3, v3).put(k4, v4));
+    PersistentHashArrayMappedTrie<Key<?>, Object> newKeyValueEntries =
+        keyValueEntries.put(k1, v1).put(k2, v2).put(k3, v3).put(k4, v4);
+    if (shouldSample()) {
+      withValueUniqueCounts.addAndGet(
+          Math.min(keyValueEntries.size(), withValueUniqueCounts.length() - 1),
+          newKeyValueEntries.size() - keyValueEntries.size());
+      withValueCounts.addAndGet(
+          Math.min(keyValueEntries.size(), withValueCounts.length() - 1),
+          4);
+    }
+    return new Context(this, newKeyValueEntries);
   }
 
   /**
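The two-, three-, and four-key overloads above follow the same template as withValue(k1, v1); only the chained put calls and the constant passed to withValueCounts.addAndGet (2, 3, 4) differ, so that histogram counts key-value pairs written rather than method invocations.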
@@ -650,6 +740,9 @@ public class Context {
    * Lookup the value for a key in the context inheritance chain.
    */
   private Object lookup(Key<?> key) {
+    if (shouldSample()) {
+      getCounts.incrementAndGet(Math.min(keyValueEntries.size(), getCounts.length() - 1));
+    }
     return keyValueEntries.get(key);
   }
 
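lookup() backs Key.get and is the hottest of the instrumented paths. When ThreadLocalRandom is available, every lookup pays one reflective invoke inside shouldSample(), but only about 1 in 256 calls also pays the atomic increment; when it is unavailable, the guard reduces to a single null check.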
io/grpc/InternalContext.java (new file)

@@ -0,0 +1,36 @@
+/*
+ * Copyright 2018, gRPC Authors All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.grpc;
+
+import java.util.concurrent.atomic.AtomicLongArray;
+
+/**
+ * Internal accessor for {@link Context}.
+ */
+public final class InternalContext {
+  public static AtomicLongArray getCounts() {
+    return Context.getCounts;
+  }
+
+  public static AtomicLongArray withValueCounts() {
+    return Context.withValueCounts;
+  }
+
+  public static AtomicLongArray withValueUniqueCounts() {
+    return Context.withValueUniqueCounts;
+  }
+}
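A sketch of how a tool might read the histograms through this accessor; the class name and output format are ours, purely illustrative, and assume io.grpc is on the classpath:

    import java.util.concurrent.atomic.AtomicLongArray;

    final class ContextProfileDump {
      static void dump() {
        AtomicLongArray gets = InternalContext.getCounts();
        AtomicLongArray puts = InternalContext.withValueCounts();
        for (int i = 0; i < gets.length(); i++) {
          long g = gets.get(i);
          long p = puts.get(i);
          if (g != 0 || p != 0) {
            // Bucket i: contexts holding i keys (the last bucket clamps the rest).
            System.out.println("size " + i + ": " + g + " sampled gets, " + p + " sampled puts");
          }
        }
      }
    }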
io/grpc/PersistentHashArrayMappedTrie.java

@@ -40,6 +40,13 @@ final class PersistentHashArrayMappedTrie<K,V> {
     this.root = root;
   }
 
+  public int size() {
+    if (root == null) {
+      return 0;
+    }
+    return root.size();
+  }
+
   /**
    * Returns the value with the specified key, or {@code null} if it does not exist.
    */
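With the per-node bookkeeping added below, size() is O(1) at every level: a Leaf answers 1, a CollisionLeaf its array length, and a CompressedIndex a value fixed at construction, so the size() calls in each withValue variant never traverse the trie. A small illustration (our own; the class is package-private, so this only compiles inside io.grpc):

    PersistentHashArrayMappedTrie<String, String> empty =
        new PersistentHashArrayMappedTrie<String, String>();
    // put returns a new trie; the original is persistent and keeps its size.
    PersistentHashArrayMappedTrie<String, String> one = empty.put("k", "v");
    assert empty.size() == 0;
    assert one.size() == 1;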
@@ -72,6 +79,11 @@ final class PersistentHashArrayMappedTrie<K,V> {
       this.value = value;
     }
 
+    @Override
+    public int size() {
+      return 1;
+    }
+
     @Override
     public V get(K key, int hash, int bitsConsumed) {
       if (this.key == key) {
@@ -124,6 +136,11 @@ final class PersistentHashArrayMappedTrie<K,V> {
       this.values = values;
     }
 
+    @Override
+    public int size() {
+      return values.length;
+    }
+
     @Override
     public V get(K key, int hash, int bitsConsumed) {
       for (int i = 0; i < keys.length; i++) {
@@ -188,10 +205,17 @@ final class PersistentHashArrayMappedTrie<K,V> {
 
     final int bitmap;
     final Node<K,V>[] values;
+    private final int size;
 
-    private CompressedIndex(int bitmap, Node<K,V>[] values) {
+    private CompressedIndex(int bitmap, Node<K,V>[] values, int size) {
       this.bitmap = bitmap;
       this.values = values;
+      this.size = size;
     }
 
+    @Override
+    public int size() {
+      return size;
+    }
+
     @Override
@@ -221,13 +245,16 @@ final class PersistentHashArrayMappedTrie<K,V> {
           newValues,
           compressedIndex + 1,
           values.length - compressedIndex);
-      return new CompressedIndex<K,V>(newBitmap, newValues);
+      return new CompressedIndex<K,V>(newBitmap, newValues, size() + 1);
     } else {
       // Replace
       Node<K,V>[] newValues = Arrays.copyOf(values, values.length);
       newValues[compressedIndex] =
           values[compressedIndex].put(key, value, hash, bitsConsumed + BITS);
-      return new CompressedIndex<K,V>(bitmap, newValues);
+      int newSize = size();
+      newSize += newValues[compressedIndex].size();
+      newSize -= values[compressedIndex].size();
+      return new CompressedIndex<K,V>(bitmap, newValues, newSize);
     }
   }
 
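The two branches keep the cached size exact: a put into an empty slot always adds exactly one entry, while a put into an occupied slot is delegated to the child and may be either an insert or a replacement somewhere below, so the new size is derived from the child's before/after difference rather than assumed to be +1.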
@@ -240,7 +267,7 @@ final class PersistentHashArrayMappedTrie<K,V> {
       Node<K,V> node = combine(node1, hash1, node2, hash2, bitsConsumed + BITS);
       @SuppressWarnings("unchecked")
       Node<K,V>[] values = (Node<K,V>[]) new Node<?,?>[] {node};
-      return new CompressedIndex<K,V>(indexBit1, values);
+      return new CompressedIndex<K,V>(indexBit1, values, node.size());
     } else {
       // Make node1 the smallest
       if (uncompressedIndex(hash1, bitsConsumed) > uncompressedIndex(hash2, bitsConsumed)) {
@@ -250,7 +277,7 @@ final class PersistentHashArrayMappedTrie<K,V> {
       }
       @SuppressWarnings("unchecked")
       Node<K,V>[] values = (Node<K,V>[]) new Node<?,?>[] {node1, node2};
-      return new CompressedIndex<K,V>(indexBit1 | indexBit2, values);
+      return new CompressedIndex<K,V>(indexBit1 | indexBit2, values, node1.size() + node2.size());
     }
   }
 
@@ -283,5 +310,7 @@ final class PersistentHashArrayMappedTrie<K,V> {
     V get(K key, int hash, int bitsConsumed);
 
     Node<K,V> put(K key, V value, int hash, int bitsConsumed);
+
+    int size();
   }
 }
io/grpc/PersistentHashArrayMappedTrieTest.java

@@ -41,6 +41,9 @@ public class PersistentHashArrayMappedTrieTest {
     assertSame(value2, ret.get(key, key.hashCode(), 0));
 
     assertSame(value1, leaf.get(key, key.hashCode(), 0));
+
+    assertEquals(1, leaf.size());
+    assertEquals(1, ret.size());
   }
 
   @Test
@@ -57,6 +60,9 @@ public class PersistentHashArrayMappedTrieTest {
 
     assertSame(value1, leaf.get(key1, key1.hashCode(), 0));
     assertSame(null, leaf.get(key2, key2.hashCode(), 0));
+
+    assertEquals(1, leaf.size());
+    assertEquals(2, ret.size());
   }
 
   @Test
@@ -73,6 +79,9 @@ public class PersistentHashArrayMappedTrieTest {
 
     assertSame(value1, leaf.get(key1, key1.hashCode(), 0));
     assertSame(null, leaf.get(key2, key2.hashCode(), 0));
+
+    assertEquals(1, leaf.size());
+    assertEquals(2, ret.size());
   }
 
   @Test(expected = AssertionError.class)
@@ -106,6 +115,9 @@ public class PersistentHashArrayMappedTrieTest {
     assertSame(value1, leaf.get(key1, key1.hashCode(), 0));
     assertSame(value2, leaf.get(key2, key2.hashCode(), 0));
     assertSame(null, leaf.get(insertKey, insertKey.hashCode(), 0));
+
+    assertEquals(2, leaf.size());
+    assertEquals(3, ret.size());
   }
 
   @Test
@@ -124,6 +136,9 @@ public class PersistentHashArrayMappedTrieTest {
 
     assertSame(value, leaf.get(key, key.hashCode(), 0));
     assertSame(originalValue, leaf.get(replaceKey, replaceKey.hashCode(), 0));
+
+    assertEquals(2, leaf.size());
+    assertEquals(2, ret.size());
   }
 
   @Test
@@ -146,6 +161,9 @@ public class PersistentHashArrayMappedTrieTest {
     assertSame(value1, leaf.get(key1, key1.hashCode(), 0));
     assertSame(value2, leaf.get(key2, key2.hashCode(), 0));
     assertSame(null, leaf.get(key3, key3.hashCode(), 0));
+
+    assertEquals(2, leaf.size());
+    assertEquals(3, ret.size());
   }
 
   @Test
@@ -166,12 +184,17 @@ public class PersistentHashArrayMappedTrieTest {
 
         assertSame(value1, ret.get(key1, key1.hashCode(), 0));
         assertSame(value2, ret.get(key2, key2.hashCode(), 0));
+
+        assertEquals(2, ret.size());
       }
     }
 
     Verifier verifier = new Verifier();
     verifier.verify(CompressedIndex.combine(leaf1, key1.hashCode(), leaf2, key2.hashCode(), 0));
     verifier.verify(CompressedIndex.combine(leaf2, key2.hashCode(), leaf1, key1.hashCode(), 0));
+
+    assertEquals(1, leaf1.size());
+    assertEquals(1, leaf2.size());
   }
 
   @Test
@@ -192,12 +215,17 @@ public class PersistentHashArrayMappedTrieTest {
         assertEquals((1 << 31) | (1 << 17), collisionLeaf.bitmap);
         assertSame(value1, ret.get(key1, key1.hashCode(), 0));
         assertSame(value2, ret.get(key2, key2.hashCode(), 0));
+
+        assertEquals(2, ret.size());
       }
     }
 
     Verifier verifier = new Verifier();
     verifier.verify(CompressedIndex.combine(leaf1, key1.hashCode(), leaf2, key2.hashCode(), 0));
     verifier.verify(CompressedIndex.combine(leaf2, key2.hashCode(), leaf1, key1.hashCode(), 0));
+
+    assertEquals(1, leaf1.size());
+    assertEquals(1, leaf2.size());
   }
 
   /**