Update pkg and opencensus dep. (#56)

Matt Moore authored on 2019-07-17 15:30:28 -07:00; committed by Knative Prow Robot
parent 194ab54362
commit 191bdc53f9
36 changed files with 1106 additions and 382 deletions

Gopkg.lock (generated)

@ -13,6 +13,14 @@
revision = "775730d6e48254a2430366162cf6298e5368833c"
version = "v0.39.0"
[[projects]]
digest = "1:642cf8e80572f9dc0677b0f241c8ab2e715c9dccc215270ea873c86ddca0062c"
name = "contrib.go.opencensus.io/exporter/prometheus"
packages = ["."]
pruneopts = "NUT"
revision = "f4a2c1e53ec45636355d35fb9022b64e4bdd4a91"
version = "v0.1.0"
[[projects]]
digest = "1:b6eb7c2538ec2999a072c0e372a18d7b7e3aedac249f26e159586fa5f892909f"
name = "contrib.go.opencensus.io/exporter/stackdriver"
@ -363,14 +371,14 @@
version = "v1.0.3"
[[projects]]
digest = "1:69a97603fe8952de86ee1e74a065f7974ec7d7d1d2301d3f5da6d342156363f4"
digest = "1:d2e799c7b52b568fa8ee9e62dbd2c5b0a1a34bf4a6bc43c2dd4b0b35bf52874f"
name = "go.opencensus.io"
packages = [
".",
"exporter/prometheus",
"internal",
"internal/tagencoding",
"metric/metricdata",
"metric/metricexport",
"metric/metricproducer",
"plugin/ocgrpc",
"plugin/ochttp",
@ -386,8 +394,8 @@
"trace/tracestate",
]
pruneopts = "NUT"
revision = "75c0cca22312e51bfd4fafdbe9197ae399e18b38"
version = "v0.20.2"
revision = "9c377598961b706d1542bd2d84d538b5094d596e"
version = "v0.22.0"
[[projects]]
digest = "1:cc9d86ec4e6e3bdf87e3a421273bfeed003cf8e21351c0302fe8b0eb7b10efe6"
@ -930,7 +938,7 @@
[[projects]]
branch = "master"
digest = "1:45a635362a586b2729559eceffcc1ba62dd3f79c0fbb2a666b6f4410f46515ab"
digest = "1:84c8f2d8045716adf46ada32c56b1a6fdd61d0d0b21fa03ca6308c07b1913d7c"
name = "knative.dev/pkg"
packages = [
"apis",
@ -949,7 +957,7 @@
"metrics/metricskey",
]
pruneopts = "T"
revision = "76da19bbc6f409dba9017f60003560e03aeff6e7"
revision = "a3e4b8d9f7bd4bc96c266f751762e194d5efe98f"
[solve-meta]
analyzer-name = "dep"


@ -60,6 +60,11 @@ required = [
# Needed by the stackdriver exporter.
revision = "21591786a5e0c21806209b266cc6dfdfa85b3cdb"
[[override]]
name = "go.opencensus.io"
# Needed because this includes the appropriate version of metricdata
version = "v0.22.0"
[prune]
go-tests = true
unused-packages = true


@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


@ -14,21 +14,20 @@
// Package prometheus contains a Prometheus exporter that supports exporting
// OpenCensus views as Prometheus metrics.
package prometheus // import "go.opencensus.io/exporter/prometheus"
package prometheus // import "contrib.go.opencensus.io/exporter/prometheus"
import (
"bytes"
"fmt"
"log"
"net/http"
"sync"
"go.opencensus.io/internal"
"go.opencensus.io/stats/view"
"go.opencensus.io/tag"
"context"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promhttp"
"go.opencensus.io/metric/metricdata"
"go.opencensus.io/metric/metricexport"
"go.opencensus.io/stats/view"
)
// Exporter exports stats to Prometheus, users need
@ -61,39 +60,12 @@ func NewExporter(o Options) (*Exporter, error) {
c: collector,
handler: promhttp.HandlerFor(o.Registry, promhttp.HandlerOpts{}),
}
collector.ensureRegisteredOnce()
return e, nil
}
var _ http.Handler = (*Exporter)(nil)
var _ view.Exporter = (*Exporter)(nil)
func (c *collector) registerViews(views ...*view.View) {
count := 0
for _, view := range views {
sig := viewSignature(c.opts.Namespace, view)
c.registeredViewsMu.Lock()
_, ok := c.registeredViews[sig]
c.registeredViewsMu.Unlock()
if !ok {
desc := prometheus.NewDesc(
viewName(c.opts.Namespace, view),
view.Description,
tagKeysToLabels(view.TagKeys),
c.opts.ConstLabels,
)
c.registeredViewsMu.Lock()
c.registeredViews[sig] = desc
c.registeredViewsMu.Unlock()
count++
}
}
if count == 0 {
return
}
c.ensureRegisteredOnce()
}
// ensureRegisteredOnce invokes reg.Register on the collector itself
// exactly once to ensure that we don't get errors such as
@ -123,11 +95,8 @@ func (o *Options) onError(err error) {
// corresponding Prometheus Metric: SumData will be converted
// to Untyped Metric, CountData will be a Counter Metric,
// DistributionData will be a Histogram Metric.
// Deprecated in lieu of metricexport.Reader interface.
func (e *Exporter) ExportView(vd *view.Data) {
if len(vd.Rows) == 0 {
return
}
e.c.addViewData(vd)
}
// ServeHTTP serves the Prometheus endpoint.
@ -145,151 +114,164 @@ type collector struct {
// reg helps collector register views dynamically.
reg *prometheus.Registry
// viewData are accumulated and atomically
// appended to on every Export invocation, from
// stats. These views are cleared out when
// Collect is invoked and the cycle is repeated.
viewData map[string]*view.Data
registeredViewsMu sync.Mutex
// registeredViews maps a view to a prometheus desc.
registeredViews map[string]*prometheus.Desc
}
func (c *collector) addViewData(vd *view.Data) {
c.registerViews(vd.View)
sig := viewSignature(c.opts.Namespace, vd.View)
c.mu.Lock()
c.viewData[sig] = vd
c.mu.Unlock()
// reader reads metrics from all registered producers.
reader *metricexport.Reader
}
func (c *collector) Describe(ch chan<- *prometheus.Desc) {
c.registeredViewsMu.Lock()
registered := make(map[string]*prometheus.Desc)
for k, desc := range c.registeredViews {
registered[k] = desc
}
c.registeredViewsMu.Unlock()
for _, desc := range registered {
ch <- desc
}
de := &descExporter{c: c, descCh: ch}
c.reader.ReadAndExport(de)
}
// Collect fetches the statistics from OpenCensus
// and delivers them as Prometheus Metrics.
// Collect is invoked everytime a prometheus.Gatherer is run
// Collect is invoked every time a prometheus.Gatherer is run
// for example when the HTTP endpoint is invoked by Prometheus.
func (c *collector) Collect(ch chan<- prometheus.Metric) {
// We need a copy of all the view data up until this point.
viewData := c.cloneViewData()
for _, vd := range viewData {
sig := viewSignature(c.opts.Namespace, vd.View)
c.registeredViewsMu.Lock()
desc := c.registeredViews[sig]
c.registeredViewsMu.Unlock()
for _, row := range vd.Rows {
metric, err := c.toMetric(desc, vd.View, row)
if err != nil {
c.opts.onError(err)
} else {
ch <- metric
}
}
}
}
func (c *collector) toMetric(desc *prometheus.Desc, v *view.View, row *view.Row) (prometheus.Metric, error) {
switch data := row.Data.(type) {
case *view.CountData:
return prometheus.NewConstMetric(desc, prometheus.CounterValue, float64(data.Value), tagValues(row.Tags, v.TagKeys)...)
case *view.DistributionData:
points := make(map[float64]uint64)
// Histograms are cumulative in Prometheus.
// Get cumulative bucket counts.
cumCount := uint64(0)
for i, b := range v.Aggregation.Buckets {
cumCount += uint64(data.CountPerBucket[i])
points[b] = cumCount
}
return prometheus.NewConstHistogram(desc, uint64(data.Count), data.Sum(), points, tagValues(row.Tags, v.TagKeys)...)
case *view.SumData:
return prometheus.NewConstMetric(desc, prometheus.UntypedValue, data.Value, tagValues(row.Tags, v.TagKeys)...)
case *view.LastValueData:
return prometheus.NewConstMetric(desc, prometheus.GaugeValue, data.Value, tagValues(row.Tags, v.TagKeys)...)
default:
return nil, fmt.Errorf("aggregation %T is not yet supported", v.Aggregation)
}
}
func tagKeysToLabels(keys []tag.Key) (labels []string) {
for _, key := range keys {
labels = append(labels, internal.Sanitize(key.Name()))
}
return labels
me := &metricExporter{c: c, metricCh: ch}
c.reader.ReadAndExport(me)
}
func newCollector(opts Options, registrar *prometheus.Registry) *collector {
return &collector{
reg: registrar,
opts: opts,
registeredViews: make(map[string]*prometheus.Desc),
viewData: make(map[string]*view.Data),
}
reg: registrar,
opts: opts,
reader: metricexport.NewReader()}
}
func tagValues(t []tag.Tag, expectedKeys []tag.Key) []string {
var values []string
// Add empty string for all missing keys in the tags map.
idx := 0
for _, t := range t {
for t.Key != expectedKeys[idx] {
idx++
values = append(values, "")
func (c *collector) toDesc(metric *metricdata.Metric) *prometheus.Desc {
return prometheus.NewDesc(
metricName(c.opts.Namespace, metric),
metric.Descriptor.Description,
toPromLabels(metric.Descriptor.LabelKeys),
c.opts.ConstLabels)
}
type metricExporter struct {
c *collector
metricCh chan<- prometheus.Metric
}
// ExportMetrics exports to Prometheus.
// Each OpenCensus Metric will be converted to a
// corresponding Prometheus Metric:
// TypeCumulativeInt64 and TypeCumulativeFloat64 become a Counter Metric,
// TypeCumulativeDistribution becomes a Histogram Metric, and
// TypeGaugeFloat64 and TypeGaugeInt64 become a Gauge Metric.
func (me *metricExporter) ExportMetrics(ctx context.Context, metrics []*metricdata.Metric) error {
for _, metric := range metrics {
desc := me.c.toDesc(metric)
for _, ts := range metric.TimeSeries {
tvs := toLabelValues(ts.LabelValues)
for _, point := range ts.Points {
metric, err := toPromMetric(desc, metric, point, tvs)
if err != nil {
me.c.opts.onError(err)
} else if metric != nil {
me.metricCh <- metric
}
}
}
values = append(values, t.Value)
idx++
}
for idx < len(expectedKeys) {
idx++
values = append(values, "")
}
return values
return nil
}
func viewName(namespace string, v *view.View) string {
type descExporter struct {
c *collector
descCh chan<- *prometheus.Desc
}
// ExportMetrics exports descriptors to Prometheus.
// It is invoked when a request to scrape descriptors is received.
func (me *descExporter) ExportMetrics(ctx context.Context, metrics []*metricdata.Metric) error {
for _, metric := range metrics {
desc := me.c.toDesc(metric)
me.descCh <- desc
}
return nil
}
func toPromLabels(mls []metricdata.LabelKey) (labels []string) {
for _, ml := range mls {
labels = append(labels, sanitize(ml.Key))
}
return labels
}
func metricName(namespace string, m *metricdata.Metric) string {
var name string
if namespace != "" {
name = namespace + "_"
}
return name + internal.Sanitize(v.Name)
return name + sanitize(m.Descriptor.Name)
}
func viewSignature(namespace string, v *view.View) string {
var buf bytes.Buffer
buf.WriteString(viewName(namespace, v))
for _, k := range v.TagKeys {
buf.WriteString("-" + k.Name())
func toPromMetric(
desc *prometheus.Desc,
metric *metricdata.Metric,
point metricdata.Point,
labelValues []string) (prometheus.Metric, error) {
switch metric.Descriptor.Type {
case metricdata.TypeCumulativeFloat64, metricdata.TypeCumulativeInt64:
pv, err := toPromValue(point)
if err != nil {
return nil, err
}
return prometheus.NewConstMetric(desc, prometheus.CounterValue, pv, labelValues...)
case metricdata.TypeGaugeFloat64, metricdata.TypeGaugeInt64:
pv, err := toPromValue(point)
if err != nil {
return nil, err
}
return prometheus.NewConstMetric(desc, prometheus.GaugeValue, pv, labelValues...)
case metricdata.TypeCumulativeDistribution:
switch v := point.Value.(type) {
case *metricdata.Distribution:
points := make(map[float64]uint64)
// Histograms are cumulative in Prometheus.
// Get cumulative bucket counts.
cumCount := uint64(0)
for i, b := range v.BucketOptions.Bounds {
cumCount += uint64(v.Buckets[i].Count)
points[b] = cumCount
}
return prometheus.NewConstHistogram(desc, uint64(v.Count), v.Sum, points, labelValues...)
default:
return nil, typeMismatchError(point)
}
case metricdata.TypeSummary:
// TODO: [rghetia] add support for TypeSummary.
return nil, nil
default:
return nil, fmt.Errorf("aggregation %T is not yet supported", metric.Descriptor.Type)
}
return buf.String()
}
func (c *collector) cloneViewData() map[string]*view.Data {
c.mu.Lock()
defer c.mu.Unlock()
viewDataCopy := make(map[string]*view.Data)
for sig, viewData := range c.viewData {
viewDataCopy[sig] = viewData
func toLabelValues(labelValues []metricdata.LabelValue) (values []string) {
for _, lv := range labelValues {
if lv.Present {
values = append(values, lv.Value)
} else {
values = append(values, "")
}
}
return values
}
func typeMismatchError(point metricdata.Point) error {
return fmt.Errorf("point type %T does not match metric type", point)
}
func toPromValue(point metricdata.Point) (float64, error) {
switch v := point.Value.(type) {
case float64:
return v, nil
case int64:
return float64(v), nil
default:
return 0.0, typeMismatchError(point)
}
return viewDataCopy
}
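For orientation, here is a minimal usage sketch of the exporter after this refactor (the namespace and listen address are illustrative, not taken from this commit). The exporter still implements http.Handler, but a scrape now flows through metricexport.Reader rather than the removed view-data cache:

package main

import (
    "log"
    "net/http"

    "contrib.go.opencensus.io/exporter/prometheus"
)

func main() {
    // The "demo" namespace and ":8888" address are example values.
    pe, err := prometheus.NewExporter(prometheus.Options{Namespace: "demo"})
    if err != nil {
        log.Fatalf("failed to create the Prometheus exporter: %v", err)
    }
    // Serving the exporter triggers collector.Collect, which now calls
    // reader.ReadAndExport against all registered metric producers.
    http.Handle("/metrics", pe)
    log.Fatal(http.ListenAndServe(":8888", nil))
}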


@ -0,0 +1,50 @@
// Copyright 2017, OpenCensus Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package prometheus
import (
"strings"
"unicode"
)
const labelKeySizeLimit = 100
// sanitize returns a string that is truncated to 100 characters if it's too
// long, and replaces non-alphanumeric characters with underscores.
func sanitize(s string) string {
if len(s) == 0 {
return s
}
if len(s) > labelKeySizeLimit {
s = s[:labelKeySizeLimit]
}
s = strings.Map(sanitizeRune, s)
if unicode.IsDigit(rune(s[0])) {
s = "key_" + s
}
if s[0] == '_' {
s = "key" + s
}
return s
}
// converts anything that is not a letter or digit to an underscore
func sanitizeRune(r rune) rune {
if unicode.IsLetter(r) || unicode.IsDigit(r) {
return r
}
// Everything else turns into an underscore
return '_'
}


@ -18,6 +18,11 @@ import (
"time"
)
// Exemplars keys.
const (
AttachmentKeySpanContext = "SpanContext"
)
// Exemplar is an example data point associated with each bucket of a
// distribution type aggregation.
//


@ -14,6 +14,13 @@
package metricdata
// LabelKey represents the key of a label. It has an optional
// description attribute.
type LabelKey struct {
Key string
Description string
}
// LabelValue represents the value of a label.
// The zero value represents a missing label value, which may be treated
// differently to an empty string value by some back ends.
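As the Descriptor hunk below shows, LabelKeys changes from []string to []LabelKey. A small sketch of how the new type is populated (all names and values here are illustrative, not from this commit):

package example

import "go.opencensus.io/metric/metricdata"

// Illustrative only: a descriptor whose label keys now carry descriptions.
func exampleDescriptor() metricdata.Descriptor {
    return metricdata.Descriptor{
        Name:        "example.com/request_count",
        Description: "Number of requests handled",
        Unit:        metricdata.UnitDimensionless,
        Type:        metricdata.TypeCumulativeInt64,
        LabelKeys:   []metricdata.LabelKey{{Key: "method", Description: "HTTP method"}},
    }
}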


@ -22,11 +22,11 @@ import (
// Descriptor holds metadata about a metric.
type Descriptor struct {
Name string // full name of the metric
Description string // human-readable description
Unit Unit // units for the measure
Type Type // type of measure
LabelKeys []string // label keys
Name string // full name of the metric
Description string // human-readable description
Unit Unit // units for the measure
Type Type // type of measure
LabelKeys []LabelKey // label keys
}
// Metric represents a quantity measured against a resource with different

vendor/go.opencensus.io/metric/metricexport/doc.go (generated, vendored, new file)

@ -0,0 +1,19 @@
// Copyright 2018, OpenCensus Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package metricexport contains support for exporting metric data.
//
// This is an EXPERIMENTAL package, and may change in arbitrary ways without
// notice.
package metricexport // import "go.opencensus.io/metric/metricexport"

vendor/go.opencensus.io/metric/metricexport/export.go (generated, vendored, new file)

@ -0,0 +1,26 @@
// Copyright 2019, OpenCensus Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package metricexport
import (
"context"
"go.opencensus.io/metric/metricdata"
)
// Exporter is an interface that exporters implement to export the metric data.
type Exporter interface {
ExportMetrics(ctx context.Context, data []*metricdata.Metric) error
}

vendor/go.opencensus.io/metric/metricexport/reader.go (generated, vendored, new file)

@ -0,0 +1,187 @@
// Copyright 2019, OpenCensus Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package metricexport
import (
"context"
"fmt"
"sync"
"time"
"go.opencensus.io/metric/metricdata"
"go.opencensus.io/metric/metricproducer"
"go.opencensus.io/trace"
)
var (
defaultSampler = trace.ProbabilitySampler(0.0001)
errReportingIntervalTooLow = fmt.Errorf("reporting interval less than %d", minimumReportingDuration)
errAlreadyStarted = fmt.Errorf("already started")
errIntervalReaderNil = fmt.Errorf("interval reader is nil")
errExporterNil = fmt.Errorf("exporter is nil")
errReaderNil = fmt.Errorf("reader is nil")
)
const (
defaultReportingDuration = 60 * time.Second
minimumReportingDuration = 1 * time.Second
defaultSpanName = "ExportMetrics"
)
// ReaderOptions contains options pertaining to metrics reader.
type ReaderOptions struct {
// SpanName is the name used for span created to export metrics.
SpanName string
}
// Reader reads metrics from all producers registered
// with producer manager and exports those metrics using provided
// exporter.
type Reader struct {
sampler trace.Sampler
spanName string
}
// IntervalReader periodically reads metrics from all producers registered
// with producer manager and exports those metrics using provided
// exporter. Call Reader.Stop() to stop the reader.
type IntervalReader struct {
// ReportingInterval is the time duration between two consecutive
// metrics reports. defaultReportingDuration is used if it is not set.
// It cannot be set lower than minimumReportingDuration.
ReportingInterval time.Duration
exporter Exporter
timer *time.Ticker
quit, done chan bool
mu sync.RWMutex
reader *Reader
}
// ReaderOption applies changes to ReaderOptions.
type ReaderOption func(*ReaderOptions)
// WithSpanName makes the new reader use the given span name when exporting metrics.
func WithSpanName(spanName string) ReaderOption {
return func(o *ReaderOptions) {
o.SpanName = spanName
}
}
// NewReader returns a reader configured with specified options.
func NewReader(o ...ReaderOption) *Reader {
var opts ReaderOptions
for _, op := range o {
op(&opts)
}
reader := &Reader{defaultSampler, defaultSpanName}
if opts.SpanName != "" {
reader.spanName = opts.SpanName
}
return reader
}
// NewIntervalReader creates a reader. Once started it periodically
// reads metrics from all producers and exports them using provided exporter.
func NewIntervalReader(reader *Reader, exporter Exporter) (*IntervalReader, error) {
if exporter == nil {
return nil, errExporterNil
}
if reader == nil {
return nil, errReaderNil
}
r := &IntervalReader{
exporter: exporter,
reader: reader,
}
return r, nil
}
// Start starts the IntervalReader which periodically reads metrics from all
// producers registered with global producer manager. If the reporting interval
// is not set prior to calling this function then default reporting interval
// is used.
func (ir *IntervalReader) Start() error {
if ir == nil {
return errIntervalReaderNil
}
ir.mu.Lock()
defer ir.mu.Unlock()
var reportingInterval = defaultReportingDuration
if ir.ReportingInterval != 0 {
if ir.ReportingInterval < minimumReportingDuration {
return errReportingIntervalTooLow
}
reportingInterval = ir.ReportingInterval
}
if ir.done != nil {
return errAlreadyStarted
}
ir.timer = time.NewTicker(reportingInterval)
ir.quit = make(chan bool)
ir.done = make(chan bool)
go ir.startInternal()
return nil
}
func (ir *IntervalReader) startInternal() {
for {
select {
case <-ir.timer.C:
ir.reader.ReadAndExport(ir.exporter)
case <-ir.quit:
ir.timer.Stop()
ir.done <- true
return
}
}
}
// Stop stops the reader from reading and exporting metrics.
// Additional calls to Stop are no-ops.
func (ir *IntervalReader) Stop() {
if ir == nil {
return
}
ir.mu.Lock()
defer ir.mu.Unlock()
if ir.quit == nil {
return
}
ir.quit <- true
<-ir.done
close(ir.quit)
close(ir.done)
ir.quit = nil
}
// ReadAndExport reads metrics from all producers registered with the
// producer manager and then exports them using the provided exporter.
func (r *Reader) ReadAndExport(exporter Exporter) {
ctx, span := trace.StartSpan(context.Background(), r.spanName, trace.WithSampler(r.sampler))
defer span.End()
producers := metricproducer.GlobalManager().GetAll()
data := []*metricdata.Metric{}
for _, producer := range producers {
data = append(data, producer.Read()...)
}
// TODO: [rghetia] add metrics for errors.
exporter.ExportMetrics(ctx, data)
}
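Putting the new package together, here is a minimal sketch of driving periodic export with an IntervalReader; the log-based exporter and the 10-second interval are assumptions for illustration only:

package example

import (
    "context"
    "log"
    "time"

    "go.opencensus.io/metric/metricdata"
    "go.opencensus.io/metric/metricexport"
)

// logExporter is an assumed example Exporter that just logs what it receives.
type logExporter struct{}

func (logExporter) ExportMetrics(ctx context.Context, data []*metricdata.Metric) error {
    log.Printf("exporting %d metrics", len(data))
    return nil
}

func runExport() error {
    ir, err := metricexport.NewIntervalReader(metricexport.NewReader(), logExporter{})
    if err != nil {
        return err
    }
    ir.ReportingInterval = 10 * time.Second // must be at least minimumReportingDuration (1s)
    if err := ir.Start(); err != nil {
        return err
    }
    defer ir.Stop()
    // ... run the workload; metrics are read and exported every 10s ...
    return nil
}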


@ -17,5 +17,5 @@ package opencensus // import "go.opencensus.io"
// Version is the current release version of OpenCensus in use.
func Version() string {
return "0.21.0"
return "0.22.0"
}


@ -15,8 +15,8 @@
package ocgrpc
import (
"context"
"go.opencensus.io/trace"
"golang.org/x/net/context"
"google.golang.org/grpc/stats"
)


@ -18,8 +18,8 @@ package ocgrpc
import (
"time"
"context"
"go.opencensus.io/tag"
"golang.org/x/net/context"
"google.golang.org/grpc/grpclog"
"google.golang.org/grpc/stats"
)


@ -15,8 +15,8 @@
package ocgrpc
import (
"context"
"go.opencensus.io/trace"
"golang.org/x/net/context"
"google.golang.org/grpc/stats"
)


@ -18,7 +18,7 @@ package ocgrpc
import (
"time"
"golang.org/x/net/context"
"context"
"go.opencensus.io/tag"
"google.golang.org/grpc/grpclog"


@ -22,9 +22,11 @@ import (
"sync/atomic"
"time"
"go.opencensus.io/metric/metricdata"
ocstats "go.opencensus.io/stats"
"go.opencensus.io/stats/view"
"go.opencensus.io/tag"
"go.opencensus.io/trace"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/grpclog"
"google.golang.org/grpc/stats"
@ -141,27 +143,31 @@ func handleRPCEnd(ctx context.Context, s *stats.End) {
}
latencyMillis := float64(elapsedTime) / float64(time.Millisecond)
attachments := getSpanCtxAttachment(ctx)
if s.Client {
ocstats.RecordWithTags(ctx,
[]tag.Mutator{
ocstats.RecordWithOptions(ctx,
ocstats.WithTags(
tag.Upsert(KeyClientMethod, methodName(d.method)),
tag.Upsert(KeyClientStatus, st),
},
ClientSentBytesPerRPC.M(atomic.LoadInt64(&d.sentBytes)),
ClientSentMessagesPerRPC.M(atomic.LoadInt64(&d.sentCount)),
ClientReceivedMessagesPerRPC.M(atomic.LoadInt64(&d.recvCount)),
ClientReceivedBytesPerRPC.M(atomic.LoadInt64(&d.recvBytes)),
ClientRoundtripLatency.M(latencyMillis))
tag.Upsert(KeyClientStatus, st)),
ocstats.WithAttachments(attachments),
ocstats.WithMeasurements(
ClientSentBytesPerRPC.M(atomic.LoadInt64(&d.sentBytes)),
ClientSentMessagesPerRPC.M(atomic.LoadInt64(&d.sentCount)),
ClientReceivedMessagesPerRPC.M(atomic.LoadInt64(&d.recvCount)),
ClientReceivedBytesPerRPC.M(atomic.LoadInt64(&d.recvBytes)),
ClientRoundtripLatency.M(latencyMillis)))
} else {
ocstats.RecordWithTags(ctx,
[]tag.Mutator{
ocstats.RecordWithOptions(ctx,
ocstats.WithTags(
tag.Upsert(KeyServerStatus, st),
},
ServerSentBytesPerRPC.M(atomic.LoadInt64(&d.sentBytes)),
ServerSentMessagesPerRPC.M(atomic.LoadInt64(&d.sentCount)),
ServerReceivedMessagesPerRPC.M(atomic.LoadInt64(&d.recvCount)),
ServerReceivedBytesPerRPC.M(atomic.LoadInt64(&d.recvBytes)),
ServerLatency.M(latencyMillis))
),
ocstats.WithAttachments(attachments),
ocstats.WithMeasurements(
ServerSentBytesPerRPC.M(atomic.LoadInt64(&d.sentBytes)),
ServerSentMessagesPerRPC.M(atomic.LoadInt64(&d.sentCount)),
ServerReceivedMessagesPerRPC.M(atomic.LoadInt64(&d.recvCount)),
ServerReceivedBytesPerRPC.M(atomic.LoadInt64(&d.recvBytes)),
ServerLatency.M(latencyMillis)))
}
}
@ -206,3 +212,16 @@ func statusCodeToString(s *status.Status) string {
return "CODE_" + strconv.FormatInt(int64(c), 10)
}
}
func getSpanCtxAttachment(ctx context.Context) metricdata.Attachments {
attachments := map[string]interface{}{}
span := trace.FromContext(ctx)
if span == nil {
return attachments
}
spanCtx := span.SpanContext()
if spanCtx.IsSampled() {
attachments[metricdata.AttachmentKeySpanContext] = spanCtx
}
return attachments
}


@ -19,9 +19,9 @@ import (
"google.golang.org/grpc/codes"
"context"
"go.opencensus.io/trace"
"go.opencensus.io/trace/propagation"
"golang.org/x/net/context"
"google.golang.org/grpc/metadata"
"google.golang.org/grpc/stats"
"google.golang.org/grpc/status"


@ -124,6 +124,12 @@ func (h *Handler) startTrace(w http.ResponseWriter, r *http.Request) (*http.Requ
}
}
span.AddAttributes(requestAttrs(r)...)
if r.Body == nil {
// TODO: Handle cases where ContentLength is not set.
} else if r.ContentLength > 0 {
span.AddMessageReceiveEvent(0, /* TODO: messageID */
int64(r.ContentLength), -1)
}
return r.WithContext(ctx), span.End
}
@ -201,6 +207,9 @@ func (t *trackingResponseWriter) Header() http.Header {
func (t *trackingResponseWriter) Write(data []byte) (int, error) {
n, err := t.writer.Write(data)
t.respSize += int64(n)
// Add message event for request bytes sent.
span := trace.FromContext(t.ctx)
span.AddMessageSendEvent(0 /* TODO: messageID */, int64(n), -1)
return n, err
}


@ -18,6 +18,7 @@ package stats
import (
"context"
"go.opencensus.io/metric/metricdata"
"go.opencensus.io/stats/internal"
"go.opencensus.io/tag"
)
@ -30,28 +31,48 @@ func init() {
}
}
type recordOptions struct {
attachments metricdata.Attachments
mutators []tag.Mutator
measurements []Measurement
}
// WithAttachments applies provided exemplar attachments.
func WithAttachments(attachments metricdata.Attachments) Options {
return func(ro *recordOptions) {
ro.attachments = attachments
}
}
// WithTags applies provided tag mutators.
func WithTags(mutators ...tag.Mutator) Options {
return func(ro *recordOptions) {
ro.mutators = mutators
}
}
// WithMeasurements applies provided measurements.
func WithMeasurements(measurements ...Measurement) Options {
return func(ro *recordOptions) {
ro.measurements = measurements
}
}
// Options apply changes to recordOptions.
type Options func(*recordOptions)
func createRecordOption(ros ...Options) *recordOptions {
o := &recordOptions{}
for _, ro := range ros {
ro(o)
}
return o
}
// Record records one or multiple measurements with the same context at once.
// If there are any tags in the context, measurements will be tagged with them.
func Record(ctx context.Context, ms ...Measurement) {
recorder := internal.DefaultRecorder
if recorder == nil {
return
}
if len(ms) == 0 {
return
}
record := false
for _, m := range ms {
if m.desc.subscribed() {
record = true
break
}
}
if !record {
return
}
// TODO(songy23): fix attachments.
recorder(tag.FromContext(ctx), ms, map[string]interface{}{})
RecordWithOptions(ctx, WithMeasurements(ms...))
}
// RecordWithTags records one or multiple measurements at once.
@ -60,10 +81,37 @@ func Record(ctx context.Context, ms ...Measurement) {
// RecordWithTags is useful if you want to record with tag mutations but don't want
// to propagate the mutations in the context.
func RecordWithTags(ctx context.Context, mutators []tag.Mutator, ms ...Measurement) error {
ctx, err := tag.New(ctx, mutators...)
if err != nil {
return err
return RecordWithOptions(ctx, WithTags(mutators...), WithMeasurements(ms...))
}
// RecordWithOptions records measurements from the given options (if any) against context
// and tags and attachments in the options (if any).
// If there are any tags in the context, measurements will be tagged with them.
func RecordWithOptions(ctx context.Context, ros ...Options) error {
o := createRecordOption(ros...)
if len(o.measurements) == 0 {
return nil
}
Record(ctx, ms...)
recorder := internal.DefaultRecorder
if recorder == nil {
return nil
}
record := false
for _, m := range o.measurements {
if m.desc.subscribed() {
record = true
break
}
}
if !record {
return nil
}
if len(o.mutators) > 0 {
var err error
if ctx, err = tag.New(ctx, o.mutators...); err != nil {
return err
}
}
recorder(tag.FromContext(ctx), o.measurements, o.attachments)
return nil
}
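In caller terms, the new options-based API composes tags, measurements, and exemplar attachments in one call. A hedged sketch (the measure and tag key below are assumed for illustration, not part of this commit):

package example

import (
    "context"

    "go.opencensus.io/metric/metricdata"
    "go.opencensus.io/stats"
    "go.opencensus.io/tag"
    "go.opencensus.io/trace"
)

// Assumed example measure and tag key; not taken from this diff.
var (
    mLatency  = stats.Float64("example.com/latency", "request latency", stats.UnitMilliseconds)
    keyMethod = tag.MustNewKey("method")
)

func record(ctx context.Context) error {
    attachments := metricdata.Attachments{}
    if span := trace.FromContext(ctx); span != nil && span.SpanContext().IsSampled() {
        // Mirrors what ocgrpc now does via getSpanCtxAttachment.
        attachments[metricdata.AttachmentKeySpanContext] = span.SpanContext()
    }
    return stats.RecordWithOptions(ctx,
        stats.WithTags(tag.Upsert(keyMethod, "GET")),
        stats.WithAttachments(attachments),
        stats.WithMeasurements(mLatency.M(12.3)))
}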


@ -73,10 +73,10 @@ func getType(v *View) metricdata.Type {
}
}
func getLableKeys(v *View) []string {
labelKeys := []string{}
func getLabelKeys(v *View) []metricdata.LabelKey {
labelKeys := []metricdata.LabelKey{}
for _, k := range v.TagKeys {
labelKeys = append(labelKeys, k.Name())
labelKeys = append(labelKeys, metricdata.LabelKey{Key: k.Name()})
}
return labelKeys
}
@ -87,14 +87,23 @@ func viewToMetricDescriptor(v *View) *metricdata.Descriptor {
Description: v.Description,
Unit: getUnit(v.Measure.Unit()),
Type: getType(v),
LabelKeys: getLableKeys(v),
LabelKeys: getLabelKeys(v),
}
}
func toLabelValues(row *Row) []metricdata.LabelValue {
func toLabelValues(row *Row, expectedKeys []metricdata.LabelKey) []metricdata.LabelValue {
labelValues := []metricdata.LabelValue{}
tagMap := make(map[string]string)
for _, tag := range row.Tags {
labelValues = append(labelValues, metricdata.NewLabelValue(tag.Value))
tagMap[tag.Key.Name()] = tag.Value
}
for _, key := range expectedKeys {
if val, ok := tagMap[key.Key]; ok {
labelValues = append(labelValues, metricdata.NewLabelValue(val))
} else {
labelValues = append(labelValues, metricdata.LabelValue{})
}
}
return labelValues
}
@ -102,7 +111,7 @@ func toLabelValues(row *Row) []metricdata.LabelValue {
func rowToTimeseries(v *viewInternal, row *Row, now time.Time, startTime time.Time) *metricdata.TimeSeries {
return &metricdata.TimeSeries{
Points: []metricdata.Point{row.Data.toPoint(v.metricDescriptor.Type, now)},
LabelValues: toLabelValues(row),
LabelValues: toLabelValues(row, v.metricDescriptor.LabelKeys),
StartTime: startTime,
}
}


@ -236,6 +236,8 @@ func (w *worker) reportView(v *viewInternal, now time.Time) {
}
func (w *worker) reportUsage(now time.Time) {
w.mu.Lock()
defer w.mu.Unlock()
for _, v := range w.views {
w.reportView(v, now)
}


@ -121,6 +121,8 @@ type retrieveDataResp struct {
}
func (cmd *retrieveDataReq) handleCommand(w *worker) {
w.mu.Lock()
defer w.mu.Unlock()
vi, ok := w.views[cmd.v]
if !ok {
cmd.c <- &retrieveDataResp{
@ -153,6 +155,8 @@ type recordReq struct {
}
func (cmd *recordReq) handleCommand(w *worker) {
w.mu.Lock()
defer w.mu.Unlock()
for _, m := range cmd.ms {
if (m == stats.Measurement{}) { // not registered
continue

vendor/go.opencensus.io/tag/key.go (generated, vendored)

@ -29,6 +29,16 @@ func NewKey(name string) (Key, error) {
return Key{name: name}, nil
}
// MustNewKey creates or retrieves a string key identified by name.
// An invalid key name raises a panic.
func MustNewKey(name string) Key {
k, err := NewKey(name)
if err != nil {
panic(err)
}
return k
}
// Name returns the name of the key.
func (k Key) Name() string {
return k.name

vendor/go.opencensus.io/tag/map.go (generated, vendored)

@ -28,10 +28,15 @@ type Tag struct {
Value string
}
type tagContent struct {
value string
m metadatas
}
// Map is a map of tags. Use New to create a context containing
// a new Map.
type Map struct {
m map[Key]string
m map[Key]tagContent
}
// Value returns the value for the key if a value for the key exists.
@ -40,7 +45,7 @@ func (m *Map) Value(k Key) (string, bool) {
return "", false
}
v, ok := m.m[k]
return v, ok
return v.value, ok
}
func (m *Map) String() string {
@ -62,21 +67,21 @@ func (m *Map) String() string {
return buffer.String()
}
func (m *Map) insert(k Key, v string) {
func (m *Map) insert(k Key, v string, md metadatas) {
if _, ok := m.m[k]; ok {
return
}
m.m[k] = v
m.m[k] = tagContent{value: v, m: md}
}
func (m *Map) update(k Key, v string) {
func (m *Map) update(k Key, v string, md metadatas) {
if _, ok := m.m[k]; ok {
m.m[k] = v
m.m[k] = tagContent{value: v, m: md}
}
}
func (m *Map) upsert(k Key, v string) {
m.m[k] = v
func (m *Map) upsert(k Key, v string, md metadatas) {
m.m[k] = tagContent{value: v, m: md}
}
func (m *Map) delete(k Key) {
@ -84,7 +89,7 @@ func (m *Map) delete(k Key) {
}
func newMap() *Map {
return &Map{m: make(map[Key]string)}
return &Map{m: make(map[Key]tagContent)}
}
// Mutator modifies a tag map.
@ -95,13 +100,17 @@ type Mutator interface {
// Insert returns a mutator that inserts a
// value associated with k. If k already exists in the tag map,
// mutator doesn't update the value.
func Insert(k Key, v string) Mutator {
// Metadata applies metadata to the tag. It is optional.
// Metadatas are applied in the order in which they are provided.
// If more than one metadata updates the same attribute then
// the update from the last metadata prevails.
func Insert(k Key, v string, mds ...Metadata) Mutator {
return &mutator{
fn: func(m *Map) (*Map, error) {
if !checkValue(v) {
return nil, errInvalidValue
}
m.insert(k, v)
m.insert(k, v, createMetadatas(mds...))
return m, nil
},
}
@ -110,13 +119,17 @@ func Insert(k Key, v string) Mutator {
// Update returns a mutator that updates the
// value of the tag associated with k with v. If k doesn't
// exists in the tag map, the mutator doesn't insert the value.
func Update(k Key, v string) Mutator {
// Metadata applies metadata to the tag. It is optional.
// Metadatas are applied in the order in which they are provided.
// If more than one metadata updates the same attribute then
// the update from the last metadata prevails.
func Update(k Key, v string, mds ...Metadata) Mutator {
return &mutator{
fn: func(m *Map) (*Map, error) {
if !checkValue(v) {
return nil, errInvalidValue
}
m.update(k, v)
m.update(k, v, createMetadatas(mds...))
return m, nil
},
}
@ -126,18 +139,37 @@ func Update(k Key, v string) Mutator {
// value of the tag associated with k with v. It inserts the
// value if k doesn't exist already. It mutates the value
// if k already exists.
func Upsert(k Key, v string) Mutator {
// Metadata applies metadata to the tag. It is optional.
// Metadatas are applied in the order in which they are provided.
// If more than one metadata updates the same attribute then
// the update from the last metadata prevails.
func Upsert(k Key, v string, mds ...Metadata) Mutator {
return &mutator{
fn: func(m *Map) (*Map, error) {
if !checkValue(v) {
return nil, errInvalidValue
}
m.upsert(k, v)
m.upsert(k, v, createMetadatas(mds...))
return m, nil
},
}
}
func createMetadatas(mds ...Metadata) metadatas {
var metas metadatas
if len(mds) > 0 {
for _, md := range mds {
if md != nil {
md(&metas)
}
}
} else {
WithTTL(TTLUnlimitedPropagation)(&metas)
}
return metas
}
// Delete returns a mutator that deletes
// the value associated with k.
func Delete(k Key) Mutator {
@ -160,10 +192,10 @@ func New(ctx context.Context, mutator ...Mutator) (context.Context, error) {
if !checkKeyName(k.Name()) {
return ctx, fmt.Errorf("key:%q: %v", k, errInvalidKeyName)
}
if !checkValue(v) {
if !checkValue(v.value) {
return ctx, fmt.Errorf("key:%q value:%q: %v", k.Name(), v, errInvalidValue)
}
m.insert(k, v)
m.insert(k, v.value, v.m)
}
}
var err error


@ -170,9 +170,11 @@ func Encode(m *Map) []byte {
}
eg.writeByte(byte(tagsVersionID))
for k, v := range m.m {
eg.writeByte(byte(keyTypeString))
eg.writeStringWithVarintLen(k.name)
eg.writeBytesWithVarintLen([]byte(v))
if v.m.ttl.ttl == valueTTLUnlimitedPropagation {
eg.writeByte(byte(keyTypeString))
eg.writeStringWithVarintLen(k.name)
eg.writeBytesWithVarintLen([]byte(v.value))
}
}
return eg.bytes()
}
@ -190,7 +192,7 @@ func Decode(bytes []byte) (*Map, error) {
// DecodeEach decodes the given serialized tag map, calling handler for each
// tag key and value decoded.
func DecodeEach(bytes []byte, fn func(key Key, val string)) error {
func DecodeEach(bytes []byte, fn func(key Key, val string, md metadatas)) error {
eg := &encoderGRPC{
buf: bytes,
}
@ -228,7 +230,7 @@ func DecodeEach(bytes []byte, fn func(key Key, val string)) error {
if !checkValue(val) {
return errInvalidValue
}
fn(key, val)
fn(key, val, createMetadatas(WithTTL(TTLUnlimitedPropagation)))
if err != nil {
return err
}

vendor/go.opencensus.io/tag/metadata.go (generated, vendored, new file)

@ -0,0 +1,52 @@
// Copyright 2019, OpenCensus Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package tag
const (
// valueTTLNoPropagation prevents tag from propagating.
valueTTLNoPropagation = 0
// valueTTLUnlimitedPropagation allows tag to propagate without any limits on number of hops.
valueTTLUnlimitedPropagation = -1
)
// TTL is metadata that specifies the number of hops a tag can propagate.
// Details about TTL metadata are specified at https://github.com/census-instrumentation/opencensus-specs/blob/master/tags/TagMap.md#tagmetadata
type TTL struct {
ttl int
}
var (
// TTLUnlimitedPropagation is TTL metadata that allows tag to propagate without any limits on number of hops.
TTLUnlimitedPropagation = TTL{ttl: valueTTLUnlimitedPropagation}
// TTLNoPropagation is TTL metadata that prevents tag from propagating.
TTLNoPropagation = TTL{ttl: valueTTLNoPropagation}
)
type metadatas struct {
ttl TTL
}
// Metadata applies metadatas specified by the function.
type Metadata func(*metadatas)
// WithTTL applies metadata with provided ttl.
func WithTTL(ttl TTL) Metadata {
return func(m *metadatas) {
m.ttl = ttl
}
}
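A short sketch of how TTL metadata combines with the new MustNewKey helper from key.go; the key names and values are illustrative assumptions, not from this commit:

package example

import (
    "context"

    "go.opencensus.io/tag"
)

// Assumed example keys; MustNewKey panics on an invalid name, so it suits
// package-level variables.
var (
    keyUser   = tag.MustNewKey("user")
    keyMethod = tag.MustNewKey("method")
)

func tagged(ctx context.Context) (context.Context, error) {
    // keyUser is kept local to this process; keyMethod propagates, which is
    // the default when no Metadata is supplied (see createMetadatas above).
    return tag.New(ctx,
        tag.Upsert(keyUser, "alice", tag.WithTTL(tag.TTLNoPropagation)),
        tag.Upsert(keyMethod, "GET"))
}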


@ -25,7 +25,7 @@ func do(ctx context.Context, f func(ctx context.Context)) {
m := FromContext(ctx)
keyvals := make([]string, 0, 2*len(m.m))
for k, v := range m.m {
keyvals = append(keyvals, k.Name(), v)
keyvals = append(keyvals, k.Name(), v.value)
}
pprof.Do(ctx, pprof.Labels(keyvals...), f)
}

vendor/knative.dev/pkg/Gopkg.lock (generated, vendored)

@ -2,7 +2,7 @@
[[projects]]
digest = "1:ef8da480a66d7e8e9819261c3526685601b573e0005e84b75e47548d82021a7d"
digest = "1:5f43842d8fe08b43ada82d57e48a844800b4163d1150f7f451e81cb347fccb72"
name = "cloud.google.com/go"
packages = [
"compute/metadata",
@ -12,30 +12,38 @@
"trace/apiv2",
]
pruneopts = "NUT"
revision = "0ebda48a7f143b1cce9eb37a8c1106ac762a3430"
version = "v0.34.0"
revision = "cf81fad90a1a1de334c4fc27e23eb9a4224b627a"
version = "v0.41.0"
[[projects]]
digest = "1:43fbf05ea84c860a4e86b557d156b1e72511cd29375d3f71adb522362710aea7"
digest = "1:642cf8e80572f9dc0677b0f241c8ab2e715c9dccc215270ea873c86ddca0062c"
name = "contrib.go.opencensus.io/exporter/prometheus"
packages = ["."]
pruneopts = "NUT"
revision = "f4a2c1e53ec45636355d35fb9022b64e4bdd4a91"
version = "v0.1.0"
[[projects]]
digest = "1:83bd9ccdcc61bb43f45e4336cf9622849b5a867ef137f8b53303968202970225"
name = "contrib.go.opencensus.io/exporter/stackdriver"
packages = [
".",
"monitoredresource",
]
pruneopts = "NUT"
revision = "ab5a58af316a529613aadf9f50eeed1b6f044b2f"
version = "v0.9.2"
revision = "68e3d742b03c099c35428443886e65d9587c8d76"
version = "v0.12.2"
[[projects]]
branch = "master"
digest = "1:cef70b547ce62d12ea8e5dcb9905bccb57ea1bb253ee6809fd79a17c29ca3cd5"
name = "contrib.go.opencensus.io/resource"
packages = ["resourcekeys"]
digest = "1:7b5f423f5b0dd3dfa32a19a6183b0ab9129bff371ebf3f9efae32f87e4986d8f"
name = "contrib.go.opencensus.io/exporter/zipkin"
packages = ["."]
pruneopts = "NUT"
revision = "21591786a5e0c21806209b266cc6dfdfa85b3cdb"
revision = "30f9fad5db2c8944c21d223496e2543aeb445d4c"
version = "v0.1.1"
[[projects]]
digest = "1:4a31397b1b81c6856aab6d2d963a727b4235af18adaaedc2cc51646ae812f683"
digest = "1:acf5b7756eca7cd8133461c44771fd318ee2bef31d4cc013551165473a984ba8"
name = "github.com/aws/aws-sdk-go"
packages = [
"aws",
@ -62,6 +70,7 @@
"internal/sdkuri",
"internal/shareddefaults",
"private/protocol",
"private/protocol/json/jsonutil",
"private/protocol/query",
"private/protocol/query/queryutil",
"private/protocol/rest",
@ -69,19 +78,19 @@
"service/sts",
]
pruneopts = "NUT"
revision = "3991042237b45cf58c9d5f34295942d5533c28c6"
version = "v1.16.11"
revision = "420cda5d6383f94f7d9c231aa44bad3325181950"
version = "v1.20.20"
[[projects]]
branch = "master"
digest = "1:707ebe952a8b3d00b343c01536c79c73771d100f63ec6babeaed5c79e2b8a8dd"
name = "github.com/beorn7/perks"
packages = ["quantile"]
pruneopts = "NUT"
revision = "3a771d992973f24aa725d07868b467d1ddfceafb"
revision = "4b2b341e8d7715fae06375aa633dbb6e91b3fb46"
version = "v1.0.0"
[[projects]]
digest = "1:fa965c1fd0f17153f608037e109e62104058bc1d08d44849867795fd306fa8b8"
digest = "1:06bd9f98d7cf2097c16f820f980709eb4d04e1c7369132c351fbf1ffb54d3117"
name = "github.com/census-instrumentation/opencensus-proto"
packages = [
"gen-go/agent/common/v1",
@ -89,55 +98,59 @@
"gen-go/resource/v1",
]
pruneopts = "NUT"
revision = "7f2434bc10da710debe5c4315ed6d4df454b4024"
version = "v0.1.0"
revision = "a105b96453fe85139acc07b68de48f2cbdd71249"
version = "v0.2.0"
[[projects]]
digest = "1:6b21090f60571b20b3ddc2c8e48547dffcf409498ed6002c2cada023725ed377"
digest = "1:ffe9824d294da03b391f44e1ae8281281b4afc1bdaa9588c9097785e3af10cec"
name = "github.com/davecgh/go-spew"
packages = ["spew"]
pruneopts = "NUT"
revision = "782f4967f2dc4564575ca782fe2d04090b5faca8"
revision = "8991bc29aa16c548c550c7ff78260e27b9ab7c73"
version = "v1.1.1"
[[projects]]
digest = "1:32598368f409bbee79deb9d43569fcd92b9fb27f39155f5e166b3371217f051f"
digest = "1:4304cca260ab815326ca42d9c28fb843342748267034c51963e13f5e54e727d1"
name = "github.com/evanphx/json-patch"
packages = ["."]
pruneopts = "NUT"
revision = "72bf35d0ff611848c1dc9df0f976c81192392fa5"
version = "v4.1.0"
revision = "026c730a0dcc5d11f93f1cf1cc65b01247ea7b6f"
version = "v4.5.0"
[[projects]]
digest = "1:abfe129dc92b16fbf0cc9d6336096a2823151756f62072a700eb10754141b38e"
digest = "1:81466b4218bf6adddac2572a30ac733a9255919bc2f470b4827a317bd4ee1756"
name = "github.com/ghodss/yaml"
packages = ["."]
pruneopts = "NUT"
revision = "73d445a93680fa1a78ae23a5839bad48f32ba1ee"
revision = "0ca9ea5df5451ffdf184b4428c902747c2c11cd7"
version = "v1.0.0"
[[projects]]
digest = "1:373397317168dd5ac00efda13940668f1947fd641f572b9cf386a86a99c63ca9"
digest = "1:53151cc4366e3945282d4b783fd41f35222cabbc75601e68d8133648c63498d1"
name = "github.com/gobuffalo/envy"
packages = ["."]
pruneopts = "NUT"
revision = "801d7253ade1f895f74596b9a96147ed2d3b087e"
version = "v1.6.11"
revision = "043cb4b8af871b49563291e32c66bb84378a60ac"
version = "v1.7.0"
[[projects]]
digest = "1:a6afc27b2a73a5506832f3c5a1c19a30772cb69e7bd1ced4639eb36a55db224f"
digest = "1:a1b2a5e38f79688ee8250942d5fa960525fceb1024c855c7bc76fa77b0f3cca2"
name = "github.com/gogo/protobuf"
packages = [
"proto",
"sortkeys",
]
pruneopts = "NUT"
revision = "c0656edd0d9eab7c66d1eb0c568f9039345796f7"
revision = "ba06b47c162d49f2af050fb4c75bcbc86a159d5c"
version = "v1.2.1"
[[projects]]
digest = "1:78b8040ece2ff622580def2708b9eb0b2857711b6744c475439bf337e9c677ea"
branch = "master"
digest = "1:e2b86e41f3d669fc36b50d31d32d22c8ac656c75aa5ea89717ce7177e134ff2a"
name = "github.com/golang/glog"
packages = ["."]
pruneopts = "NUT"
revision = "44145f04b68cf362d9c4df2182967c2275eaefed"
revision = "23def4e6c14b4da8ac2ed8007337bc5eb5007998"
[[projects]]
branch = "master"
@ -145,10 +158,10 @@
name = "github.com/golang/groupcache"
packages = ["lru"]
pruneopts = "NUT"
revision = "5b532d6fd5efaf7fa130d4e859a2fde0fc3a9e1b"
revision = "869f871628b6baa9cfbc11732cdf6546b17c1298"
[[projects]]
digest = "1:4dacf728c83400b3e9d1d3025dd3c1e93e9a1b033726d1b193dc209f3fa9cb7a"
digest = "1:f5a98770ab68c1146ee5cc14ed24aafa2bb1a2b3c89cbeadc9eb913b1f9d930a"
name = "github.com/golang/protobuf"
packages = [
"proto",
@ -162,16 +175,16 @@
"ptypes/wrappers",
]
pruneopts = "NUT"
revision = "aa810b61a9c79d51363740d207bb46cf8e620ed5"
version = "v1.2.0"
revision = "6c65a5562fc06764971b7c5d05c76c75e84bdbf7"
version = "v1.3.2"
[[projects]]
branch = "master"
digest = "1:245bd4eb633039cd66106a5d340ae826d87f4e36a8602fcc940e14176fd26ea7"
digest = "1:05f95ffdfcf651bdb0f05b40b69e7f5663047f8da75c72d58728acb59b5cc107"
name = "github.com/google/btree"
packages = ["."]
pruneopts = "NUT"
revision = "e89373fe6b4a7413d7acd6da1725b83ef713e6e4"
revision = "4030bb1f1f0c35b30ca7009e9ebd06849dd45306"
version = "v1.0.0"
[[projects]]
digest = "1:010d46ea3c1e730897e53058d1013a963f3f987675dda87df64f891b945281db"
@ -188,15 +201,16 @@
revision = "6f77996f0c42f7b84e5a2b252227263f93432e9b"
[[projects]]
digest = "1:f9425215dccf1c63f659ec781ca46bc81804341821d0cd8d2459c5b58f8bd067"
digest = "1:52c5834e2bebac9030c97cc0798ac11c3aa8a39f098aeb419f142533da6cd3cc"
name = "github.com/google/gofuzz"
packages = ["."]
pruneopts = "NUT"
revision = "44d81051d367757e1c7c6a5a86423ece9afcf63c"
revision = "f140a6486e521aad38f5917de355cbf147cc0496"
version = "v1.0.0"
[[projects]]
branch = "master"
digest = "1:0d5e3798bfa2642ac268341c96710b8def1f3cbc3bc803c421d90704d72107d8"
digest = "1:a8674ea5ceb0c2a72a9b9518415b73d26d6c77aec49fe7fee78f15d6d137fc3a"
name = "github.com/google/licenseclassifier"
packages = [
".",
@ -207,7 +221,7 @@
"stringclassifier/searchset/tokenizer",
]
pruneopts = "NUT"
revision = "e979a0b10eebe748549c702a25e997c556349da6"
revision = "c3068f13fcc3961fd05f96f13c8250e350db4209"
[[projects]]
digest = "1:ab3ec1fe3e39bac4b3ab63390767766622be35b7cab03f47f787f9ec60522a53"
@ -218,18 +232,15 @@
version = "v1.1.1"
[[projects]]
digest = "1:fd4d1f4c2d75aee3833ee7d8ef11fcf42ddec3c63d1819548288c3d868d6eb14"
digest = "1:4b76f3e067eed897a45242383a2aa4d0a2fdbf73a8d00c03167dba80c43630b1"
name = "github.com/googleapis/gax-go"
packages = [
".",
"v2",
]
packages = ["v2"]
pruneopts = "NUT"
revision = "c8a15bac9b9fe955bd9f900272f9a306465d28cf"
version = "v2.0.3"
revision = "bd5b16380fd03dc758d11cef74ba2e3bc8b0e8c2"
version = "v2.0.5"
[[projects]]
digest = "1:27b4ab41ffdc76ad6db56db327a4db234a59588ef059fc3fd678ba0bc6b9094f"
digest = "1:459a00967aaf06edff3228e128dd243d7c91b0fc11ad2f7ceaa98f094bf66796"
name = "github.com/googleapis/gnostic"
packages = [
"OpenAPIv2",
@ -237,7 +248,8 @@
"extensions",
]
pruneopts = "NUT"
revision = "0c5108395e2debce0d731cf0287ddf7242066aba"
revision = "e73c7ec21d36ddb0711cb36d1502d18363b5c2c9"
version = "v0.3.0"
[[projects]]
digest = "1:4a0c072e44da763409da72d41492373a034baf2e6d849c76d239b4abdfbb6c49"
@ -249,32 +261,33 @@
[[projects]]
branch = "master"
digest = "1:7fdf3223c7372d1ced0b98bf53457c5e89d89aecbad9a77ba9fcc6e01f9e5621"
digest = "1:a86d65bc23eea505cd9139178e4d889733928fe165c7a008f41eaab039edf9df"
name = "github.com/gregjones/httpcache"
packages = [
".",
"diskcache",
]
pruneopts = "NUT"
revision = "9cad4c3443a7200dd6400aef47183728de563a38"
revision = "901d90724c7919163f472a9812253fb26761123d"
[[projects]]
digest = "1:475b179287e8afdcd352014b2c2500e67decdf63e66125e2129286873453e1cd"
digest = "1:52094d0f8bdf831d1a2401e9b6fee5795fdc0b2a2d1f8bb1980834c289e79129"
name = "github.com/hashicorp/golang-lru"
packages = [
".",
"simplelru",
]
pruneopts = "NUT"
revision = "a0d98a5f288019575c6d1f4bb1573fef2d1fcdc4"
revision = "7087cb70de9f7a8bc0a10c375cb0d2280a8edf9c"
version = "v0.5.1"
[[projects]]
digest = "1:9a52adf44086cead3b384e5d0dbf7a1c1cce65e67552ee3383a8561c42a18cd3"
digest = "1:aaa38889f11896ee3644d77e17dc7764cc47f5f3d3b488268df2af2b52541c5f"
name = "github.com/imdario/mergo"
packages = ["."]
pruneopts = "NUT"
revision = "9f23e2d6bd2a77f959b2bf6acdbefd708a83a4a4"
version = "v0.3.6"
revision = "7c29201646fa3de8506f701213473dd407f19646"
version = "v0.3.7"
[[projects]]
digest = "1:1f2aebae7e7c856562355ec0198d8ca2fa222fb05e5b1b66632a1fce39631885"
@ -300,14 +313,14 @@
[[projects]]
branch = "master"
digest = "1:1bc2182db9fca862cd2b3a028aad73e473705be1c3f2454429a37b8626e16eda"
digest = "1:5197df8038ae4c7d05c2edda32a77d7f16faa635df3c560f01fb73fa42682f69"
name = "github.com/knative/test-infra"
packages = [
"scripts",
"tools/dep-collector",
]
pruneopts = "UT"
revision = "2b0eeafd5300d91d0c96a719bd230fb3b3dd96ce"
revision = "81861c7c2060af68e3dbdc72bcd3a2f0584566d2"
[[projects]]
digest = "1:56dbf15e091bf7926cb33a57cb6bdfc658fc6d3498d2f76f10a97ce7856f1fde"
@ -350,7 +363,7 @@
version = "1.0.1"
[[projects]]
digest = "1:22d4043da943b919108e0d1b07983b8d29edeadfba9fb8f3213208d3e9798aae"
digest = "1:1dd0ef584fe04a3e14297c004da996de66c1816666d25836d24696bea6194a63"
name = "github.com/openzipkin/zipkin-go"
packages = [
".",
@ -362,24 +375,24 @@
"reporter/recorder",
]
pruneopts = "NUT"
revision = "1b5162aa314e6ccfcf83777bfb5218988c9e8283"
version = "v0.1.6"
revision = "1277a5f30075b9c13d37775aed4f0f3b44d1a710"
version = "v0.2.0"
[[projects]]
branch = "master"
digest = "1:3bf17a6e6eaa6ad24152148a631d18662f7212e21637c2699bff3369b7f00fa2"
digest = "1:89da0f0574bc94cfd0ac8b59af67bf76cdd110d503df2721006b9f0492394333"
name = "github.com/petar/GoLLRB"
packages = ["llrb"]
pruneopts = "NUT"
revision = "53be0d36a84c2a886ca057d34b6aa4468df9ccb4"
revision = "33fb24c13b99c46c93183c291836c573ac382536"
[[projects]]
digest = "1:6c6d91dc326ed6778783cff869c49fb2f61303cdd2ebbcf90abe53505793f3b6"
digest = "1:e1b94bd98c62fc2f905621fc6ba8209b7004e4513a1dfecb12a3de56ec2bb519"
name = "github.com/peterbourgon/diskv"
packages = ["."]
pruneopts = "NUT"
revision = "5f041e8faa004a95c88a202771f4cc3e991971e6"
version = "v2.0.1"
revision = "0be1b92a6df0e4f5cb0a5d15fb7f643d0ad93ce6"
version = "v3.0.0"
[[projects]]
digest = "1:14715f705ff5dfe0ffd6571d7d201dd8e921030f8070321a79380d8ca4ec1a24"
@ -390,7 +403,7 @@
version = "v0.8.1"
[[projects]]
digest = "1:7c7cfeecd2b7147bcfec48a4bf622b4879e26aec145a9e373ce51d0c23b16f6b"
digest = "1:097cc61836050f45cbb712ae3bb45d66fba464c16b8fac09907fa3c1f753eff6"
name = "github.com/prometheus/client_golang"
packages = [
"prometheus",
@ -398,8 +411,8 @@
"prometheus/promhttp",
]
pruneopts = "NUT"
revision = "505eaef017263e299324067d40ca2c48f6a2cf50"
version = "v0.9.2"
revision = "4ab88e80c249ed361d3299e2930427d9ac43ef8d"
version = "v1.0.0"
[[projects]]
branch = "master"
@ -407,11 +420,10 @@
name = "github.com/prometheus/client_model"
packages = ["go"]
pruneopts = "NUT"
revision = "5c3871d89910bfb32f5fcab2aa4b9ec68e65a99f"
revision = "fd36f4220a901265f90734c3183c5f0c91daa0b8"
[[projects]]
branch = "master"
digest = "1:fad5a35eea6a1a33d6c8f949fbc146f24275ca809ece854248187683f52cc30b"
digest = "1:d03ca24670416dc8fccc78b05d6736ec655416ca7db0a028e8fb92cfdfe3b55e"
name = "github.com/prometheus/common"
packages = [
"expfmt",
@ -419,20 +431,19 @@
"model",
]
pruneopts = "NUT"
revision = "c7de2306084e37d54b8be01f3541a8464345e9a5"
revision = "31bed53e4047fd6c510e43a941f90cb31be0972a"
version = "v0.6.0"
[[projects]]
branch = "master"
digest = "1:102dea0c03a915acfc634b7c67f2662012b5483b56d9025e33f5188e112759b6"
digest = "1:19305fc369377c111c865a7a01e11c675c57c52a932353bbd4ea360bd5b72d99"
name = "github.com/prometheus/procfs"
packages = [
".",
"internal/util",
"nfs",
"xfs",
"internal/fs",
]
pruneopts = "NUT"
revision = "185b4288413d2a0dd0806f78c90dde719829e5ae"
revision = "3f98efb27840a48a7a2898ec80be07674d19f9c8"
version = "v0.0.3"
[[projects]]
digest = "1:e09ada96a5a41deda4748b1659cc8953961799e798aea557257b56baee4ecaf3"
@ -462,20 +473,20 @@
version = "v1.0.3"
[[projects]]
digest = "1:b8baa7541ef444be218da02d3a7b607d33513263660e489d86d429afbffcdd86"
digest = "1:0e3fd52087079d1289983e4fef32268ca965973f5370b69204e2934185527baa"
name = "go.opencensus.io"
packages = [
".",
"exporter/prometheus",
"exporter/zipkin",
"internal",
"internal/tagencoding",
"metric/metricdata",
"metric/metricexport",
"metric/metricproducer",
"plugin/ocgrpc",
"plugin/ochttp",
"plugin/ochttp/propagation/b3",
"resource",
"resource/resourcekeys",
"stats",
"stats/internal",
"stats/view",
@ -486,16 +497,16 @@
"trace/tracestate",
]
pruneopts = "NUT"
revision = "75c0cca22312e51bfd4fafdbe9197ae399e18b38"
version = "v0.20.2"
revision = "9c377598961b706d1542bd2d84d538b5094d596e"
version = "v0.22.0"
[[projects]]
digest = "1:22f696cee54865fb8e9ff91df7b633f6b8f22037a8015253c6b6a71ca82219c7"
digest = "1:cc9d86ec4e6e3bdf87e3a421273bfeed003cf8e21351c0302fe8b0eb7b10efe6"
name = "go.uber.org/atomic"
packages = ["."]
pruneopts = "NUT"
revision = "1ea20fb1cbb1cc08cbd0d913a96dead89aa18289"
version = "v1.3.2"
revision = "df976f2515e274675050de7b3f42545de80594fd"
version = "v1.4.0"
[[projects]]
digest = "1:58ca93bdf81bac106ded02226b5395a0595d5346cdc4caa8d9c1f3a5f8f9976e"
@ -522,15 +533,16 @@
revision = "67bc79d13d155c02fd008f721863ff8cc5f30659"
[[projects]]
digest = "1:624a05c7c6ed502bf77364cd3d54631383dafc169982fddd8ee77b53c3d9cccf"
branch = "master"
digest = "1:bbe51412d9915d64ffaa96b51d409e070665efc5194fcf145c4a27d4133107a4"
name = "golang.org/x/crypto"
packages = ["ssh/terminal"]
pruneopts = "NUT"
revision = "81e90905daefcd6fd217b62423c0908922eadb30"
revision = "4def268fd1a49955bfb3dda92fe3db4f924f2285"
[[projects]]
branch = "master"
digest = "1:3033eba8bb0c8f2c6720e68e4c14e55b577ae9debb5f5b7b8cc6f319d89edc82"
digest = "1:5578b99717f08e6480d7e0480f758749c12f9cc5da19a33a863dc7307fd699fb"
name = "golang.org/x/net"
packages = [
"context",
@ -543,11 +555,11 @@
"trace",
]
pruneopts = "NUT"
revision = "49bb7cea24b1df9410e1712aa6433dae904ff66a"
revision = "da137c7871d730100384dbcf36e6f8fa493aef5b"
[[projects]]
branch = "master"
digest = "1:dcb89c032286a9c3c5118a1496f8e0e237c1437f5356ac9602f6fdef560a5c21"
digest = "1:1519760444b90c560eb01373869bc66fd539e6fe1bf77af22047c43edc40ab35"
name = "golang.org/x/oauth2"
packages = [
".",
@ -557,38 +569,40 @@
"jwt",
]
pruneopts = "NUT"
revision = "c57b0facaced709681d9f90397429b9430a74754"
revision = "0f29369cfe4552d0e4bcddc57cc75f4d7e672a33"
[[projects]]
branch = "master"
digest = "1:c313aef534e493304f3666fbd24dca5932ebf776a82b7a40f961c9355794a1b1"
digest = "1:a2fc247e64b5dafd3251f12d396ec85f163d5bb38763c4997856addddf6e78d8"
name = "golang.org/x/sync"
packages = [
"errgroup",
"semaphore",
]
pruneopts = "NUT"
revision = "1d60e4601c6fd243af51cc01ddf169918a5407ca"
revision = "112230192c580c3556b8cee6403af37a4fc5f28c"
[[projects]]
branch = "master"
digest = "1:a801d3c417117b67a96353daad340b250619780b75c29b652ea13697c946553e"
digest = "1:5f0606c755c423ee9970d55028e0ee09df03e33297a39d6b83c2502dc9a2193f"
name = "golang.org/x/sys"
packages = [
"unix",
"windows",
]
pruneopts = "NUT"
revision = "e072cadbbdc8dd3d3ffa82b8b4b9304c261d9311"
revision = "fae7ac547cb717d141c433a2a173315e216b64c4"
[[projects]]
digest = "1:e7071ed636b5422cc51c0e3a6cebc229d6c9fffc528814b519a980641422d619"
digest = "1:8d8faad6b12a3a4c819a3f9618cb6ee1fa1cfc33253abeeea8b55336721e3405"
name = "golang.org/x/text"
packages = [
"collate",
"collate/build",
"internal/colltab",
"internal/gen",
"internal/language",
"internal/language/compact",
"internal/tag",
"internal/triegen",
"internal/ucd",
@ -601,31 +615,41 @@
"unicode/rangetable",
]
pruneopts = "NUT"
revision = "f21a4dfb5e38f5895301dc265a8def02365cc3d0"
version = "v0.3.0"
revision = "342b2e1fbaa52c93f31447ad2c6abc048c63e475"
version = "v0.3.2"
[[projects]]
digest = "1:d37b0ef2944431fe9e8ef35c6fffc8990d9e2ca300588df94a6890f3649ae365"
branch = "master"
digest = "1:9fdc2b55e8e0fafe4b41884091e51e77344f7dc511c5acedcfd98200003bff90"
name = "golang.org/x/time"
packages = ["rate"]
pruneopts = "NUT"
revision = "f51c12702a4d776e4c1fa9b0fabab841babae631"
revision = "9d24e82272b4f38b78bc8cff74fa936d31ccd8ef"
[[projects]]
branch = "master"
digest = "1:e1c96c8c8ce0af57da9dccb008e540b3d13b55ea04b530fb4fceb81706082bdd"
digest = "1:59988b91533530837bdec96e34fefa9df629d8ee89f3603387c57763b9fed432"
name = "golang.org/x/tools"
packages = [
"go/ast/astutil",
"go/gcexportdata",
"go/internal/gcimporter",
"go/internal/packagesdriver",
"go/packages",
"go/types/typeutil",
"imports",
"internal/fastwalk",
"internal/gopathwalk",
"internal/imports",
"internal/module",
"internal/semver",
]
pruneopts = "NUT"
revision = "bfb5194568d3c40db30de765edc44cae9fc94671"
revision = "8b927904ee0dec805c89aaf9172f4459296ed6e8"
[[projects]]
branch = "master"
digest = "1:7689634b1a2940f3e725a37a7598b5462674a5b016b17d8ce22c8f71cacb0b34"
digest = "1:9b9245bd124d95af7072487cd1e5861174b859ebc31cbe9fbab3b88456701485"
name = "google.golang.org/api"
packages = [
"googleapi/transport",
@ -639,10 +663,10 @@
"transport/http/internal/propagation",
]
pruneopts = "NUT"
revision = "ce4acf611b3920b111e21272a15ddaea10c1fd2e"
revision = "aa15faf3c8a1cffc77fc3dabe95703bb12c5f6a9"
[[projects]]
digest = "1:898bf528e5c601c4a1111586f75ab9515467ebe7a41ae849d5a839720d4e2580"
digest = "1:a955e7c44c2be14b61aa2ddda744edfdfbc6817e993703a16e303c277ba84449"
name = "google.golang.org/appengine"
packages = [
".",
@ -659,12 +683,12 @@
"urlfetch",
]
pruneopts = "NUT"
revision = "e9657d882bb81064595ca3b56cbe2546bbabf7b1"
version = "v1.4.0"
revision = "b2f4a3cf3c67576a2ee09e1fe62656a5086ce880"
version = "v1.6.1"
[[projects]]
branch = "master"
digest = "1:3689f4cc57cc55b4631efc4b778c47d2a888a7060b4108f42cf289a2033be5ba"
digest = "1:0ee5f291bbeb4c9664aad14ad9a64e52cdeed406c97549beb6028adb9d7b8afc"
name = "google.golang.org/genproto"
packages = [
"googleapis/api",
@ -680,20 +704,30 @@
"protobuf/field_mask",
]
pruneopts = "NUT"
revision = "e7d98fc518a78c9f8b5ee77be7b0b317475d89e1"
revision = "3bdd9d9f5532d75d09efb230bd767d265245cfe5"
[[projects]]
digest = "1:40d377bfddee53c669db275071aa08b68d021941311580d902ab7c862d8741c1"
digest = "1:89cc3cf640fa24f7345509981e7ab088ee8d4d4f08cf3b5783508856b146b438"
name = "google.golang.org/grpc"
packages = [
".",
"balancer",
"balancer/base",
"balancer/grpclb",
"balancer/grpclb/grpc_lb_v1",
"balancer/roundrobin",
"binarylog/grpc_binarylog_v1",
"codes",
"connectivity",
"credentials",
"credentials/alts",
"credentials/alts/internal",
"credentials/alts/internal/authinfo",
"credentials/alts/internal/conn",
"credentials/alts/internal/handshaker",
"credentials/alts/internal/handshaker/service",
"credentials/alts/internal/proto/grpc_gcp",
"credentials/google",
"credentials/internal",
"credentials/oauth",
"encoding",
@ -701,6 +735,7 @@
"grpclog",
"internal",
"internal/backoff",
"internal/balancerload",
"internal/binarylog",
"internal/channelz",
"internal/envconfig",
@ -715,13 +750,14 @@
"resolver",
"resolver/dns",
"resolver/passthrough",
"serviceconfig",
"stats",
"status",
"tap",
]
pruneopts = "NUT"
revision = "df014850f6dee74ba2fc94874043a9f3f75fbfd8"
version = "v1.17.0"
revision = "1d89a3c832915b2314551c1d2a506874d62e53f7"
version = "v1.22.0"
[[projects]]
digest = "1:2d1fbdc6777e5408cabeb02bf336305e724b925ff4546ded0fa8715a7267922a"
@ -1078,28 +1114,30 @@
revision = "e17681d19d3ac4837a019ece36c2a0ec31ffe985"
[[projects]]
digest = "1:c263611800c3a97991dbcf9d3bc4de390f6224aaa8ca0a7226a9d734f65a416a"
digest = "1:43099cc4ed575c40f80277c7ba7168df37d0c663bdc4f541325430bd175cce8a"
name = "k8s.io/klog"
packages = ["."]
pruneopts = "NUT"
revision = "71442cd4037d612096940ceb0f3fec3f7fff66e0"
version = "v0.2.0"
revision = "d98d8acdac006fb39831f1b25640813fef9c314f"
version = "v0.3.3"
[[projects]]
branch = "master"
digest = "1:a2c842a1e0aed96fd732b535514556323a6f5edfded3b63e5e0ab1bce188aa54"
digest = "1:42674e29bf0cf4662d49bd9528e24b9ecc4895b32d0be281f9cf04d3a7671846"
name = "k8s.io/kube-openapi"
packages = ["pkg/util/proto"]
pruneopts = "NUT"
revision = "e3762e86a74c878ffed47484592986685639c2cd"
revision = "33be087ad058f99c78e067996202b60230737e49"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
input-imports = [
"cloud.google.com/go/compute/metadata",
"contrib.go.opencensus.io/exporter/prometheus",
"contrib.go.opencensus.io/exporter/stackdriver",
"contrib.go.opencensus.io/exporter/stackdriver/monitoredresource",
"contrib.go.opencensus.io/exporter/zipkin",
"github.com/davecgh/go-spew/spew",
"github.com/evanphx/json-patch",
"github.com/ghodss/yaml",
@ -1120,8 +1158,6 @@
"github.com/pkg/errors",
"github.com/rogpeppe/go-internal/semver",
"github.com/spf13/pflag",
"go.opencensus.io/exporter/prometheus",
"go.opencensus.io/exporter/zipkin",
"go.opencensus.io/plugin/ochttp",
"go.opencensus.io/plugin/ochttp/propagation/b3",
"go.opencensus.io/stats",

View File

@ -57,7 +57,7 @@ required = [
[[constraint]]
name = "contrib.go.opencensus.io/exporter/stackdriver"
version = "0.9.2"
version = "v0.12.2"
[[constraint]]
name = "github.com/knative/test-infra"

View File

@ -51,9 +51,7 @@ aliases:
- chaodaiG
- coryrc
- dushyanthsc
- ericKlawitter
- Fredy-Z
- nbarthwal
- srinivashegde86
- steuhs
- yt3liu

View File

@ -147,8 +147,8 @@ func TestFoo(t *testing.T) {
## Starting controllers
All we do is import the controller packages and pass their constructors along
with a component name to our shared main. Then our shared main method sets it
all up and runs our controllers.
with a component name (single word) to our shared main. Then our shared main
method sets it all up and runs our controllers.
```go
package main
@ -163,7 +163,7 @@ import (
)
func main() {
sharedmain.Main("component-name",
sharedmain.Main("componentname",
bar.NewController,
blah.NewController,
)
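A fuller, self-contained sketch of the same pattern (not part of this diff): it assumes the injection.ControllerConstructor signature that knative.dev/pkg uses, func(context.Context, configmap.Watcher) *controller.Impl, and the constructor below is a hypothetical stand-in for a real one such as bar.NewController.

```go
package main

import (
	"context"

	"knative.dev/pkg/configmap"
	"knative.dev/pkg/controller"
	"knative.dev/pkg/injection/sharedmain"
)

// newFakeController is a hypothetical stand-in for a real controller
// constructor; only its signature matters to sharedmain.
func newFakeController(ctx context.Context, cmw configmap.Watcher) *controller.Impl {
	// A real constructor would wire up informers and a Reconciler and
	// return controller.NewImpl(...); elided here.
	return nil
}

func main() {
	// Single-word component name, per the change above.
	sharedmain.Main("mycomponent",
		newFakeController,
	)
}
```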

View File

@ -56,7 +56,7 @@ limitations under the License.
// )
//
// func main() {
// sharedmain.Main("my-component",
// sharedmain.Main("mycomponent",
// // We pass in the list of controllers to construct, and that's it!
// // If we forget to add this, go will complain about the unused import.
// matt.NewController,

View File

@ -111,16 +111,28 @@ func (r *ShortDiffReporter) Report(rs cmp.Result) {
t := cur.Type()
var diff string
// Prefix struct values with the types to add clarity in output
if !vx.IsValid() || !vy.IsValid() {
if !vx.IsValid() && !vy.IsValid() {
r.err = fmt.Errorf("Unable to diff %+v and %+v on path %#v", vx, vy, r.path)
} else if t.Kind() == reflect.Struct {
diff = fmt.Sprintf("%#v:\n\t-: %+v: \"%+v\"\n\t+: %+v: \"%+v\"\n", r.path, t, vx, t, vy)
} else {
diff = fmt.Sprintf("%#v:\n\t-: \"%+v\"\n\t+: \"%+v\"\n", r.path, vx, vy)
diff = fmt.Sprintf("%#v:\n", r.path)
if vx.IsValid() {
diff += r.diffString("-", t, vx)
}
if vy.IsValid() {
diff += r.diffString("+", t, vy)
}
}
r.diffs = append(r.diffs, diff)
}
func (r *ShortDiffReporter) diffString(diffType string, t reflect.Type, v reflect.Value) string {
if t.Kind() == reflect.Struct {
return fmt.Sprintf("\t%s: %+v: \"%+v\"\n", diffType, t, v)
} else {
return fmt.Sprintf("\t%s: \"%+v\"\n", diffType, v)
}
}
// PopStep implements the cmp.Reporter.
func (r *ShortDiffReporter) PopStep() {
r.path = r.path[:len(r.path)-1]

View File

@ -18,7 +18,7 @@ import (
"net/http"
"sync"
"go.opencensus.io/exporter/prometheus"
"contrib.go.opencensus.io/exporter/prometheus"
"go.opencensus.io/stats/view"
"go.uber.org/zap"
)
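The hunk above only changes the import path: as of OpenCensus v0.22.0 the Prometheus exporter lives in a separate contrib module. A minimal sketch of using it under that new path, assuming the contrib module's NewExporter/Options API; the namespace and port are illustrative only.

```go
package main

import (
	"log"
	"net/http"

	// New import path: the exporter moved out of go.opencensus.io.
	"contrib.go.opencensus.io/exporter/prometheus"
	"go.opencensus.io/stats/view"
)

func main() {
	exporter, err := prometheus.NewExporter(prometheus.Options{Namespace: "example"})
	if err != nil {
		log.Fatalf("failed to create the Prometheus exporter: %v", err)
	}
	// Register so that recorded view data is exported.
	view.RegisterExporter(exporter)

	// The exporter is also an http.Handler that serves the scrape endpoint.
	http.Handle("/metrics", exporter)
	log.Fatal(http.ListenAndServe(":9090", nil))
}
```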

View File

@ -4,10 +4,11 @@ import (
"errors"
"sync"
"contrib.go.opencensus.io/exporter/zipkin"
zipkinmodel "github.com/openzipkin/zipkin-go/model"
zipkinreporter "github.com/openzipkin/zipkin-go/reporter"
"go.opencensus.io/exporter/zipkin"
"go.opencensus.io/trace"
"knative.dev/pkg/tracing/config"
)
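Likewise, the Zipkin trace exporter now comes from a contrib module rather than the core go.opencensus.io package. A minimal sketch under the same assumption; the collector URL and service name are hypothetical.

```go
package main

import (
	// New import path for the Zipkin exporter.
	openzipkin "contrib.go.opencensus.io/exporter/zipkin"
	zipkinmodel "github.com/openzipkin/zipkin-go/model"
	zipkinhttp "github.com/openzipkin/zipkin-go/reporter/http"
	"go.opencensus.io/trace"
)

func main() {
	// Hypothetical collector URL and service name, for illustration only.
	reporter := zipkinhttp.NewReporter("http://localhost:9411/api/v2/spans")
	defer reporter.Close()

	endpoint := &zipkinmodel.Endpoint{ServiceName: "example-service"}
	exporter := openzipkin.NewExporter(reporter, endpoint)

	trace.RegisterExporter(exporter)
	trace.ApplyConfig(trace.Config{DefaultSampler: trace.AlwaysSample()})
}
```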