Switch to Google's v4 safebrowsing library. (#2446)
Right now we are using a third-party client for the Google Safe Browsing API, but Google has recently released their own [Golang library](https://github.com/google/safebrowsing) which also supports the newer v4 API. Using this library will let us avoid fixing some lingering race conditions & unpleasantness in our fork of `go-safebrowsing-api`. This PR adds support for using the Google library & the v4 API in place of our existing fork when the `GoogleSafeBrowsingV4` feature flag is enabled in the VA "features" configuration. Resolves https://github.com/letsencrypt/boulder/issues/1863 Per `CONTRIBUTING.md` I also ran the unit tests for the new dependency: ``` daniel@XXXXXXXXXX:~/go/src/github.com/google/safebrowsing$ go test ./... ok github.com/google/safebrowsing 3.274s ? github.com/google/safebrowsing/cmd/sblookup [no test files] ? github.com/google/safebrowsing/cmd/sbserver [no test files] ? github.com/google/safebrowsing/cmd/sbserver/statik [no test files] ? github.com/google/safebrowsing/internal/safebrowsing_proto [no test files] ok github.com/google/safebrowsing/vendor/github.com/golang/protobuf/jsonpb 0.012s ? github.com/google/safebrowsing/vendor/github.com/golang/protobuf/jsonpb/jsonpb_test_proto [no test files] ok github.com/google/safebrowsing/vendor/github.com/golang/protobuf/proto 0.062s ? github.com/google/safebrowsing/vendor/github.com/golang/protobuf/proto/proto3_proto [no test files] ? github.com/google/safebrowsing/vendor/github.com/golang/protobuf/protoc-gen-go [no test files] ? github.com/google/safebrowsing/vendor/github.com/golang/protobuf/protoc-gen-go/descriptor [no test files] ok github.com/google/safebrowsing/vendor/github.com/golang/protobuf/protoc-gen-go/generator 0.017s ? github.com/google/safebrowsing/vendor/github.com/golang/protobuf/protoc-gen-go/grpc [no test files] ? github.com/google/safebrowsing/vendor/github.com/golang/protobuf/protoc-gen-go/plugin [no test files] ok github.com/google/safebrowsing/vendor/github.com/golang/protobuf/ptypes 0.009s ? github.com/google/safebrowsing/vendor/github.com/golang/protobuf/ptypes/any [no test files] ? github.com/google/safebrowsing/vendor/github.com/golang/protobuf/ptypes/duration [no test files] ? github.com/google/safebrowsing/vendor/github.com/golang/protobuf/ptypes/empty [no test files] ? github.com/google/safebrowsing/vendor/github.com/golang/protobuf/ptypes/struct [no test files] ? github.com/google/safebrowsing/vendor/github.com/golang/protobuf/ptypes/timestamp [no test files] ? github.com/google/safebrowsing/vendor/github.com/golang/protobuf/ptypes/wrappers [no test files] ? github.com/google/safebrowsing/vendor/github.com/rakyll/statik [no test files] ? github.com/google/safebrowsing/vendor/github.com/rakyll/statik/fs [no test files] ok github.com/google/safebrowsing/vendor/golang.org/x/net/idna 0.003s ```
This commit is contained in:
parent
5acce8ba38
commit
74e281c1ce
|
@ -135,6 +135,14 @@
|
|||
"ImportPath": "github.com/google/certificate-transparency/go/x509/pkix",
|
||||
"Rev": "0f6e3d1d1ba4d03fdaab7cd716f36255c2e48341"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/google/safebrowsing",
|
||||
"Rev": "fc74adc270b82ff5a2f288fa84e40213eae713c5"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/google/safebrowsing/internal/safebrowsing_proto",
|
||||
"Rev": "fc74adc270b82ff5a2f288fa84e40213eae713c5"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/grpc-ecosystem/go-grpc-prometheus",
|
||||
"Comment": "v1.1",
|
||||
|
|
|
@ -1,38 +1,113 @@
|
|||
// go:generate mockgen -source ../../va/gsb.go -package mock_gsb -destination mock_gsb.go SafeBrowsingV4
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
safebrowsingv4 "github.com/google/safebrowsing"
|
||||
"github.com/letsencrypt/boulder/cmd"
|
||||
"github.com/letsencrypt/boulder/va"
|
||||
safebrowsing "github.com/letsencrypt/go-safe-browsing-api"
|
||||
)
|
||||
|
||||
// newGoogleSafeBrowsing returns nil if the GoogleSafeBrowsing struct given is
|
||||
// nil. If an empty Google API key or an unreadable data directory is in the
|
||||
// GoogleSafeBrowsing config struct, this function runs cmd.FailOnError.
|
||||
func newGoogleSafeBrowsing(gsb *cmd.GoogleSafeBrowsingConfig) va.SafeBrowsing {
|
||||
var (
|
||||
NilConfigErr = errors.New("Google Safe Browsing config was nil")
|
||||
EmptyAPIKeyErr = errors.New("a Google Safe Browsing config was given but " +
|
||||
"it did not include a Google API key in APIKey")
|
||||
EmptyDataDirErr = errors.New("a Google Safe Browsing config was given but " +
|
||||
"it did not include a DataDir for persistence")
|
||||
MissingDataDirErr = errors.New("a Google Safe Browsing data directory was " +
|
||||
"given but it does not exist")
|
||||
BadDataDirErr = errors.New("a Google Safe Browsing data directory was " +
|
||||
"given but it cannot be opened")
|
||||
EmptyURLThreatErr = errors.New("Empty URLThreat from LookupURLs[0]")
|
||||
)
|
||||
|
||||
// configCheck returns an error if:
|
||||
// * the gsb config struct given is nil
|
||||
// * the gsb config struct's APIKey is empty
|
||||
// * the gsb config struct's DataDir is empty
|
||||
// * the gsb config struct's DataDir doesn't exist or isn't readable
|
||||
func configCheck(gsb *cmd.GoogleSafeBrowsingConfig) error {
|
||||
if gsb == nil {
|
||||
return NilConfigErr
|
||||
}
|
||||
if gsb.APIKey == "" {
|
||||
return EmptyAPIKeyErr
|
||||
}
|
||||
if gsb.DataDir == "" {
|
||||
return EmptyDataDirErr
|
||||
}
|
||||
if _, err := os.Stat(gsb.DataDir); err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
return MissingDataDirErr
|
||||
} else {
|
||||
return BadDataDirErr
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// gsbAdapter adapts the Google safebrowsing's `SafeBrowser` type to the
|
||||
// `va.SafeBrowsing` interface Boulder uses.
|
||||
type gsbAdapter struct {
|
||||
va.SafeBrowsingV4
|
||||
}
|
||||
|
||||
// IsListed provides the va.SafeBrowsing interface by using the
|
||||
// `safebrowsing4v.SafeBrowser` to look up one URL and return the first threat
|
||||
// list it is found on, or "" if the URL is safe.
|
||||
func (sb gsbAdapter) IsListed(url string) (string, error) {
|
||||
threats, err := sb.LookupURLs([]string{url})
|
||||
if err != nil {
|
||||
return "error", err
|
||||
}
|
||||
if len(threats) > 0 {
|
||||
// NOTE: We only return the _first_ URL threat's first ThreatType here. It's
|
||||
// possible a URL could return multiple threat's with distinct ThreatTypes,
|
||||
// but the va.SafeBrowser interface only returns 1 string that is compared
|
||||
// against "" to make a "safe or not" decision. We do not need more
|
||||
// granularity.
|
||||
if len(threats[0]) == 0 {
|
||||
return "error", EmptyURLThreatErr
|
||||
}
|
||||
return threats[0][0].ThreatType.String(), nil
|
||||
}
|
||||
return "", nil
|
||||
}
|
||||
|
||||
// newGoogleSafeBrowsingV4 constructs a va.SafeBrowsing instance using the new
|
||||
// Google upstream Safe Browsing version 4 client.
|
||||
func newGoogleSafeBrowsingV4(gsb *cmd.GoogleSafeBrowsingConfig) va.SafeBrowsing {
|
||||
// If there is no GSB configuration, don't create a client
|
||||
if gsb == nil {
|
||||
return nil
|
||||
}
|
||||
if gsb.APIKey == "" {
|
||||
cmd.FailOnError(errors.New(""), "a Google Safe Browsing config was given but it did not include a Google API key in APIKey")
|
||||
if err := configCheck(gsb); err != nil {
|
||||
cmd.FailOnError(err, "unable to create new safe browsing v4 client")
|
||||
}
|
||||
if gsb.DataDir == "" {
|
||||
cmd.FailOnError(errors.New(""), "a Google Safe Browsing config was given but it did not include data directory to store the hashes file in DataDir")
|
||||
}
|
||||
|
||||
f, err := os.Open(gsb.DataDir)
|
||||
sb, err := safebrowsingv4.NewSafeBrowser(safebrowsingv4.Config{
|
||||
APIKey: gsb.APIKey,
|
||||
DBPath: gsb.DataDir,
|
||||
})
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
cmd.FailOnError(err, fmt.Sprintf("Google Safe Browsing data directory (%#v) does not exist", gsb.DataDir))
|
||||
}
|
||||
cmd.FailOnError(err, "unable to open Google Safe Browsing data directory")
|
||||
cmd.FailOnError(err, "unable to create new safe browsing v4 client")
|
||||
}
|
||||
return gsbAdapter{sb}
|
||||
}
|
||||
|
||||
// newGoogleSafeBrowsing constructs a va.SafeBrowsing instance using the legacy
|
||||
// letsencrypt fork of the go-safebrowsing-api client.
|
||||
func newGoogleSafeBrowsing(gsb *cmd.GoogleSafeBrowsingConfig) va.SafeBrowsing {
|
||||
// If there is no GSB configuration, don't create a client
|
||||
if gsb == nil {
|
||||
return nil
|
||||
}
|
||||
if err := configCheck(gsb); err != nil {
|
||||
cmd.FailOnError(err, "unable to create new safe browsing client")
|
||||
}
|
||||
err = f.Close()
|
||||
cmd.FailOnError(err, "unable to access Google Safe Browsing data directory")
|
||||
sbc, err := safebrowsing.NewSafeBrowsing(gsb.APIKey, gsb.DataDir)
|
||||
if err != nil {
|
||||
cmd.FailOnError(err, "unable to create new safe browsing client")
|
||||
|
|
|
@ -0,0 +1,72 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/golang/mock/gomock"
|
||||
"github.com/letsencrypt/boulder/cmd"
|
||||
"github.com/letsencrypt/boulder/cmd/boulder-va/mock_gsb"
|
||||
"github.com/letsencrypt/boulder/test"
|
||||
)
|
||||
|
||||
// TestConfigCheck tests that configCheck() does what it says on the tin
|
||||
func TestConfigCheck(t *testing.T) {
|
||||
testcases := []struct {
|
||||
conf *cmd.GoogleSafeBrowsingConfig
|
||||
expected error
|
||||
}{
|
||||
{
|
||||
conf: nil,
|
||||
expected: NilConfigErr,
|
||||
},
|
||||
{
|
||||
conf: &cmd.GoogleSafeBrowsingConfig{
|
||||
APIKey: "",
|
||||
},
|
||||
expected: EmptyAPIKeyErr,
|
||||
},
|
||||
{
|
||||
conf: &cmd.GoogleSafeBrowsingConfig{
|
||||
APIKey: "you are the keymaster!",
|
||||
DataDir: "",
|
||||
},
|
||||
expected: EmptyDataDirErr,
|
||||
},
|
||||
{
|
||||
conf: &cmd.GoogleSafeBrowsingConfig{
|
||||
APIKey: "you are the keymaster!",
|
||||
DataDir: "/distrust/everything/i/say/i/am/telling/the/truth",
|
||||
},
|
||||
expected: MissingDataDirErr,
|
||||
},
|
||||
{
|
||||
conf: &cmd.GoogleSafeBrowsingConfig{
|
||||
APIKey: "you are the keymaster!",
|
||||
DataDir: "./",
|
||||
},
|
||||
expected: nil,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testcases {
|
||||
result := configCheck(tc.conf)
|
||||
test.AssertEquals(t, result, tc.expected)
|
||||
}
|
||||
}
|
||||
|
||||
// TestV4IsListed creates a va.SafeBrowsing instance backed by the google v4 API
|
||||
// client and tests the `IsListed` function
|
||||
func TestV4IsListed(t *testing.T) {
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
mockSB := mock_gsb.NewMockSafeBrowsingV4(ctrl)
|
||||
gsb := gsbAdapter{mockSB}
|
||||
url := "foobar.com"
|
||||
|
||||
// We EXPECT that calling `IsListed` on the gsbAdapter will result in a call to the SafeBrowser's `LookupURLs` function
|
||||
mockSB.EXPECT().LookupURLs([]string{url})
|
||||
result, err := gsb.IsListed(url)
|
||||
test.AssertNotError(t, err, fmt.Sprintf("IsListed(%q) returned non-nil err", url))
|
||||
test.AssertEquals(t, result, "")
|
||||
}
|
|
@ -11,6 +11,7 @@ import (
|
|||
"github.com/letsencrypt/boulder/bdns"
|
||||
"github.com/letsencrypt/boulder/cdr"
|
||||
"github.com/letsencrypt/boulder/cmd"
|
||||
"github.com/letsencrypt/boulder/features"
|
||||
bgrpc "github.com/letsencrypt/boulder/grpc"
|
||||
"github.com/letsencrypt/boulder/metrics"
|
||||
"github.com/letsencrypt/boulder/rpc"
|
||||
|
@ -87,7 +88,15 @@ func main() {
|
|||
pc.TLSPort = c.VA.PortConfig.TLSPort
|
||||
}
|
||||
|
||||
sbc := newGoogleSafeBrowsing(c.VA.GoogleSafeBrowsing)
|
||||
var sbc va.SafeBrowsing
|
||||
// If the feature flag is set, use the Google safebrowsing library that
|
||||
// implements the v4 api instead of the legacy letsencrypt fork of
|
||||
// go-safebrowsing-api
|
||||
if features.Enabled(features.GoogleSafeBrowsingV4) {
|
||||
sbc = newGoogleSafeBrowsingV4(c.VA.GoogleSafeBrowsing)
|
||||
} else {
|
||||
sbc = newGoogleSafeBrowsing(c.VA.GoogleSafeBrowsing)
|
||||
}
|
||||
|
||||
var cdrClient *cdr.CAADistributedResolver
|
||||
if c.VA.CAADistributedResolver != nil {
|
||||
|
|
|
@ -0,0 +1,73 @@
|
|||
// Automatically generated by MockGen. DO NOT EDIT!
|
||||
// Source: va/gsb.go
|
||||
|
||||
package mock_gsb
|
||||
|
||||
import (
|
||||
gomock "github.com/golang/mock/gomock"
|
||||
safebrowsing "github.com/google/safebrowsing"
|
||||
)
|
||||
|
||||
// Mock of SafeBrowsing interface
|
||||
type MockSafeBrowsing struct {
|
||||
ctrl *gomock.Controller
|
||||
recorder *_MockSafeBrowsingRecorder
|
||||
}
|
||||
|
||||
// Recorder for MockSafeBrowsing (not exported)
|
||||
type _MockSafeBrowsingRecorder struct {
|
||||
mock *MockSafeBrowsing
|
||||
}
|
||||
|
||||
func NewMockSafeBrowsing(ctrl *gomock.Controller) *MockSafeBrowsing {
|
||||
mock := &MockSafeBrowsing{ctrl: ctrl}
|
||||
mock.recorder = &_MockSafeBrowsingRecorder{mock}
|
||||
return mock
|
||||
}
|
||||
|
||||
func (_m *MockSafeBrowsing) EXPECT() *_MockSafeBrowsingRecorder {
|
||||
return _m.recorder
|
||||
}
|
||||
|
||||
func (_m *MockSafeBrowsing) IsListed(url string) (string, error) {
|
||||
ret := _m.ctrl.Call(_m, "IsListed", url)
|
||||
ret0, _ := ret[0].(string)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
func (_mr *_MockSafeBrowsingRecorder) IsListed(arg0 interface{}) *gomock.Call {
|
||||
return _mr.mock.ctrl.RecordCall(_mr.mock, "IsListed", arg0)
|
||||
}
|
||||
|
||||
// Mock of SafeBrowsingV4 interface
|
||||
type MockSafeBrowsingV4 struct {
|
||||
ctrl *gomock.Controller
|
||||
recorder *_MockSafeBrowsingV4Recorder
|
||||
}
|
||||
|
||||
// Recorder for MockSafeBrowsingV4 (not exported)
|
||||
type _MockSafeBrowsingV4Recorder struct {
|
||||
mock *MockSafeBrowsingV4
|
||||
}
|
||||
|
||||
func NewMockSafeBrowsingV4(ctrl *gomock.Controller) *MockSafeBrowsingV4 {
|
||||
mock := &MockSafeBrowsingV4{ctrl: ctrl}
|
||||
mock.recorder = &_MockSafeBrowsingV4Recorder{mock}
|
||||
return mock
|
||||
}
|
||||
|
||||
func (_m *MockSafeBrowsingV4) EXPECT() *_MockSafeBrowsingV4Recorder {
|
||||
return _m.recorder
|
||||
}
|
||||
|
||||
func (_m *MockSafeBrowsingV4) LookupURLs(urls []string) ([][]safebrowsing.URLThreat, error) {
|
||||
ret := _m.ctrl.Call(_m, "LookupURLs", urls)
|
||||
ret0, _ := ret[0].([][]safebrowsing.URLThreat)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
func (_mr *_MockSafeBrowsingV4Recorder) LookupURLs(arg0 interface{}) *gomock.Call {
|
||||
return _mr.mock.ctrl.RecordCall(_mr.mock, "LookupURLs", arg0)
|
||||
}
|
|
@ -4,9 +4,9 @@ package features
|
|||
|
||||
import "fmt"
|
||||
|
||||
const _FeatureFlag_name = "unusedIDNASupportAllowAccountDeactivationCertStatusOptimizationsMigratedAllowKeyRolloverResubmitMissingSCTsOnly"
|
||||
const _FeatureFlag_name = "unusedIDNASupportAllowAccountDeactivationCertStatusOptimizationsMigratedAllowKeyRolloverResubmitMissingSCTsOnlyGoogleSafeBrowsingV4"
|
||||
|
||||
var _FeatureFlag_index = [...]uint8{0, 6, 17, 41, 72, 88, 111}
|
||||
var _FeatureFlag_index = [...]uint8{0, 6, 17, 41, 72, 88, 111, 131}
|
||||
|
||||
func (i FeatureFlag) String() string {
|
||||
if i < 0 || i >= FeatureFlag(len(_FeatureFlag_index)-1) {
|
||||
|
|
|
@ -17,6 +17,7 @@ const (
|
|||
CertStatusOptimizationsMigrated
|
||||
AllowKeyRollover
|
||||
ResubmitMissingSCTsOnly
|
||||
GoogleSafeBrowsingV4
|
||||
)
|
||||
|
||||
// List of features and their default value, protected by fMu
|
||||
|
@ -27,6 +28,7 @@ var features = map[FeatureFlag]bool{
|
|||
CertStatusOptimizationsMigrated: false,
|
||||
AllowKeyRollover: false,
|
||||
ResubmitMissingSCTsOnly: false,
|
||||
GoogleSafeBrowsingV4: false,
|
||||
}
|
||||
|
||||
var fMu = new(sync.RWMutex)
|
||||
|
|
|
@ -24,6 +24,9 @@
|
|||
"serverURLFile": "test/secrets/amqp_url",
|
||||
"insecure": true,
|
||||
"serviceQueue": "VA.server"
|
||||
},
|
||||
"features": {
|
||||
"GoogleSafeBrowsingV4": true
|
||||
}
|
||||
},
|
||||
|
||||
|
|
16
va/gsb.go
16
va/gsb.go
|
@ -1,8 +1,9 @@
|
|||
// go:generate mockgen -source ./gsb.go -destination mock_gsb_test.go -package va
|
||||
// go:generate mockgen -source ./gsb.go -destination mock_gsb_test.go -package va SafeBrowsing
|
||||
|
||||
package va
|
||||
|
||||
import (
|
||||
safebrowsingv4 "github.com/google/safebrowsing"
|
||||
safebrowsing "github.com/letsencrypt/go-safe-browsing-api"
|
||||
"golang.org/x/net/context"
|
||||
|
||||
|
@ -10,14 +11,21 @@ import (
|
|||
vaPB "github.com/letsencrypt/boulder/va/proto"
|
||||
)
|
||||
|
||||
// SafeBrowsing is an interface for an third-party safe browing API client.
|
||||
// SafeBrowsing is an interface for a third-party safe browing API client.
|
||||
type SafeBrowsing interface {
|
||||
// IsListed returns a non-empty string if the domain was bad. Specifically,
|
||||
// that list is which Google Safe Browsing list the domain was found on.
|
||||
// it is which Google Safe Browsing list the domain was found on.
|
||||
IsListed(url string) (list string, err error)
|
||||
}
|
||||
|
||||
// IsSafeDomain returns true if the domain given is determined to be safe by an
|
||||
// SafeBrowsingV4 is an interface around the functions from Google
|
||||
// safebrowsing's v4 API's *SafeBrowser type that we use. Using this interface
|
||||
// allows mocking for tests
|
||||
type SafeBrowsingV4 interface {
|
||||
LookupURLs(urls []string) (threats [][]safebrowsingv4.URLThreat, err error)
|
||||
}
|
||||
|
||||
// IsSafeDomain returns true if the domain given is determined to be safe by a
|
||||
// third-party safe browsing API. It's meant be called by the RA before pending
|
||||
// authorization creation. If no third-party client was provided, it fails open
|
||||
// and increments a Skips metric.
|
||||
|
|
|
@ -0,0 +1,10 @@
|
|||
language: go
|
||||
|
||||
go:
|
||||
- 1.6
|
||||
- 1.7
|
||||
|
||||
script:
|
||||
- test -z "$(gofmt -s -l $(find . -name '*.go' -type f -print) | tee /dev/stderr)"
|
||||
- go test -v ./...
|
||||
- go test -race -v ./...
|
|
@ -0,0 +1,7 @@
|
|||
# This is the official list of Go Safe Browsing authors for copyright purposes.
|
||||
# This file is distinct from the CONTRIBUTORS files.
|
||||
# See the latter for an explanation.
|
||||
# Names should be added to this file as:
|
||||
# Name or Organization <email address>
|
||||
# The email address is not required for organizations.
|
||||
Google Inc.
|
|
@ -0,0 +1,25 @@
|
|||
Want to contribute? Great! First, read this page (including the small print at the end).
|
||||
|
||||
### Before you contribute
|
||||
Before we can use your code, you must sign the
|
||||
[Google Individual Contributor License Agreement](https://cla.developers.google.com/about/google-individual)
|
||||
(CLA), which you can do online. The CLA is necessary mainly because you own the
|
||||
copyright to your changes, even after your contribution becomes part of our
|
||||
codebase, so we need your permission to use and distribute your code. We also
|
||||
need to be sure of various other things—for instance that you'll tell us if you
|
||||
know that your code infringes on other people's patents. You don't have to sign
|
||||
the CLA until after you've submitted your code for review and a member has
|
||||
approved it, but you must do it before we can put your code into our codebase.
|
||||
Before you start working on a larger contribution, you should get in touch with
|
||||
us first through the issue tracker with your idea so that we can help out and
|
||||
possibly guide you. Coordinating up front makes it much easier to avoid
|
||||
frustration later on.
|
||||
|
||||
### Code reviews
|
||||
All submissions, including submissions by project members, require review. We
|
||||
use Github pull requests for this purpose.
|
||||
|
||||
### The small print
|
||||
Contributions made by corporations are covered by a different agreement than
|
||||
the one above, the
|
||||
[Software Grant and Corporate Contributor License Agreement](https://cla.developers.google.com/about/google-corporate).
|
|
@ -0,0 +1,202 @@
|
|||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
|
@ -0,0 +1,106 @@
|
|||
[](https://travis-ci.org/google/safebrowsing)
|
||||
|
||||
# Reference Implementation for the Usage of Google Safe Browsing APIs (v4)
|
||||
|
||||
The `safebrowsing` Go package can be used with the
|
||||
[Google Safe Browsing APIs (v4)](https://developers.google.com/safe-browsing/v4/)
|
||||
to access the Google Safe Browsing lists of unsafe web resources. Inside the
|
||||
`cmd` sub-directory, you can find two programs: `sblookup` and `sbserver`. The
|
||||
`sbserver` program creates a proxy local server to check URLs and a URL
|
||||
redirector to redirect users to a warning page for unsafe URLs. The `sblookup`
|
||||
program is a command line service that can also be used to check URLs.
|
||||
|
||||
This **README.md** is a quickstart guide on how to build, deploy, and use the
|
||||
`safebrowsing` Go package. It can be used out-of-the-box. The GoDoc and API
|
||||
documentation provide more details on fine tuning the parameters if desired.
|
||||
|
||||
|
||||
# Setup
|
||||
|
||||
To use the `safebrowsing` Go package you must obtain an *API key* from the
|
||||
[Google Developer Console](https://console.developers.google.com/). For more
|
||||
information, see the *Get Started* section of the Google Safe Browsing APIs (v4)
|
||||
documentation.
|
||||
|
||||
|
||||
# How to Build
|
||||
|
||||
To download and install from the source, run the following command:
|
||||
|
||||
```
|
||||
go get github.com/google/safebrowsing
|
||||
```
|
||||
|
||||
The programs below execute from your `$GOPATH/bin` folder.
|
||||
Add that to your `$PATH` for convenience:
|
||||
|
||||
```
|
||||
export PATH=$PATH:$GOPATH/bin
|
||||
```
|
||||
|
||||
|
||||
# Proxy Server
|
||||
|
||||
The `sbserver` server binary runs a Safe Browsing API lookup proxy that allows
|
||||
users to check URLs via a simple JSON API. The server also runs an URL
|
||||
redirector to show an interstitial for anything marked unsafe. The interstitial
|
||||
shows warnings recommended by Safe Browsing.
|
||||
|
||||
1. Once the Go environment is setup, run the following command with your API
|
||||
key:
|
||||
|
||||
```
|
||||
go get github.com/google/safebrowsing/cmd/sbserver
|
||||
sbserver -apikey $APIKEY
|
||||
```
|
||||
|
||||
With the default settings this will start a local server at **127.0.0.1:8080**.
|
||||
|
||||
2. Load the proxy server redirector in any web browser. Try these URLs:
|
||||
|
||||
```
|
||||
127.0.0.1:8080/r?url=http://testsafebrowsing.appspot.com/apiv4/ANY_PLATFORM/MALWARE/URL/
|
||||
127.0.0.1:8080/r?url=http://testsafebrowsing.appspot.com/apiv4/ANY_PLATFORM/SOCIAL_ENGINEERING/URL/
|
||||
127.0.0.1:8080/r?url=http://testsafebrowsing.appspot.com/apiv4/ANY_PLATFORM/UNWANTED_SOFTWARE/URL/
|
||||
127.0.0.1:8080/r?url=http://www.google.com/
|
||||
```
|
||||
|
||||
3. To use the local proxy server to check a URL, send a POST request with the
|
||||
following JSON body:
|
||||
|
||||
```json
|
||||
{
|
||||
"threatInfo": {
|
||||
"threatEntries": [
|
||||
{"url": "google.com"},
|
||||
{"url": "http://testsafebrowsing.appspot.com/apiv4/ANY_PLATFORM/MALWARE/URL/"}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Refer to the [Google Safe Browsing APIs (v4)]
|
||||
(https://developers.google.com/safe-browsing/v4/)
|
||||
for the format of the JSON request.
|
||||
|
||||
|
||||
# Command-Line Lookup
|
||||
|
||||
The `sblookup` command-line binary is another example of how the Go Safe
|
||||
Browsing library can be used to protect users from unsafe URLs. This
|
||||
command-line tool filters unsafe URLs piped via STDIN. Example usage:
|
||||
|
||||
```
|
||||
$ go get github.com/google/safebrowsing/cmd/sblookup
|
||||
$ echo "http://testsafebrowsing.appspot.com/apiv4/ANY_PLATFORM/MALWARE/URL/" | sblookup -apikey=$APIKEY
|
||||
Unsafe URL found: http://testsafebrowsing.appspot.com/apiv4/ANY_PLATFORM/MALWARE/URL/ [{testsafebrowsing.appspot.com/apiv4/ANY_PLATFORM/MALWARE/URL/ {MALWARE ANY_PLATFORM URL}}]
|
||||
```
|
||||
|
||||
|
||||
# Safe Browsing System Test
|
||||
To perform an end-to-end test on the package with the Safe Browsing backend,
|
||||
run the following command:
|
||||
|
||||
```
|
||||
go test github.com/google/safebrowsing -v -run TestSafeBrowser -apikey $APIKEY
|
||||
```
|
|
@ -0,0 +1,104 @@
|
|||
// Copyright 2016 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package safebrowsing
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
|
||||
pb "github.com/google/safebrowsing/internal/safebrowsing_proto"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
)
|
||||
|
||||
const (
|
||||
findHashPath = "/v4/fullHashes:find"
|
||||
fetchUpdatePath = "/v4/threatListUpdates:fetch"
|
||||
)
|
||||
|
||||
// The api interface specifies wrappers around the Safe Browsing API.
|
||||
type api interface {
|
||||
ListUpdate(req *pb.FetchThreatListUpdatesRequest) (*pb.FetchThreatListUpdatesResponse, error)
|
||||
HashLookup(req *pb.FindFullHashesRequest) (*pb.FindFullHashesResponse, error)
|
||||
}
|
||||
|
||||
// netAPI is an api object that talks to the server over HTTP.
|
||||
type netAPI struct {
|
||||
client http.Client
|
||||
url *url.URL
|
||||
}
|
||||
|
||||
// newNetAPI creates a new netAPI object pointed at the provided root URL.
|
||||
// For every request, it will use the provided API key.
|
||||
// If the protocol is not specified in root, then this defaults to using HTTPS.
|
||||
func newNetAPI(root string, key string) (*netAPI, error) {
|
||||
if !strings.Contains(root, "://") {
|
||||
root = "https://" + root
|
||||
}
|
||||
u, err := url.Parse(root)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
q := u.Query()
|
||||
q.Set("key", key)
|
||||
q.Set("alt", "proto")
|
||||
u.RawQuery = q.Encode()
|
||||
return &netAPI{url: u}, nil
|
||||
}
|
||||
|
||||
// doRequests performs a POST to requestPath. It uses the marshaled form of req
|
||||
// as the request body payload, and automatically unmarshals the response body
|
||||
// payload as resp.
|
||||
func (a *netAPI) doRequest(requestPath string, req proto.Message, resp proto.Message) error {
|
||||
p, err := proto.Marshal(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
u := *a.url // Make a copy of URL
|
||||
u.Path = requestPath
|
||||
httpReq, err := http.NewRequest("POST", u.String(), bytes.NewReader(p))
|
||||
httpReq.Header.Add("Content-Type", "application/x-protobuf")
|
||||
httpResp, err := a.client.Do(httpReq)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer httpResp.Body.Close()
|
||||
if httpResp.StatusCode != 200 {
|
||||
return fmt.Errorf("safebrowsing: unexpected server response code: %d", httpResp.StatusCode)
|
||||
}
|
||||
body, err := ioutil.ReadAll(httpResp.Body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return proto.Unmarshal(body, resp)
|
||||
}
|
||||
|
||||
// ListUpdate issues a FetchThreatListUpdates API call and returns the response.
|
||||
func (a *netAPI) ListUpdate(req *pb.FetchThreatListUpdatesRequest) (*pb.FetchThreatListUpdatesResponse, error) {
|
||||
resp := new(pb.FetchThreatListUpdatesResponse)
|
||||
return resp, a.doRequest(fetchUpdatePath, req, resp)
|
||||
}
|
||||
|
||||
// HashLookup issues a FindFullHashes API call and returns the response.
|
||||
func (a *netAPI) HashLookup(req *pb.FindFullHashesRequest) (*pb.FindFullHashesResponse, error) {
|
||||
resp := new(pb.FindFullHashesResponse)
|
||||
return resp, a.doRequest(findHashPath, req, resp)
|
||||
}
|
|
@ -0,0 +1,179 @@
|
|||
// Copyright 2016 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package safebrowsing
|
||||
|
||||
import (
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
pb "github.com/google/safebrowsing/internal/safebrowsing_proto"
|
||||
)
|
||||
|
||||
type cacheResult int
|
||||
|
||||
const (
|
||||
// positiveCacheHit indicates that the given hash matched an entry in the cache.
|
||||
// The caller must consider the match a threat and not contact the server.
|
||||
positiveCacheHit cacheResult = iota
|
||||
|
||||
// negativeCacheHit indicates that the given hash did not match any entries
|
||||
// in the cache but its prefix matches the negative cache. The caller must
|
||||
// consider the given hash to be safe and not contact the server.
|
||||
negativeCacheHit
|
||||
|
||||
// cacheMiss indicates that the given hash did not match any entry
|
||||
// in the cache. The caller should make a follow-up query to the server.
|
||||
cacheMiss
|
||||
)
|
||||
|
||||
// cache caches results from API calls to FindFullHashesRequest to reduce
|
||||
// network calls for recently requested items. Since the global blacklist is
|
||||
// constantly changing, the Safe Browsing API defines TTLs for how long entries
|
||||
// can stay alive in the cache.
|
||||
type cache struct {
|
||||
sync.RWMutex
|
||||
|
||||
// pttls maps full hashes and a ThreatDescriptor to a positive time-to-live.
|
||||
// For a given full hash, the known threats are all ThreatDescriptors that
|
||||
// map to valid TTLs (i.e. in the future).
|
||||
pttls map[hashPrefix]map[ThreatDescriptor]time.Time
|
||||
|
||||
// nttls maps partial hashes to a negative time-to-live.
|
||||
// If this is still valid (i.e. in the future), then this indicates that
|
||||
// there are *no* threats under the given partial hash, unless there exist
|
||||
// ThreatDescriptors with a valid positive TTL for that hash.
|
||||
nttls map[hashPrefix]time.Time
|
||||
|
||||
now func() time.Time
|
||||
}
|
||||
|
||||
// Update updates the cache according to the request that was made to the server
|
||||
// and the response given back.
|
||||
func (c *cache) Update(req *pb.FindFullHashesRequest, resp *pb.FindFullHashesResponse) {
|
||||
c.Lock()
|
||||
defer c.Unlock()
|
||||
now := c.now()
|
||||
|
||||
if c.pttls == nil {
|
||||
c.pttls = make(map[hashPrefix]map[ThreatDescriptor]time.Time)
|
||||
c.nttls = make(map[hashPrefix]time.Time)
|
||||
}
|
||||
|
||||
// Insert each threat match into the cache by full hash.
|
||||
for _, tm := range resp.GetMatches() {
|
||||
fullHash := hashPrefix(tm.GetThreat().Hash)
|
||||
if !fullHash.IsFull() {
|
||||
continue
|
||||
}
|
||||
if c.pttls[fullHash] == nil {
|
||||
c.pttls[fullHash] = make(map[ThreatDescriptor]time.Time)
|
||||
}
|
||||
dur := time.Duration(tm.GetCacheDuration().Seconds) * time.Second
|
||||
td := ThreatDescriptor{
|
||||
ThreatType: ThreatType(tm.ThreatType),
|
||||
PlatformType: PlatformType(tm.PlatformType),
|
||||
ThreatEntryType: ThreatEntryType(tm.ThreatEntryType),
|
||||
}
|
||||
c.pttls[fullHash][td] = now.Add(dur)
|
||||
}
|
||||
|
||||
// Insert negative TTLs for partial hashes.
|
||||
if resp.GetNegativeCacheDuration() != nil {
|
||||
dur := time.Duration(resp.GetNegativeCacheDuration().Seconds) * time.Second
|
||||
nttl := now.Add(dur)
|
||||
for _, te := range req.GetThreatInfo().GetThreatEntries() {
|
||||
partialHash := hashPrefix(te.Hash)
|
||||
c.nttls[partialHash] = nttl
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Lookup looks up a full hash and returns a set of ThreatDescriptors and the
|
||||
// validity of the result.
|
||||
func (c *cache) Lookup(hash hashPrefix) (map[ThreatDescriptor]bool, cacheResult) {
|
||||
if !hash.IsFull() {
|
||||
panic("hash is not full")
|
||||
}
|
||||
|
||||
c.Lock()
|
||||
defer c.Unlock()
|
||||
now := c.now()
|
||||
|
||||
// Check all entries to see if there *is* a threat.
|
||||
threats := make(map[ThreatDescriptor]bool)
|
||||
threatTTLs := c.pttls[hash]
|
||||
for td, pttl := range threatTTLs {
|
||||
if pttl.After(now) {
|
||||
threats[td] = true
|
||||
} else {
|
||||
// The PTTL has expired, we should ask the server what's going on.
|
||||
return nil, cacheMiss
|
||||
}
|
||||
}
|
||||
if len(threats) > 0 {
|
||||
// So long as there are valid threats, we report them. The positive TTL
|
||||
// takes precedence over the negative TTL at the partial hash level.
|
||||
return threats, positiveCacheHit
|
||||
}
|
||||
|
||||
// Check the negative TTLs to see if there are *no* threats.
|
||||
for i := minHashPrefixLength; i <= maxHashPrefixLength; i++ {
|
||||
if nttl, ok := c.nttls[hash[:i]]; ok {
|
||||
if nttl.After(now) {
|
||||
return nil, negativeCacheHit
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// The cache has no information; it is a *possible* threat.
|
||||
return nil, cacheMiss
|
||||
}
|
||||
|
||||
// Purge purges all expired entries from the cache.
|
||||
func (c *cache) Purge() {
|
||||
c.Lock()
|
||||
defer c.Unlock()
|
||||
now := c.now()
|
||||
|
||||
// Nuke all threat entries based on their positive TTL.
|
||||
for fullHash, threatTTLs := range c.pttls {
|
||||
for td, pttl := range threatTTLs {
|
||||
if now.After(pttl) {
|
||||
del := true
|
||||
for i := minHashPrefixLength; i <= maxHashPrefixLength; i++ {
|
||||
if nttl, ok := c.nttls[fullHash[:i]]; ok {
|
||||
if nttl.After(pttl) {
|
||||
del = false
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if del {
|
||||
delete(threatTTLs, td)
|
||||
}
|
||||
}
|
||||
}
|
||||
if len(threatTTLs) == 0 {
|
||||
delete(c.pttls, fullHash)
|
||||
}
|
||||
}
|
||||
|
||||
// Nuke all partial hashes based on their negative TTL.
|
||||
for partialHash, nttl := range c.nttls {
|
||||
if now.After(nttl) {
|
||||
delete(c.nttls, partialHash)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,436 @@
|
|||
// Copyright 2016 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package safebrowsing
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"encoding/gob"
|
||||
"errors"
|
||||
"log"
|
||||
"os"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
pb "github.com/google/safebrowsing/internal/safebrowsing_proto"
|
||||
)
|
||||
|
||||
// jitter is the maximum amount of time that we expect an API list update to
|
||||
// actually take. We add this time to the update period time to give some
|
||||
// leeway before declaring the database as stale.
|
||||
const jitter = 30 * time.Second
|
||||
|
||||
// database tracks the state of the threat lists published by the Safe Browsing
|
||||
// API. Since the global blacklist is constantly changing, the contents of the
|
||||
// database needs to be periodically synced with the Safe Browsing servers in
|
||||
// order to provide protection for the latest threats.
|
||||
//
|
||||
// The process for updating the database is as follows:
|
||||
// * At startup, if a database file is provided, then load it. If loaded
|
||||
// properly (not corrupted and not stale), then set tfu as the contents.
|
||||
// Otherwise, pull a new threat list from the Safe Browsing API.
|
||||
// * Periodically, synchronize the database with the Safe Browsing API.
|
||||
// This uses the State fields to update only parts of the threat list that have
|
||||
// changed since the last sync.
|
||||
// * Anytime tfu is updated, generate a new tfl.
|
||||
//
|
||||
// The process for querying the database is as follows:
|
||||
// * Check if the requested full hash matches any partial hash in tfl.
|
||||
// If a match is found, return a set of ThreatDescriptors with a partial match.
|
||||
type database struct {
|
||||
config *Config
|
||||
|
||||
// threatsForUpdate maps ThreatDescriptors to lists of partial hashes.
|
||||
// This data structure is in a format that is easily updated by the API.
|
||||
// It is also the form that is written to disk.
|
||||
tfu threatsForUpdate
|
||||
mu sync.Mutex // Protects tfu
|
||||
|
||||
// threatsForLookup maps ThreatDescriptors to sets of partial hashes.
|
||||
// This data structure is in a format that is easily queried.
|
||||
tfl threatsForLookup
|
||||
ml sync.RWMutex // Protects tfl, err, and last
|
||||
|
||||
err error // Last error encountered
|
||||
last time.Time // Last time the threat list were synced
|
||||
|
||||
log *log.Logger
|
||||
}
|
||||
|
||||
type threatsForUpdate map[ThreatDescriptor]partialHashes
|
||||
type partialHashes struct {
|
||||
// Since the Hashes field is only needed when storing to disk and when
|
||||
// updating, this field is cleared except for when it is in use.
|
||||
// This is done to reduce memory usage as the contents of this can be
|
||||
// regenerated from the tfl.
|
||||
Hashes hashPrefixes
|
||||
|
||||
SHA256 []byte // The SHA256 over Hashes
|
||||
State []byte // Arbitrary binary blob to synchronize state with API
|
||||
}
|
||||
|
||||
type threatsForLookup map[ThreatDescriptor]hashSet
|
||||
|
||||
// databaseFormat is a light struct used only for gob encoding and decoding.
|
||||
// As written to disk, the format of the database file is basically the gzip
|
||||
// compressed version of the gob encoding of databaseFormat.
|
||||
type databaseFormat struct {
|
||||
Table threatsForUpdate
|
||||
Time time.Time
|
||||
}
|
||||
|
||||
// Init initializes the database from the specified file in config.DBPath.
|
||||
// It reports true if the database was successfully loaded.
|
||||
func (db *database) Init(config *Config, logger *log.Logger) bool {
|
||||
db.config = config
|
||||
db.log = logger
|
||||
if db.config.DBPath == "" {
|
||||
db.log.Printf("no database file specified")
|
||||
db.setError(errStale)
|
||||
return false
|
||||
}
|
||||
dbf, err := loadDatabase(db.config.DBPath)
|
||||
if err != nil {
|
||||
db.log.Printf("load failure: %v", err)
|
||||
db.setError(err)
|
||||
return false
|
||||
}
|
||||
|
||||
// Validate that the database threat list stored on disk is at least a
|
||||
// superset of the specified configuration.
|
||||
if db.config.now().Sub(dbf.Time) > (db.config.UpdatePeriod + jitter) {
|
||||
db.log.Printf("database loaded is stale")
|
||||
db.setError(errStale)
|
||||
return false
|
||||
}
|
||||
tfuNew := make(threatsForUpdate)
|
||||
for _, td := range db.config.ThreatLists {
|
||||
if row, ok := dbf.Table[td]; ok {
|
||||
tfuNew[td] = row
|
||||
} else {
|
||||
db.log.Printf("database configuration mismatch")
|
||||
db.setError(errStale)
|
||||
return false
|
||||
}
|
||||
}
|
||||
db.tfu = tfuNew
|
||||
db.generateThreatsForLookups(dbf.Time)
|
||||
return true
|
||||
}
|
||||
|
||||
// Status reports the health of the database. If in a faulted state, the db
|
||||
// may repair itself on the next Update.
|
||||
func (db *database) Status() error {
|
||||
db.ml.RLock()
|
||||
defer db.ml.RUnlock()
|
||||
|
||||
if db.err != nil {
|
||||
return db.err
|
||||
}
|
||||
if db.config.now().Sub(db.last) > (db.config.UpdatePeriod + jitter) {
|
||||
return errStale
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Update synchronizes the local threat lists with those maintained by the
|
||||
// global Safe Browsing API servers. If the update is successful, Status should
|
||||
// report a nil error.
|
||||
func (db *database) Update(api api) {
|
||||
db.mu.Lock()
|
||||
defer db.mu.Unlock()
|
||||
|
||||
// Construct the request.
|
||||
var numTypes int
|
||||
var s []*pb.FetchThreatListUpdatesRequest_ListUpdateRequest
|
||||
for _, td := range db.config.ThreatLists {
|
||||
var state []byte
|
||||
if row, ok := db.tfu[td]; ok {
|
||||
state = row.State
|
||||
}
|
||||
|
||||
s = append(s, &pb.FetchThreatListUpdatesRequest_ListUpdateRequest{
|
||||
ThreatType: pb.ThreatType(td.ThreatType),
|
||||
PlatformType: pb.PlatformType(td.PlatformType),
|
||||
ThreatEntryType: pb.ThreatEntryType(td.ThreatEntryType),
|
||||
Constraints: &pb.FetchThreatListUpdatesRequest_ListUpdateRequest_Constraints{
|
||||
SupportedCompressions: db.config.compressionTypes},
|
||||
State: state,
|
||||
})
|
||||
numTypes++
|
||||
}
|
||||
req := &pb.FetchThreatListUpdatesRequest{
|
||||
Client: &pb.ClientInfo{
|
||||
ClientId: db.config.ID,
|
||||
ClientVersion: db.config.Version,
|
||||
},
|
||||
ListUpdateRequests: s,
|
||||
}
|
||||
|
||||
// Query the API for the threat list and update the database.
|
||||
last := db.config.now()
|
||||
resp, err := api.ListUpdate(req)
|
||||
if err != nil {
|
||||
db.log.Printf("ListUpdate failure: %v", err)
|
||||
db.setError(err)
|
||||
return
|
||||
}
|
||||
if len(resp.ListUpdateResponses) != numTypes {
|
||||
db.log.Printf("invalid server response: got %d, want %d threat lists",
|
||||
len(resp.ListUpdateResponses), numTypes)
|
||||
db.setError(errors.New("safebrowsing: threat list count mismatch"))
|
||||
return
|
||||
}
|
||||
|
||||
// Update the threat database with the response.
|
||||
db.generateThreatsForUpdate()
|
||||
if err := db.tfu.update(resp); err != nil {
|
||||
db.log.Printf("update failure: %v", err)
|
||||
db.setError(err)
|
||||
return
|
||||
}
|
||||
dbf := databaseFormat{make(threatsForUpdate), last}
|
||||
for td, phs := range db.tfu {
|
||||
// Copy of partialHashes before generateThreatsForLookups clobbers it.
|
||||
dbf.Table[td] = phs
|
||||
}
|
||||
db.generateThreatsForLookups(last)
|
||||
|
||||
// Regenerate the database and store it.
|
||||
if db.config.DBPath != "" {
|
||||
// Semantically, we ignore save errors, but we do log them.
|
||||
if err := saveDatabase(db.config.DBPath, dbf); err != nil {
|
||||
db.log.Printf("save failure: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Lookup looks up the full hash in the threat list and returns a partial
|
||||
// hash and a set of ThreatDescriptors that may match the full hash.
|
||||
func (db *database) Lookup(hash hashPrefix) (h hashPrefix, tds []ThreatDescriptor) {
|
||||
if !hash.IsFull() {
|
||||
panic("hash is not full")
|
||||
}
|
||||
|
||||
db.ml.RLock()
|
||||
for td, hs := range db.tfl {
|
||||
if n := hs.Lookup(hash); n > 0 {
|
||||
h = hash[:n]
|
||||
tds = append(tds, td)
|
||||
}
|
||||
}
|
||||
db.ml.RUnlock()
|
||||
return h, tds
|
||||
}
|
||||
|
||||
// setError clears the database state and sets the last error to be err.
|
||||
//
|
||||
// This assumes that the db.mu lock is already held.
|
||||
func (db *database) setError(err error) {
|
||||
db.tfu = nil
|
||||
|
||||
db.ml.Lock()
|
||||
db.tfl, db.err, db.last = nil, err, time.Time{}
|
||||
db.ml.Unlock()
|
||||
}
|
||||
|
||||
// generateThreatsForUpdate regenerates the threatsForUpdate hashes from
|
||||
// the threatsForLookup. We do this to avoid holding onto the hash lists for
|
||||
// a long time, needlessly occupying lots of memory.
|
||||
//
|
||||
// This assumes that the db.mu lock is already held.
|
||||
func (db *database) generateThreatsForUpdate() {
|
||||
if db.tfu == nil {
|
||||
db.tfu = make(threatsForUpdate)
|
||||
}
|
||||
|
||||
db.ml.RLock()
|
||||
for td, hs := range db.tfl {
|
||||
phs := db.tfu[td]
|
||||
phs.Hashes = hs.Export()
|
||||
db.tfu[td] = phs
|
||||
}
|
||||
db.ml.RUnlock()
|
||||
}
|
||||
|
||||
// generateThreatsForLookups regenerates the threatsForLookup data structure
|
||||
// from the threatsForUpdate data structure and stores the last timestamp.
|
||||
// Since the hashes are effectively stored as a set inside the threatsForLookup,
|
||||
// we clear out the hashes slice in threatsForUpdate so that it can be GCed.
|
||||
//
|
||||
// This assumes that the db.mu lock is already held.
|
||||
func (db *database) generateThreatsForLookups(last time.Time) {
|
||||
tfl := make(threatsForLookup)
|
||||
for td, phs := range db.tfu {
|
||||
var hs hashSet
|
||||
hs.Import(phs.Hashes)
|
||||
tfl[td] = hs
|
||||
|
||||
phs.Hashes = nil // Clear hashes to keep memory usage low
|
||||
db.tfu[td] = phs
|
||||
}
|
||||
|
||||
db.ml.Lock()
|
||||
wasBad := db.err != nil
|
||||
db.tfl, db.err, db.last = tfl, nil, last
|
||||
db.ml.Unlock()
|
||||
|
||||
if wasBad {
|
||||
db.log.Printf("database is now healthy")
|
||||
}
|
||||
}
|
||||
|
||||
// saveDatabase saves the database threat list to a file.
|
||||
func saveDatabase(path string, db databaseFormat) (err error) {
|
||||
var file *os.File
|
||||
file, err = os.Create(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
if cerr := file.Close(); err == nil {
|
||||
err = cerr
|
||||
}
|
||||
}()
|
||||
|
||||
gz, err := gzip.NewWriterLevel(file, gzip.BestCompression)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
if zerr := gz.Close(); err == nil {
|
||||
err = zerr
|
||||
}
|
||||
}()
|
||||
|
||||
encoder := gob.NewEncoder(gz)
|
||||
if err = encoder.Encode(db); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// loadDatabase loads the database state from a file.
|
||||
func loadDatabase(path string) (db databaseFormat, err error) {
|
||||
var file *os.File
|
||||
file, err = os.Open(path)
|
||||
if err != nil {
|
||||
return db, err
|
||||
}
|
||||
defer func() {
|
||||
if cerr := file.Close(); err == nil {
|
||||
err = cerr
|
||||
}
|
||||
}()
|
||||
|
||||
gz, err := gzip.NewReader(file)
|
||||
if err != nil {
|
||||
return db, err
|
||||
}
|
||||
defer func() {
|
||||
if zerr := gz.Close(); err == nil {
|
||||
err = zerr
|
||||
}
|
||||
}()
|
||||
|
||||
decoder := gob.NewDecoder(gz)
|
||||
if err = decoder.Decode(&db); err != nil {
|
||||
return db, err
|
||||
}
|
||||
for _, dv := range db.Table {
|
||||
if !bytes.Equal(dv.SHA256, dv.Hashes.SHA256()) {
|
||||
return db, errors.New("safebrowsing: threat list SHA256 mismatch")
|
||||
}
|
||||
}
|
||||
return db, nil
|
||||
}
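
saveDatabase and loadDatabase above persist the threat lists as a gob stream wrapped in gzip. A minimal, self-contained sketch of that same persistence pattern (the `snapshot` type and values are illustrative, not from this package):

```
package main

import (
	"bytes"
	"compress/gzip"
	"encoding/gob"
	"fmt"
)

type snapshot struct{ Hashes []string }

func main() {
	// Encode: gob inside a gzip writer, mirroring saveDatabase.
	var buf bytes.Buffer
	gz := gzip.NewWriter(&buf)
	if err := gob.NewEncoder(gz).Encode(snapshot{Hashes: []string{"aaaa", "bbbb"}}); err != nil {
		panic(err)
	}
	if err := gz.Close(); err != nil { // flush the gzip stream before reading it back
		panic(err)
	}

	// Decode: a gzip reader feeding a gob decoder, mirroring loadDatabase.
	zr, err := gzip.NewReader(&buf)
	if err != nil {
		panic(err)
	}
	var out snapshot
	if err := gob.NewDecoder(zr).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println(out.Hashes) // [aaaa bbbb]
}
```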
|
||||
|
||||
// update updates the threat list according to the API response.
|
||||
func (tfu threatsForUpdate) update(resp *pb.FetchThreatListUpdatesResponse) error {
|
||||
// For each update response do the removes and adds
|
||||
for _, m := range resp.GetListUpdateResponses() {
|
||||
td := ThreatDescriptor{
|
||||
PlatformType: PlatformType(m.PlatformType),
|
||||
ThreatType: ThreatType(m.ThreatType),
|
||||
ThreatEntryType: ThreatEntryType(m.ThreatEntryType),
|
||||
}
|
||||
|
||||
phs, ok := tfu[td]
|
||||
switch m.ResponseType {
|
||||
case pb.FetchThreatListUpdatesResponse_ListUpdateResponse_PARTIAL_UPDATE:
|
||||
if !ok {
|
||||
return errors.New("safebrowsing: partial update received for non-existent key")
|
||||
}
|
||||
case pb.FetchThreatListUpdatesResponse_ListUpdateResponse_FULL_UPDATE:
|
||||
if len(m.Removals) > 0 {
|
||||
return errors.New("safebrowsing: indices to be removed included in a full update")
|
||||
}
|
||||
phs = partialHashes{}
|
||||
default:
|
||||
return errors.New("safebrowsing: unknown response type")
|
||||
}
|
||||
|
||||
// Hashes must be sorted for removal logic to work properly.
|
||||
phs.Hashes.Sort()
|
||||
|
||||
for _, removal := range m.Removals {
|
||||
idxs, err := decodeIndices(removal)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, i := range idxs {
|
||||
if i < 0 || i >= int32(len(phs.Hashes)) {
|
||||
return errors.New("safebrowsing: invalid removal index")
|
||||
}
|
||||
phs.Hashes[i] = ""
|
||||
}
|
||||
}
|
||||
|
||||
// If any removal was performed, compact the list of hashes.
|
||||
if len(m.Removals) > 0 {
|
||||
compactHashes := phs.Hashes[:0]
|
||||
for _, h := range phs.Hashes {
|
||||
if h != "" {
|
||||
compactHashes = append(compactHashes, h)
|
||||
}
|
||||
}
|
||||
phs.Hashes = compactHashes
|
||||
}
|
||||
|
||||
for _, addition := range m.Additions {
|
||||
hashes, err := decodeHashes(addition)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
phs.Hashes = append(phs.Hashes, hashes...)
|
||||
}
|
||||
|
||||
// Hashes must be sorted for SHA256 checksum to be correct.
|
||||
phs.Hashes.Sort()
|
||||
if err := phs.Hashes.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
phs.SHA256 = m.GetChecksum().Sha256
|
||||
if !bytes.Equal(phs.SHA256, phs.Hashes.SHA256()) {
|
||||
return errors.New("safebrowsing: threat list SHA256 mismatch")
|
||||
}
|
||||
|
||||
phs.State = m.NewClientState
|
||||
tfu[td] = phs
|
||||
}
|
||||
return nil
|
||||
}
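
The removal branch above marks deleted indices with an empty string and then compacts the slice in place. A standalone sketch of that remove-then-compact idiom (the indices and values are made up for illustration):

```
package main

import "fmt"

func main() {
	hashes := []string{"aaaa", "bbbb", "cccc", "dddd"} // sorted, as required above
	removals := []int{1, 3}                            // indices supplied by the server

	// Mark removed entries, then compact in place, reusing the backing array.
	for _, i := range removals {
		hashes[i] = ""
	}
	compact := hashes[:0]
	for _, h := range hashes {
		if h != "" {
			compact = append(compact, h)
		}
	}
	fmt.Println(compact) // [aaaa cccc]
}
```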
@ -0,0 +1,19 @@
#!/bin/bash
|
||||
# Copyright 2016 Google Inc. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
$DIR/cmd/sbserver/generate.sh
|
||||
$DIR/internal/safebrowsing_proto/generate.sh
@ -0,0 +1,334 @@
// Copyright 2016 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package safebrowsing
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"io"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
pb "github.com/google/safebrowsing/internal/safebrowsing_proto"
|
||||
)
|
||||
|
||||
const (
|
||||
minHashPrefixLength = 4
|
||||
maxHashPrefixLength = sha256.Size
|
||||
)
|
||||
|
||||
// hashPrefix represents a SHA256 hash. It may either be full, where
// len(Hash) == maxHashPrefixLength, or partial, where
// len(Hash) >= minHashPrefixLength.
type hashPrefix string
|
||||
|
||||
// hashFromPattern returns a full hash for the given URL pattern.
|
||||
func hashFromPattern(pattern string) hashPrefix {
|
||||
hash := sha256.New()
|
||||
hash.Write([]byte(pattern))
|
||||
return hashPrefix(hash.Sum(nil))
|
||||
}
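
hashFromPattern is a plain SHA-256 over a canonicalized URL pattern. A standalone sketch showing the full 32-byte hash and the 4-byte prefix that the local lists usually store (the pattern string is only an example):

```
package main

import (
	"crypto/sha256"
	"fmt"
)

func main() {
	pattern := "example.com/"              // a canonicalized URL pattern
	full := sha256.Sum256([]byte(pattern)) // the full 32-byte hash
	prefix := full[:4]                     // the minimum 4-byte prefix kept locally
	fmt.Printf("full:   %x\n", full)
	fmt.Printf("prefix: %x\n", prefix)
}
```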
|
||||
|
||||
// HasPrefix reports whether other is a prefix of h.
|
||||
func (h hashPrefix) HasPrefix(other hashPrefix) bool {
|
||||
return strings.HasPrefix(string(h), string(other))
|
||||
}
|
||||
|
||||
// IsFull reports whether the hash is a full SHA256 hash.
|
||||
func (h hashPrefix) IsFull() bool {
|
||||
return len(h) == maxHashPrefixLength
|
||||
}
|
||||
|
||||
// IsValid reports whether the hash is a valid partial or full hash.
|
||||
func (h hashPrefix) IsValid() bool {
|
||||
return len(h) >= minHashPrefixLength && len(h) <= maxHashPrefixLength
|
||||
}
|
||||
|
||||
type hashPrefixes []hashPrefix
|
||||
|
||||
func (p hashPrefixes) Len() int { return len(p) }
|
||||
func (p hashPrefixes) Less(i, j int) bool { return p[i] < p[j] }
|
||||
func (p hashPrefixes) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
|
||||
func (p hashPrefixes) Sort() { sort.Sort(p) }
|
||||
|
||||
// Validate checks that the list of hash prefixes is valid. It checks the
// following properties:
//	* That each hash prefix is valid; that is, it has a length between
//	  minHashPrefixLength and maxHashPrefixLength.
//	* That the list of prefixes is sorted.
//	* That none of the hashes are prefixes of each other.
|
||||
func (p hashPrefixes) Validate() error {
|
||||
var hp hashPrefix // Previous hash
|
||||
for _, h := range p {
|
||||
switch {
|
||||
case !h.IsValid():
|
||||
return errors.New("safebrowsing: invalid hash")
|
||||
case hp >= h:
|
||||
return errors.New("safebrowsing: unsorted hash list")
|
||||
case h.HasPrefix(hp) && hp != "":
|
||||
return errors.New("safebrowsing: non-unique hash prefix")
|
||||
}
|
||||
hp = h
|
||||
}
|
||||
return nil
|
||||
}
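
Validate enforces that the prefix list is sorted and that no prefix is a prefix of another entry. A standalone sketch of those two invariants on plain strings (not the package's hashPrefixes type):

```
package main

import (
	"fmt"
	"sort"
	"strings"
)

func main() {
	prefixes := []string{"aaaa", "aaaab", "bbbb"} // "aaaab" collides with "aaaa"

	sorted := sort.StringsAreSorted(prefixes)
	uniquePrefixes := true
	for i := 1; i < len(prefixes); i++ {
		if strings.HasPrefix(prefixes[i], prefixes[i-1]) {
			uniquePrefixes = false
			break
		}
	}
	// Validate would reject this list: it is sorted, but one hash is a
	// prefix of another.
	fmt.Println(sorted, uniquePrefixes) // true false
}
```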
|
||||
|
||||
func (p hashPrefixes) SHA256() []byte {
|
||||
hash := sha256.New()
|
||||
for _, b := range p {
|
||||
hash.Write([]byte(b))
|
||||
}
|
||||
return hash.Sum(nil)
|
||||
}
|
||||
|
||||
// hashSet is a set of hash prefixes optimized for the fact that most hashes
|
||||
// are only 4 bytes in length.
|
||||
type hashSet struct {
|
||||
h4 map[[minHashPrefixLength]byte]uint8 // Value is maximum length prefix
|
||||
hx map[hashPrefix]struct{}
|
||||
n int
|
||||
}
|
||||
|
||||
func byte4(h hashPrefix) (b [4]byte) {
|
||||
b[0], b[1], b[2], b[3] = h[0], h[1], h[2], h[3]
|
||||
return b
|
||||
}
|
||||
|
||||
func (hs *hashSet) Len() int { return hs.n }
|
||||
|
||||
func (hs *hashSet) Import(phs hashPrefixes) {
|
||||
hs.h4 = make(map[[minHashPrefixLength]byte]uint8, len(phs))
|
||||
hs.hx = make(map[hashPrefix]struct{})
|
||||
hs.n = len(phs)
|
||||
for _, h := range phs {
|
||||
n := hs.h4[byte4(h)]
|
||||
if len(h) > int(n) {
|
||||
hs.h4[byte4(h)] = uint8(len(h))
|
||||
}
|
||||
if len(h) > 4 {
|
||||
hs.hx[h] = struct{}{}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (hs *hashSet) Export() hashPrefixes {
|
||||
phs := make(hashPrefixes, 0, hs.n)
|
||||
for h, n := range hs.h4 {
|
||||
if n == minHashPrefixLength {
|
||||
phs = append(phs, hashPrefix(h[:]))
|
||||
}
|
||||
}
|
||||
for h := range hs.hx {
|
||||
phs = append(phs, h)
|
||||
}
|
||||
return phs
|
||||
}
|
||||
|
||||
func (hs *hashSet) Lookup(h hashPrefix) int {
|
||||
n := int(hs.h4[byte4(h)])
|
||||
if n <= minHashPrefixLength {
|
||||
return n
|
||||
}
|
||||
if n > len(h) {
|
||||
n = len(h)
|
||||
}
|
||||
for i := minHashPrefixLength; i <= n; i++ {
|
||||
if _, ok := hs.hx[h[:i]]; ok {
|
||||
return i
|
||||
}
|
||||
}
|
||||
return 0
|
||||
}
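
The hashSet keeps every 4-byte prefix in one map and only the longer prefixes in a second map, so a lookup costs at most a few map probes. A hypothetical in-package test sketch of Import/Lookup behavior (not part of the vendored code; assumes `strings` and `testing` imports):

```
// Hypothetical test inside package safebrowsing.
func TestHashSetLookupSketch(t *testing.T) {
	var hs hashSet
	hs.Import(hashPrefixes{"aaaa", "bbbbcc"}) // one 4-byte and one 6-byte prefix

	// Pad a prefix out to a full 32-byte hash for lookup.
	pad := func(p string) hashPrefix {
		return hashPrefix(p + strings.Repeat("x", 32-len(p)))
	}

	// Lookup reports the length of the longest stored prefix of the hash, or 0.
	if got := hs.Lookup(pad("aaaa")); got != 4 {
		t.Errorf("got %d, want 4", got)
	}
	if got := hs.Lookup(pad("bbbbccdd")); got != 6 {
		t.Errorf("got %d, want 6", got)
	}
	if got := hs.Lookup(pad("zzzz")); got != 0 {
		t.Errorf("got %d, want 0", got)
	}
}
```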
|
||||
|
||||
// decodeHashes takes a ThreatEntrySet and returns a list of hashes that should
|
||||
// be added to the local database.
|
||||
func decodeHashes(input *pb.ThreatEntrySet) ([]hashPrefix, error) {
|
||||
switch input.CompressionType {
|
||||
case pb.CompressionType_RAW:
|
||||
raw := input.GetRawHashes()
|
||||
if raw == nil {
|
||||
return nil, errors.New("safebrowsing: nil raw hashes")
|
||||
}
|
||||
if raw.PrefixSize < minHashPrefixLength || raw.PrefixSize > maxHashPrefixLength {
|
||||
return nil, errors.New("safebrowsing: invalid hash prefix length")
|
||||
}
|
||||
if len(raw.RawHashes)%int(raw.PrefixSize) != 0 {
|
||||
return nil, errors.New("safebrowsing: invalid raw hashes")
|
||||
}
|
||||
hashes := make([]hashPrefix, len(raw.RawHashes)/int(raw.PrefixSize))
|
||||
for i := range hashes {
|
||||
hashes[i] = hashPrefix(raw.RawHashes[:raw.PrefixSize])
|
||||
raw.RawHashes = raw.RawHashes[raw.PrefixSize:]
|
||||
}
|
||||
return hashes, nil
|
||||
case pb.CompressionType_RICE:
|
||||
values, err := decodeRiceIntegers(input.GetRiceHashes())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
hashes := make([]hashPrefix, 0, len(values))
|
||||
var buf [4]byte
|
||||
for _, h := range values {
|
||||
binary.LittleEndian.PutUint32(buf[:], h)
|
||||
hashes = append(hashes, hashPrefix(buf[:]))
|
||||
}
|
||||
return hashes, nil
|
||||
default:
|
||||
return nil, errors.New("safebrowsing: invalid compression type")
|
||||
}
|
||||
}
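
For RAW compression, decodeHashes simply slices the concatenated blob into fixed-size prefixes. A hypothetical in-package sketch using the generated pb types added in this PR (assumes an `fmt` import; the bytes are illustrative):

```
// Hypothetical in-package sketch: eight raw bytes with PrefixSize 4
// decode into two 4-byte hash prefixes.
func sketchDecodeRaw() {
	set := &pb.ThreatEntrySet{
		CompressionType: pb.CompressionType_RAW,
		RawHashes: &pb.RawHashes{
			PrefixSize: 4,
			RawHashes:  []byte("aaaabbbb"),
		},
	}
	hashes, err := decodeHashes(set)
	if err != nil {
		panic(err)
	}
	fmt.Println(hashes) // [aaaa bbbb]
}
```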
|
||||
|
||||
// decodeIndices takes a ThreatEntrySet for removals returned by the server and
|
||||
// returns a list of indices that the client should remove from its database.
|
||||
func decodeIndices(input *pb.ThreatEntrySet) ([]int32, error) {
|
||||
switch input.CompressionType {
|
||||
case pb.CompressionType_RAW:
|
||||
raw := input.GetRawIndices()
|
||||
if raw == nil {
|
||||
return nil, errors.New("safebrowsing: invalid raw indices")
|
||||
}
|
||||
return raw.Indices, nil
|
||||
case pb.CompressionType_RICE:
|
||||
values, err := decodeRiceIntegers(input.GetRiceIndices())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
indices := make([]int32, 0, len(values))
|
||||
for _, v := range values {
|
||||
indices = append(indices, int32(v))
|
||||
}
|
||||
return indices, nil
|
||||
default:
|
||||
return nil, errors.New("safebrowsing: invalid compression type")
|
||||
}
|
||||
}
|
||||
|
||||
// decodeRiceIntegers decodes a list of Golomb-Rice encoded integers.
|
||||
func decodeRiceIntegers(rice *pb.RiceDeltaEncoding) ([]uint32, error) {
|
||||
if rice == nil {
|
||||
return nil, errors.New("safebrowsing: missing rice encoded data")
|
||||
}
|
||||
if rice.RiceParameter < 0 || rice.RiceParameter > 32 {
|
||||
return nil, errors.New("safebrowsing: invalid k parameter")
|
||||
}
|
||||
|
||||
values := []uint32{uint32(rice.FirstValue)}
|
||||
br := newBitReader(rice.EncodedData)
|
||||
rd := newRiceDecoder(br, uint32(rice.RiceParameter))
|
||||
for i := 0; i < int(rice.NumEntries); i++ {
|
||||
delta, err := rd.ReadValue()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
values = append(values, values[i]+delta)
|
||||
}
|
||||
|
||||
if br.BitsRemaining() >= 8 {
|
||||
return nil, errors.New("safebrowsing: unconsumed rice encoded data")
|
||||
}
|
||||
return values, nil
|
||||
}
|
||||
|
||||
// riceDecoder implements Golomb-Rice decoding for the Safe Browsing API.
|
||||
//
|
||||
// In a Rice decoder every number n is encoded as q and r where n = (q<<k) + r.
|
||||
// k is a constant and a parameter of the Rice decoder and can have values in
|
||||
// 0..32 inclusive. The values for q and r are encoded in the bit stream using
|
||||
// different encoding schemes. The quotient comes before the remainder.
|
||||
//
|
||||
// The quotient q is encoded in unary coding followed by a 0. E.g., 3 would be
|
||||
// encoded as 1110, 4 as 11110, and 7 as 11111110.
|
||||
//
|
||||
// The remainder r is encoded using k bits as an unsigned integer with the
|
||||
// least-significant bits coming first in the bit stream.
|
||||
//
|
||||
// For more information, see the following:
|
||||
// https://en.wikipedia.org/wiki/Golomb_coding
|
||||
type riceDecoder struct {
|
||||
br *bitReader
|
||||
k uint32 // Golomb-Rice parameter
|
||||
}
|
||||
|
||||
func newRiceDecoder(br *bitReader, k uint32) *riceDecoder {
|
||||
return &riceDecoder{br, k}
|
||||
}
|
||||
|
||||
func (rd *riceDecoder) ReadValue() (uint32, error) {
|
||||
var q uint32
|
||||
for {
|
||||
bit, err := rd.br.ReadBits(1)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
q += bit
|
||||
if bit == 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
r, err := rd.br.ReadBits(int(rd.k))
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return q<<rd.k + r, nil
|
||||
}
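
A worked example of the encoding described above, as a hypothetical in-package sketch (not part of the vendored code; assumes an `fmt` import): with k = 2, the value 11 = (2<<2) + 3 is written as the unary quotient bits 1,1,0 followed by the remainder bits 1,1 (least-significant first), which is the single byte 0x1B in this LSB-first stream.

```
// Hypothetical in-package sketch.
// 0x1B = 0b00011011; read LSB-first, the bit stream is 1,1,0,1,1,0,0,0.
func sketchRiceDecode() {
	br := newBitReader([]byte{0x1B})
	rd := newRiceDecoder(br, 2) // k = 2

	v, err := rd.ReadValue()
	if err != nil {
		panic(err)
	}
	fmt.Println(v) // 11: quotient 2 (unary "110") and remainder 3 ("11", LSB first)
}
```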
|
||||
|
||||
// The bitReader provides functionality to read bits from a slice of bytes.
|
||||
//
|
||||
// Logically, the bit stream is constructed such that the first byte of buf
// represents the first bits in the stream. Within a byte, the least-significant
// bits come before the most-significant bits in the bit stream.
|
||||
//
|
||||
// This is the same bit stream format as DEFLATE (RFC 1951).
|
||||
type bitReader struct {
|
||||
buf []byte
|
||||
mask byte
|
||||
}
|
||||
|
||||
func newBitReader(buf []byte) *bitReader {
|
||||
return &bitReader{buf, 0x01}
|
||||
}
|
||||
|
||||
func (br *bitReader) ReadBits(n int) (uint32, error) {
|
||||
if n < 0 || n > 32 {
|
||||
panic("invalid number of bits")
|
||||
}
|
||||
|
||||
var v uint32
|
||||
for i := 0; i < n; i++ {
|
||||
if len(br.buf) == 0 {
|
||||
return v, io.ErrUnexpectedEOF
|
||||
}
|
||||
if br.buf[0]&br.mask > 0 {
|
||||
v |= 1 << uint(i)
|
||||
}
|
||||
br.mask <<= 1
|
||||
if br.mask == 0 {
|
||||
br.buf, br.mask = br.buf[1:], 0x01
|
||||
}
|
||||
}
|
||||
return v, nil
|
||||
}
|
||||
|
||||
// BitsRemaining reports the number of bits left to read.
|
||||
func (br *bitReader) BitsRemaining() int {
|
||||
n := 8 * len(br.buf)
|
||||
for m := br.mask | 1; m != 1; m >>= 1 {
|
||||
n--
|
||||
}
|
||||
return n
|
||||
}
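
To make the DEFLATE-style bit order concrete, a hypothetical in-package sketch (not part of the vendored code; assumes an `fmt` import): reading three bits from the byte 0x05 consumes the least-significant bits first and leaves five bits unread.

```
// Hypothetical in-package sketch.
func sketchBitOrder() {
	br := newBitReader([]byte{0x05}) // 0b00000101
	v, _ := br.ReadBits(3)           // consumes bits 1, 0, 1 (LSB first)
	fmt.Println(v)                   // 5
	fmt.Println(br.BitsRemaining())  // 5
}
```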
29 vendor/github.com/google/safebrowsing/internal/safebrowsing_proto/generate.sh generated vendored Executable file
@ -0,0 +1,29 @@
#!/bin/bash
|
||||
# Copyright 2016 Google Inc. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
set -e
|
||||
|
||||
# This script builds the generated Go code for the protocol buffers.
|
||||
# The protoc and protoc-gen-go tools must be installed. The recommended versions are:
|
||||
#
|
||||
# github.com/google/protobuf: v3.0.0-beta-3
|
||||
# github.com/golang/protobuf: 7cc19b78d562895b13596ddce7aafb59dd789318
|
||||
for TOOL in protoc protoc-gen-go; do
|
||||
command -v $TOOL >/dev/null 2>&1 || { echo "Could not locate $TOOL. Aborting." >&2; exit 1; }
|
||||
done
|
||||
|
||||
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
cd $DIR
|
||||
|
||||
protoc --go_out=. *.proto
961 vendor/github.com/google/safebrowsing/internal/safebrowsing_proto/safebrowsing.pb.go generated vendored Normal file
@ -0,0 +1,961 @@
// Code generated by protoc-gen-go.
|
||||
// source: safebrowsing.proto
|
||||
// DO NOT EDIT!
|
||||
|
||||
/*
|
||||
Package safebrowsing_proto is a generated protocol buffer package.
|
||||
|
||||
It is generated from these files:
|
||||
safebrowsing.proto
|
||||
|
||||
It has these top-level messages:
|
||||
ThreatInfo
|
||||
ThreatMatch
|
||||
FindThreatMatchesRequest
|
||||
FindThreatMatchesResponse
|
||||
FetchThreatListUpdatesRequest
|
||||
FetchThreatListUpdatesResponse
|
||||
FindFullHashesRequest
|
||||
FindFullHashesResponse
|
||||
ClientInfo
|
||||
Checksum
|
||||
ThreatEntry
|
||||
ThreatEntrySet
|
||||
RawIndices
|
||||
RawHashes
|
||||
RiceDeltaEncoding
|
||||
ThreatEntryMetadata
|
||||
ThreatListDescriptor
|
||||
ListThreatListsResponse
|
||||
Duration
|
||||
*/
|
||||
package safebrowsing_proto
|
||||
|
||||
import proto "github.com/golang/protobuf/proto"
|
||||
import fmt "fmt"
|
||||
import math "math"
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal
|
||||
var _ = fmt.Errorf
|
||||
var _ = math.Inf
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the proto package it is being compiled against.
|
||||
const _ = proto.ProtoPackageIsVersion1
|
||||
|
||||
// Types of threats.
|
||||
type ThreatType int32
|
||||
|
||||
const (
|
||||
// Unknown.
|
||||
ThreatType_THREAT_TYPE_UNSPECIFIED ThreatType = 0
|
||||
// Malware threat type.
|
||||
ThreatType_MALWARE ThreatType = 1
|
||||
// Social engineering threat type.
|
||||
ThreatType_SOCIAL_ENGINEERING ThreatType = 2
|
||||
// Unwanted software threat type.
|
||||
ThreatType_UNWANTED_SOFTWARE ThreatType = 3
|
||||
// Potentially harmful application threat type.
|
||||
ThreatType_POTENTIALLY_HARMFUL_APPLICATION ThreatType = 4
|
||||
)
|
||||
|
||||
var ThreatType_name = map[int32]string{
|
||||
0: "THREAT_TYPE_UNSPECIFIED",
|
||||
1: "MALWARE",
|
||||
2: "SOCIAL_ENGINEERING",
|
||||
3: "UNWANTED_SOFTWARE",
|
||||
4: "POTENTIALLY_HARMFUL_APPLICATION",
|
||||
}
|
||||
var ThreatType_value = map[string]int32{
|
||||
"THREAT_TYPE_UNSPECIFIED": 0,
|
||||
"MALWARE": 1,
|
||||
"SOCIAL_ENGINEERING": 2,
|
||||
"UNWANTED_SOFTWARE": 3,
|
||||
"POTENTIALLY_HARMFUL_APPLICATION": 4,
|
||||
}
|
||||
|
||||
func (x ThreatType) String() string {
|
||||
return proto.EnumName(ThreatType_name, int32(x))
|
||||
}
|
||||
func (ThreatType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
|
||||
|
||||
// Types of platforms.
|
||||
type PlatformType int32
|
||||
|
||||
const (
|
||||
// Unknown platform.
|
||||
PlatformType_PLATFORM_TYPE_UNSPECIFIED PlatformType = 0
|
||||
// Threat posed to Windows.
|
||||
PlatformType_WINDOWS PlatformType = 1
|
||||
// Threat posed to Linux.
|
||||
PlatformType_LINUX PlatformType = 2
|
||||
// Threat posed to Android.
|
||||
PlatformType_ANDROID PlatformType = 3
|
||||
// Threat posed to OSX.
|
||||
PlatformType_OSX PlatformType = 4
|
||||
// Threat posed to iOS.
|
||||
PlatformType_IOS PlatformType = 5
|
||||
// Threat posed to at least one of the defined platforms.
|
||||
PlatformType_ANY_PLATFORM PlatformType = 6
|
||||
// Threat posed to all defined platforms.
|
||||
PlatformType_ALL_PLATFORMS PlatformType = 7
|
||||
// Threat posed to Chrome.
|
||||
PlatformType_CHROME PlatformType = 8
|
||||
)
|
||||
|
||||
var PlatformType_name = map[int32]string{
|
||||
0: "PLATFORM_TYPE_UNSPECIFIED",
|
||||
1: "WINDOWS",
|
||||
2: "LINUX",
|
||||
3: "ANDROID",
|
||||
4: "OSX",
|
||||
5: "IOS",
|
||||
6: "ANY_PLATFORM",
|
||||
7: "ALL_PLATFORMS",
|
||||
8: "CHROME",
|
||||
}
|
||||
var PlatformType_value = map[string]int32{
|
||||
"PLATFORM_TYPE_UNSPECIFIED": 0,
|
||||
"WINDOWS": 1,
|
||||
"LINUX": 2,
|
||||
"ANDROID": 3,
|
||||
"OSX": 4,
|
||||
"IOS": 5,
|
||||
"ANY_PLATFORM": 6,
|
||||
"ALL_PLATFORMS": 7,
|
||||
"CHROME": 8,
|
||||
}
|
||||
|
||||
func (x PlatformType) String() string {
|
||||
return proto.EnumName(PlatformType_name, int32(x))
|
||||
}
|
||||
func (PlatformType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }
|
||||
|
||||
// The ways in which threat entry sets can be compressed.
|
||||
type CompressionType int32
|
||||
|
||||
const (
|
||||
// Unknown.
|
||||
CompressionType_COMPRESSION_TYPE_UNSPECIFIED CompressionType = 0
|
||||
// Raw, uncompressed data.
|
||||
CompressionType_RAW CompressionType = 1
|
||||
// Rice-Golomb encoded data.
|
||||
CompressionType_RICE CompressionType = 2
|
||||
)
|
||||
|
||||
var CompressionType_name = map[int32]string{
|
||||
0: "COMPRESSION_TYPE_UNSPECIFIED",
|
||||
1: "RAW",
|
||||
2: "RICE",
|
||||
}
|
||||
var CompressionType_value = map[string]int32{
|
||||
"COMPRESSION_TYPE_UNSPECIFIED": 0,
|
||||
"RAW": 1,
|
||||
"RICE": 2,
|
||||
}
|
||||
|
||||
func (x CompressionType) String() string {
|
||||
return proto.EnumName(CompressionType_name, int32(x))
|
||||
}
|
||||
func (CompressionType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }
|
||||
|
||||
// Types of entries that pose threats. Threat lists are collections of entries
|
||||
// of a single type.
|
||||
type ThreatEntryType int32
|
||||
|
||||
const (
|
||||
// Unspecified.
|
||||
ThreatEntryType_THREAT_ENTRY_TYPE_UNSPECIFIED ThreatEntryType = 0
|
||||
// A URL.
|
||||
ThreatEntryType_URL ThreatEntryType = 1
|
||||
// An executable program.
|
||||
ThreatEntryType_EXECUTABLE ThreatEntryType = 2
|
||||
// An IP range.
|
||||
ThreatEntryType_IP_RANGE ThreatEntryType = 3
|
||||
)
|
||||
|
||||
var ThreatEntryType_name = map[int32]string{
|
||||
0: "THREAT_ENTRY_TYPE_UNSPECIFIED",
|
||||
1: "URL",
|
||||
2: "EXECUTABLE",
|
||||
3: "IP_RANGE",
|
||||
}
|
||||
var ThreatEntryType_value = map[string]int32{
|
||||
"THREAT_ENTRY_TYPE_UNSPECIFIED": 0,
|
||||
"URL": 1,
|
||||
"EXECUTABLE": 2,
|
||||
"IP_RANGE": 3,
|
||||
}
|
||||
|
||||
func (x ThreatEntryType) String() string {
|
||||
return proto.EnumName(ThreatEntryType_name, int32(x))
|
||||
}
|
||||
func (ThreatEntryType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{3} }
|
||||
|
||||
// The type of response sent to the client.
|
||||
type FetchThreatListUpdatesResponse_ListUpdateResponse_ResponseType int32
|
||||
|
||||
const (
|
||||
// Unknown.
|
||||
FetchThreatListUpdatesResponse_ListUpdateResponse_RESPONSE_TYPE_UNSPECIFIED FetchThreatListUpdatesResponse_ListUpdateResponse_ResponseType = 0
|
||||
// Partial updates are applied to the client's existing local database.
|
||||
FetchThreatListUpdatesResponse_ListUpdateResponse_PARTIAL_UPDATE FetchThreatListUpdatesResponse_ListUpdateResponse_ResponseType = 1
|
||||
// Full updates replace the client's entire local database. This means
|
||||
// that either the client was seriously out-of-date or the client is
|
||||
// believed to be corrupt.
|
||||
FetchThreatListUpdatesResponse_ListUpdateResponse_FULL_UPDATE FetchThreatListUpdatesResponse_ListUpdateResponse_ResponseType = 2
|
||||
)
|
||||
|
||||
var FetchThreatListUpdatesResponse_ListUpdateResponse_ResponseType_name = map[int32]string{
|
||||
0: "RESPONSE_TYPE_UNSPECIFIED",
|
||||
1: "PARTIAL_UPDATE",
|
||||
2: "FULL_UPDATE",
|
||||
}
|
||||
var FetchThreatListUpdatesResponse_ListUpdateResponse_ResponseType_value = map[string]int32{
|
||||
"RESPONSE_TYPE_UNSPECIFIED": 0,
|
||||
"PARTIAL_UPDATE": 1,
|
||||
"FULL_UPDATE": 2,
|
||||
}
|
||||
|
||||
func (x FetchThreatListUpdatesResponse_ListUpdateResponse_ResponseType) String() string {
|
||||
return proto.EnumName(FetchThreatListUpdatesResponse_ListUpdateResponse_ResponseType_name, int32(x))
|
||||
}
|
||||
func (FetchThreatListUpdatesResponse_ListUpdateResponse_ResponseType) EnumDescriptor() ([]byte, []int) {
|
||||
return fileDescriptor0, []int{5, 0, 0}
|
||||
}
|
||||
|
||||
// The information regarding one or more threats that a client submits when
|
||||
// checking for matches in threat lists.
|
||||
type ThreatInfo struct {
|
||||
// The threat types to be checked.
|
||||
ThreatTypes []ThreatType `protobuf:"varint,1,rep,name=threat_types,json=threatTypes,enum=safebrowsing_proto.ThreatType" json:"threat_types,omitempty"`
|
||||
// The platform types to be checked.
|
||||
PlatformTypes []PlatformType `protobuf:"varint,2,rep,name=platform_types,json=platformTypes,enum=safebrowsing_proto.PlatformType" json:"platform_types,omitempty"`
|
||||
// The entry types to be checked.
|
||||
ThreatEntryTypes []ThreatEntryType `protobuf:"varint,4,rep,name=threat_entry_types,json=threatEntryTypes,enum=safebrowsing_proto.ThreatEntryType" json:"threat_entry_types,omitempty"`
|
||||
// The threat entries to be checked.
|
||||
ThreatEntries []*ThreatEntry `protobuf:"bytes,3,rep,name=threat_entries,json=threatEntries" json:"threat_entries,omitempty"`
|
||||
}
|
||||
|
||||
func (m *ThreatInfo) Reset() { *m = ThreatInfo{} }
|
||||
func (m *ThreatInfo) String() string { return proto.CompactTextString(m) }
|
||||
func (*ThreatInfo) ProtoMessage() {}
|
||||
func (*ThreatInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
|
||||
|
||||
func (m *ThreatInfo) GetThreatEntries() []*ThreatEntry {
|
||||
if m != nil {
|
||||
return m.ThreatEntries
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// A match when checking a threat entry in the Safe Browsing threat lists.
|
||||
type ThreatMatch struct {
|
||||
// The threat type matching this threat.
|
||||
ThreatType ThreatType `protobuf:"varint,1,opt,name=threat_type,json=threatType,enum=safebrowsing_proto.ThreatType" json:"threat_type,omitempty"`
|
||||
// The platform type matching this threat.
|
||||
PlatformType PlatformType `protobuf:"varint,2,opt,name=platform_type,json=platformType,enum=safebrowsing_proto.PlatformType" json:"platform_type,omitempty"`
|
||||
// The threat entry type matching this threat.
|
||||
ThreatEntryType ThreatEntryType `protobuf:"varint,6,opt,name=threat_entry_type,json=threatEntryType,enum=safebrowsing_proto.ThreatEntryType" json:"threat_entry_type,omitempty"`
|
||||
// The threat matching this threat.
|
||||
Threat *ThreatEntry `protobuf:"bytes,3,opt,name=threat" json:"threat,omitempty"`
|
||||
// Optional metadata associated with this threat.
|
||||
ThreatEntryMetadata *ThreatEntryMetadata `protobuf:"bytes,4,opt,name=threat_entry_metadata,json=threatEntryMetadata" json:"threat_entry_metadata,omitempty"`
|
||||
// The cache lifetime for the returned match. Clients must not cache this
|
||||
// response for more than this duration to avoid false positives.
|
||||
CacheDuration *Duration `protobuf:"bytes,5,opt,name=cache_duration,json=cacheDuration" json:"cache_duration,omitempty"`
|
||||
}
|
||||
|
||||
func (m *ThreatMatch) Reset() { *m = ThreatMatch{} }
|
||||
func (m *ThreatMatch) String() string { return proto.CompactTextString(m) }
|
||||
func (*ThreatMatch) ProtoMessage() {}
|
||||
func (*ThreatMatch) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }
|
||||
|
||||
func (m *ThreatMatch) GetThreat() *ThreatEntry {
|
||||
if m != nil {
|
||||
return m.Threat
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *ThreatMatch) GetThreatEntryMetadata() *ThreatEntryMetadata {
|
||||
if m != nil {
|
||||
return m.ThreatEntryMetadata
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *ThreatMatch) GetCacheDuration() *Duration {
|
||||
if m != nil {
|
||||
return m.CacheDuration
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Request to check entries against lists.
|
||||
type FindThreatMatchesRequest struct {
|
||||
// The client metadata.
|
||||
Client *ClientInfo `protobuf:"bytes,1,opt,name=client" json:"client,omitempty"`
|
||||
// The lists and entries to be checked for matches.
|
||||
ThreatInfo *ThreatInfo `protobuf:"bytes,2,opt,name=threat_info,json=threatInfo" json:"threat_info,omitempty"`
|
||||
}
|
||||
|
||||
func (m *FindThreatMatchesRequest) Reset() { *m = FindThreatMatchesRequest{} }
|
||||
func (m *FindThreatMatchesRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*FindThreatMatchesRequest) ProtoMessage() {}
|
||||
func (*FindThreatMatchesRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }
|
||||
|
||||
func (m *FindThreatMatchesRequest) GetClient() *ClientInfo {
|
||||
if m != nil {
|
||||
return m.Client
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *FindThreatMatchesRequest) GetThreatInfo() *ThreatInfo {
|
||||
if m != nil {
|
||||
return m.ThreatInfo
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Response type for requests to find threat matches.
|
||||
type FindThreatMatchesResponse struct {
|
||||
// The threat list matches.
|
||||
Matches []*ThreatMatch `protobuf:"bytes,1,rep,name=matches" json:"matches,omitempty"`
|
||||
}
|
||||
|
||||
func (m *FindThreatMatchesResponse) Reset() { *m = FindThreatMatchesResponse{} }
|
||||
func (m *FindThreatMatchesResponse) String() string { return proto.CompactTextString(m) }
|
||||
func (*FindThreatMatchesResponse) ProtoMessage() {}
|
||||
func (*FindThreatMatchesResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} }
|
||||
|
||||
func (m *FindThreatMatchesResponse) GetMatches() []*ThreatMatch {
|
||||
if m != nil {
|
||||
return m.Matches
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Describes a Safe Browsing API update request. Clients can request updates for
|
||||
// multiple lists in a single request.
|
||||
// NOTE: Field index 2 is unused.
|
||||
type FetchThreatListUpdatesRequest struct {
|
||||
// The client metadata.
|
||||
Client *ClientInfo `protobuf:"bytes,1,opt,name=client" json:"client,omitempty"`
|
||||
// The requested threat list updates.
|
||||
ListUpdateRequests []*FetchThreatListUpdatesRequest_ListUpdateRequest `protobuf:"bytes,3,rep,name=list_update_requests,json=listUpdateRequests" json:"list_update_requests,omitempty"`
|
||||
}
|
||||
|
||||
func (m *FetchThreatListUpdatesRequest) Reset() { *m = FetchThreatListUpdatesRequest{} }
|
||||
func (m *FetchThreatListUpdatesRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*FetchThreatListUpdatesRequest) ProtoMessage() {}
|
||||
func (*FetchThreatListUpdatesRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} }
|
||||
|
||||
func (m *FetchThreatListUpdatesRequest) GetClient() *ClientInfo {
|
||||
if m != nil {
|
||||
return m.Client
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *FetchThreatListUpdatesRequest) GetListUpdateRequests() []*FetchThreatListUpdatesRequest_ListUpdateRequest {
|
||||
if m != nil {
|
||||
return m.ListUpdateRequests
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// A single list update request.
|
||||
type FetchThreatListUpdatesRequest_ListUpdateRequest struct {
|
||||
// The type of threat posed by entries present in the list.
|
||||
ThreatType ThreatType `protobuf:"varint,1,opt,name=threat_type,json=threatType,enum=safebrowsing_proto.ThreatType" json:"threat_type,omitempty"`
|
||||
// The type of platform at risk by entries present in the list.
|
||||
PlatformType PlatformType `protobuf:"varint,2,opt,name=platform_type,json=platformType,enum=safebrowsing_proto.PlatformType" json:"platform_type,omitempty"`
|
||||
// The types of entries present in the list.
|
||||
ThreatEntryType ThreatEntryType `protobuf:"varint,5,opt,name=threat_entry_type,json=threatEntryType,enum=safebrowsing_proto.ThreatEntryType" json:"threat_entry_type,omitempty"`
|
||||
// The current state of the client for the requested list (the encrypted
|
||||
// ClientState that was sent to the client from the previous update
|
||||
// request).
|
||||
State []byte `protobuf:"bytes,3,opt,name=state,proto3" json:"state,omitempty"`
|
||||
// The constraints associated with this request.
|
||||
Constraints *FetchThreatListUpdatesRequest_ListUpdateRequest_Constraints `protobuf:"bytes,4,opt,name=constraints" json:"constraints,omitempty"`
|
||||
}
|
||||
|
||||
func (m *FetchThreatListUpdatesRequest_ListUpdateRequest) Reset() {
|
||||
*m = FetchThreatListUpdatesRequest_ListUpdateRequest{}
|
||||
}
|
||||
func (m *FetchThreatListUpdatesRequest_ListUpdateRequest) String() string {
|
||||
return proto.CompactTextString(m)
|
||||
}
|
||||
func (*FetchThreatListUpdatesRequest_ListUpdateRequest) ProtoMessage() {}
|
||||
func (*FetchThreatListUpdatesRequest_ListUpdateRequest) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor0, []int{4, 0}
|
||||
}
|
||||
|
||||
func (m *FetchThreatListUpdatesRequest_ListUpdateRequest) GetConstraints() *FetchThreatListUpdatesRequest_ListUpdateRequest_Constraints {
|
||||
if m != nil {
|
||||
return m.Constraints
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// The constraints for this update.
|
||||
type FetchThreatListUpdatesRequest_ListUpdateRequest_Constraints struct {
|
||||
// The maximum size in number of entries. The update will not contain more
|
||||
// entries than this value. This should be a power of 2 between 2**10 and
|
||||
// 2**20. If zero, no update size limit is set.
|
||||
MaxUpdateEntries int32 `protobuf:"varint,1,opt,name=max_update_entries,json=maxUpdateEntries" json:"max_update_entries,omitempty"`
|
||||
// Sets the maximum number of entries that the client is willing to have
|
||||
// in the local database. This should be a power of 2 between 2**10 and
|
||||
// 2**20. If zero, no database size limit is set.
|
||||
MaxDatabaseEntries int32 `protobuf:"varint,2,opt,name=max_database_entries,json=maxDatabaseEntries" json:"max_database_entries,omitempty"`
|
||||
// Requests the list for a specific geographic location. If not set the
|
||||
// server may pick that value based on the user's IP address. Expects ISO
|
||||
// 3166-1 alpha-2 format.
|
||||
Region string `protobuf:"bytes,3,opt,name=region" json:"region,omitempty"`
|
||||
// The compression types supported by the client.
|
||||
SupportedCompressions []CompressionType `protobuf:"varint,4,rep,name=supported_compressions,json=supportedCompressions,enum=safebrowsing_proto.CompressionType" json:"supported_compressions,omitempty"`
|
||||
}
|
||||
|
||||
func (m *FetchThreatListUpdatesRequest_ListUpdateRequest_Constraints) Reset() {
|
||||
*m = FetchThreatListUpdatesRequest_ListUpdateRequest_Constraints{}
|
||||
}
|
||||
func (m *FetchThreatListUpdatesRequest_ListUpdateRequest_Constraints) String() string {
|
||||
return proto.CompactTextString(m)
|
||||
}
|
||||
func (*FetchThreatListUpdatesRequest_ListUpdateRequest_Constraints) ProtoMessage() {}
|
||||
func (*FetchThreatListUpdatesRequest_ListUpdateRequest_Constraints) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor0, []int{4, 0, 0}
|
||||
}
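
Putting the request messages above together, a hypothetical sketch of one list-update request with constraints (field values are illustrative only; `pb` is the import alias used for this package elsewhere in the PR):

```
// Hypothetical sketch using the generated types; values are illustrative only.
func sketchUpdateRequest() *pb.FetchThreatListUpdatesRequest {
	return &pb.FetchThreatListUpdatesRequest{
		Client: &pb.ClientInfo{ClientId: "example-client", ClientVersion: "1.0"},
		ListUpdateRequests: []*pb.FetchThreatListUpdatesRequest_ListUpdateRequest{{
			ThreatType:      pb.ThreatType_MALWARE,
			PlatformType:    pb.PlatformType_ANY_PLATFORM,
			ThreatEntryType: pb.ThreatEntryType_URL,
			State:           nil, // no previous local state for this list
			Constraints: &pb.FetchThreatListUpdatesRequest_ListUpdateRequest_Constraints{
				MaxUpdateEntries:      1 << 12, // a power of 2 between 2**10 and 2**20
				MaxDatabaseEntries:    1 << 16,
				SupportedCompressions: []pb.CompressionType{pb.CompressionType_RAW},
			},
		}},
	}
}
```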
|
||||
|
||||
// Response type for threat list update requests.
|
||||
type FetchThreatListUpdatesResponse struct {
|
||||
// The list updates requested by the clients.
|
||||
ListUpdateResponses []*FetchThreatListUpdatesResponse_ListUpdateResponse `protobuf:"bytes,1,rep,name=list_update_responses,json=listUpdateResponses" json:"list_update_responses,omitempty"`
|
||||
// The minimum duration the client must wait before issuing any update
|
||||
// request. If this field is not set clients may update as soon as they want.
|
||||
MinimumWaitDuration *Duration `protobuf:"bytes,2,opt,name=minimum_wait_duration,json=minimumWaitDuration" json:"minimum_wait_duration,omitempty"`
|
||||
}
|
||||
|
||||
func (m *FetchThreatListUpdatesResponse) Reset() { *m = FetchThreatListUpdatesResponse{} }
|
||||
func (m *FetchThreatListUpdatesResponse) String() string { return proto.CompactTextString(m) }
|
||||
func (*FetchThreatListUpdatesResponse) ProtoMessage() {}
|
||||
func (*FetchThreatListUpdatesResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} }
|
||||
|
||||
func (m *FetchThreatListUpdatesResponse) GetListUpdateResponses() []*FetchThreatListUpdatesResponse_ListUpdateResponse {
|
||||
if m != nil {
|
||||
return m.ListUpdateResponses
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *FetchThreatListUpdatesResponse) GetMinimumWaitDuration() *Duration {
|
||||
if m != nil {
|
||||
return m.MinimumWaitDuration
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// An update to an individual list.
|
||||
type FetchThreatListUpdatesResponse_ListUpdateResponse struct {
|
||||
// The threat type for which data is returned.
|
||||
ThreatType ThreatType `protobuf:"varint,1,opt,name=threat_type,json=threatType,enum=safebrowsing_proto.ThreatType" json:"threat_type,omitempty"`
|
||||
// The format of the threats.
|
||||
ThreatEntryType ThreatEntryType `protobuf:"varint,2,opt,name=threat_entry_type,json=threatEntryType,enum=safebrowsing_proto.ThreatEntryType" json:"threat_entry_type,omitempty"`
|
||||
// The platform type for which data is returned.
|
||||
PlatformType PlatformType `protobuf:"varint,3,opt,name=platform_type,json=platformType,enum=safebrowsing_proto.PlatformType" json:"platform_type,omitempty"`
|
||||
// The type of response. This may indicate that an action is required by the
|
||||
// client when the response is received.
|
||||
ResponseType FetchThreatListUpdatesResponse_ListUpdateResponse_ResponseType `protobuf:"varint,4,opt,name=response_type,json=responseType,enum=safebrowsing_proto.FetchThreatListUpdatesResponse_ListUpdateResponse_ResponseType" json:"response_type,omitempty"`
|
||||
// A set of entries to add to a local threat type's list. Repeated to allow
|
||||
// for a combination of compressed and raw data to be sent in a single
|
||||
// response.
|
||||
Additions []*ThreatEntrySet `protobuf:"bytes,5,rep,name=additions" json:"additions,omitempty"`
|
||||
// A set of entries to remove from a local threat type's list. Repeated for
|
||||
// the same reason as above.
|
||||
Removals []*ThreatEntrySet `protobuf:"bytes,6,rep,name=removals" json:"removals,omitempty"`
|
||||
// The new client state, in encrypted format. Opaque to clients.
|
||||
NewClientState []byte `protobuf:"bytes,7,opt,name=new_client_state,json=newClientState,proto3" json:"new_client_state,omitempty"`
|
||||
// The expected SHA256 hash of the client state; that is, of the sorted list
|
||||
// of all hashes present in the database after applying the provided update.
|
||||
// If the client state doesn't match the expected state, the client must
|
||||
// disregard this update and retry later.
|
||||
Checksum *Checksum `protobuf:"bytes,8,opt,name=checksum" json:"checksum,omitempty"`
|
||||
}
|
||||
|
||||
func (m *FetchThreatListUpdatesResponse_ListUpdateResponse) Reset() {
|
||||
*m = FetchThreatListUpdatesResponse_ListUpdateResponse{}
|
||||
}
|
||||
func (m *FetchThreatListUpdatesResponse_ListUpdateResponse) String() string {
|
||||
return proto.CompactTextString(m)
|
||||
}
|
||||
func (*FetchThreatListUpdatesResponse_ListUpdateResponse) ProtoMessage() {}
|
||||
func (*FetchThreatListUpdatesResponse_ListUpdateResponse) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor0, []int{5, 0}
|
||||
}
|
||||
|
||||
func (m *FetchThreatListUpdatesResponse_ListUpdateResponse) GetAdditions() []*ThreatEntrySet {
|
||||
if m != nil {
|
||||
return m.Additions
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *FetchThreatListUpdatesResponse_ListUpdateResponse) GetRemovals() []*ThreatEntrySet {
|
||||
if m != nil {
|
||||
return m.Removals
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *FetchThreatListUpdatesResponse_ListUpdateResponse) GetChecksum() *Checksum {
|
||||
if m != nil {
|
||||
return m.Checksum
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Request to return full hashes matched by the provided hash prefixes.
|
||||
type FindFullHashesRequest struct {
|
||||
// The client metadata.
|
||||
Client *ClientInfo `protobuf:"bytes,1,opt,name=client" json:"client,omitempty"`
|
||||
// The current client states for each of the client's local threat lists.
|
||||
ClientStates [][]byte `protobuf:"bytes,2,rep,name=client_states,json=clientStates,proto3" json:"client_states,omitempty"`
|
||||
// The lists and hashes to be checked.
|
||||
ThreatInfo *ThreatInfo `protobuf:"bytes,3,opt,name=threat_info,json=threatInfo" json:"threat_info,omitempty"`
|
||||
}
|
||||
|
||||
func (m *FindFullHashesRequest) Reset() { *m = FindFullHashesRequest{} }
|
||||
func (m *FindFullHashesRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*FindFullHashesRequest) ProtoMessage() {}
|
||||
func (*FindFullHashesRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} }
|
||||
|
||||
func (m *FindFullHashesRequest) GetClient() *ClientInfo {
|
||||
if m != nil {
|
||||
return m.Client
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *FindFullHashesRequest) GetThreatInfo() *ThreatInfo {
|
||||
if m != nil {
|
||||
return m.ThreatInfo
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Response type for requests to find full hashes.
|
||||
type FindFullHashesResponse struct {
|
||||
// The full hashes that matched the requested prefixes.
|
||||
Matches []*ThreatMatch `protobuf:"bytes,1,rep,name=matches" json:"matches,omitempty"`
|
||||
// The minimum duration the client must wait before issuing any find hashes
|
||||
// request. If this field is not set, clients can issue a request as soon as
|
||||
// they want.
|
||||
MinimumWaitDuration *Duration `protobuf:"bytes,2,opt,name=minimum_wait_duration,json=minimumWaitDuration" json:"minimum_wait_duration,omitempty"`
|
||||
// For requested entities that did not match the threat list, how long to
|
||||
// cache the response.
|
||||
NegativeCacheDuration *Duration `protobuf:"bytes,3,opt,name=negative_cache_duration,json=negativeCacheDuration" json:"negative_cache_duration,omitempty"`
|
||||
}
|
||||
|
||||
func (m *FindFullHashesResponse) Reset() { *m = FindFullHashesResponse{} }
|
||||
func (m *FindFullHashesResponse) String() string { return proto.CompactTextString(m) }
|
||||
func (*FindFullHashesResponse) ProtoMessage() {}
|
||||
func (*FindFullHashesResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} }
|
||||
|
||||
func (m *FindFullHashesResponse) GetMatches() []*ThreatMatch {
|
||||
if m != nil {
|
||||
return m.Matches
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *FindFullHashesResponse) GetMinimumWaitDuration() *Duration {
|
||||
if m != nil {
|
||||
return m.MinimumWaitDuration
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *FindFullHashesResponse) GetNegativeCacheDuration() *Duration {
|
||||
if m != nil {
|
||||
return m.NegativeCacheDuration
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// The client metadata associated with Safe Browsing API requests.
|
||||
type ClientInfo struct {
|
||||
// A client ID that (hopefully) uniquely identifies the client implementation
|
||||
// of the Safe Browsing API.
|
||||
ClientId string `protobuf:"bytes,1,opt,name=client_id,json=clientId" json:"client_id,omitempty"`
|
||||
// The version of the client implementation.
|
||||
ClientVersion string `protobuf:"bytes,2,opt,name=client_version,json=clientVersion" json:"client_version,omitempty"`
|
||||
}
|
||||
|
||||
func (m *ClientInfo) Reset() { *m = ClientInfo{} }
|
||||
func (m *ClientInfo) String() string { return proto.CompactTextString(m) }
|
||||
func (*ClientInfo) ProtoMessage() {}
|
||||
func (*ClientInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} }
|
||||
|
||||
// The expected state of a client's local database.
|
||||
type Checksum struct {
|
||||
// The SHA256 hash of the client state; that is, of the sorted list of all
|
||||
// hashes present in the database.
|
||||
Sha256 []byte `protobuf:"bytes,1,opt,name=sha256,proto3" json:"sha256,omitempty"`
|
||||
}
|
||||
|
||||
func (m *Checksum) Reset() { *m = Checksum{} }
|
||||
func (m *Checksum) String() string { return proto.CompactTextString(m) }
|
||||
func (*Checksum) ProtoMessage() {}
|
||||
func (*Checksum) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} }
|
||||
|
||||
// An individual threat; for example, a malicious URL or its hash
|
||||
// representation. Only one of these fields should be set.
|
||||
type ThreatEntry struct {
|
||||
// A hash prefix, consisting of the most significant 4-32 bytes of a SHA256
|
||||
// hash.
|
||||
Hash []byte `protobuf:"bytes,1,opt,name=hash,proto3" json:"hash,omitempty"`
|
||||
// A URL.
|
||||
Url string `protobuf:"bytes,2,opt,name=url" json:"url,omitempty"`
|
||||
}
|
||||
|
||||
func (m *ThreatEntry) Reset() { *m = ThreatEntry{} }
|
||||
func (m *ThreatEntry) String() string { return proto.CompactTextString(m) }
|
||||
func (*ThreatEntry) ProtoMessage() {}
|
||||
func (*ThreatEntry) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} }
|
||||
|
||||
// A set of threats that should be added or removed from a client's local
|
||||
// database.
|
||||
type ThreatEntrySet struct {
|
||||
// The compression type for the entries in this set.
|
||||
CompressionType CompressionType `protobuf:"varint,1,opt,name=compression_type,json=compressionType,enum=safebrowsing_proto.CompressionType" json:"compression_type,omitempty"`
|
||||
// The raw SHA256-formatted entries.
|
||||
RawHashes *RawHashes `protobuf:"bytes,2,opt,name=raw_hashes,json=rawHashes" json:"raw_hashes,omitempty"`
|
||||
// The raw removal indices for a local list.
|
||||
RawIndices *RawIndices `protobuf:"bytes,3,opt,name=raw_indices,json=rawIndices" json:"raw_indices,omitempty"`
|
||||
// The encoded 4-byte prefixes of SHA256-formatted entries, using a
|
||||
// Golomb-Rice encoding.
|
||||
RiceHashes *RiceDeltaEncoding `protobuf:"bytes,4,opt,name=rice_hashes,json=riceHashes" json:"rice_hashes,omitempty"`
|
||||
// The encoded local, lexicographically-sorted list indices, using a
|
||||
// Golomb-Rice encoding. Used for sending compressed removal indices.
|
||||
RiceIndices *RiceDeltaEncoding `protobuf:"bytes,5,opt,name=rice_indices,json=riceIndices" json:"rice_indices,omitempty"`
|
||||
}
|
||||
|
||||
func (m *ThreatEntrySet) Reset() { *m = ThreatEntrySet{} }
|
||||
func (m *ThreatEntrySet) String() string { return proto.CompactTextString(m) }
|
||||
func (*ThreatEntrySet) ProtoMessage() {}
|
||||
func (*ThreatEntrySet) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} }
|
||||
|
||||
func (m *ThreatEntrySet) GetRawHashes() *RawHashes {
|
||||
if m != nil {
|
||||
return m.RawHashes
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *ThreatEntrySet) GetRawIndices() *RawIndices {
|
||||
if m != nil {
|
||||
return m.RawIndices
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *ThreatEntrySet) GetRiceHashes() *RiceDeltaEncoding {
|
||||
if m != nil {
|
||||
return m.RiceHashes
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *ThreatEntrySet) GetRiceIndices() *RiceDeltaEncoding {
|
||||
if m != nil {
|
||||
return m.RiceIndices
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// A set of raw indices to remove from a local list.
|
||||
type RawIndices struct {
|
||||
// The indices to remove from a lexicographically-sorted local list.
|
||||
Indices []int32 `protobuf:"varint,1,rep,name=indices" json:"indices,omitempty"`
|
||||
}
|
||||
|
||||
func (m *RawIndices) Reset() { *m = RawIndices{} }
|
||||
func (m *RawIndices) String() string { return proto.CompactTextString(m) }
|
||||
func (*RawIndices) ProtoMessage() {}
|
||||
func (*RawIndices) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} }
|
||||
|
||||
// The uncompressed threat entries in hash format of a particular prefix length.
|
||||
// Hashes can be anywhere from 4 to 32 bytes in size. A large majority are 4
|
||||
// bytes, but some hashes are lengthened if they collide with the hash of a
|
||||
// popular URL.
|
||||
//
|
||||
// Used for sending ThreatEntrySet to clients that do not support compression,
|
||||
// or when sending non-4-byte hashes to clients that do support compression.
|
||||
type RawHashes struct {
|
||||
// The number of bytes for each prefix encoded below. This field can be
|
||||
// anywhere from 4 (shortest prefix) to 32 (full SHA256 hash).
|
||||
PrefixSize int32 `protobuf:"varint,1,opt,name=prefix_size,json=prefixSize" json:"prefix_size,omitempty"`
|
||||
// The hashes, all concatenated into one long string. Each hash has a prefix
|
||||
// size of |prefix_size| above. Hashes are sorted in lexicographic order.
|
||||
RawHashes []byte `protobuf:"bytes,2,opt,name=raw_hashes,json=rawHashes,proto3" json:"raw_hashes,omitempty"`
|
||||
}
|
||||
|
||||
func (m *RawHashes) Reset() { *m = RawHashes{} }
|
||||
func (m *RawHashes) String() string { return proto.CompactTextString(m) }
|
||||
func (*RawHashes) ProtoMessage() {}
|
||||
func (*RawHashes) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} }
|
||||
|
||||
// The Rice-Golomb encoded data. Used for sending compressed 4-byte hashes or
|
||||
// compressed removal indices.
|
||||
type RiceDeltaEncoding struct {
|
||||
// The offset of the first entry in the encoded data, or, if only a single
|
||||
// integer was encoded, that single integer's value.
|
||||
FirstValue int64 `protobuf:"varint,1,opt,name=first_value,json=firstValue" json:"first_value,omitempty"`
|
||||
// The Golomb-Rice parameter which is a number between 2 and 28. This field
|
||||
// is missing (that is, zero) if num_entries is zero.
|
||||
RiceParameter int32 `protobuf:"varint,2,opt,name=rice_parameter,json=riceParameter" json:"rice_parameter,omitempty"`
|
||||
// The number of entries that are delta encoded in the encoded data. If only a
|
||||
// single integer was encoded, this will be zero and the single value will be
|
||||
// stored in first_value.
|
||||
NumEntries int32 `protobuf:"varint,3,opt,name=num_entries,json=numEntries" json:"num_entries,omitempty"`
|
||||
// The encoded deltas that are encoded using the Golomb-Rice coder.
|
||||
EncodedData []byte `protobuf:"bytes,4,opt,name=encoded_data,json=encodedData,proto3" json:"encoded_data,omitempty"`
|
||||
}
|
||||
|
||||
func (m *RiceDeltaEncoding) Reset() { *m = RiceDeltaEncoding{} }
|
||||
func (m *RiceDeltaEncoding) String() string { return proto.CompactTextString(m) }
|
||||
func (*RiceDeltaEncoding) ProtoMessage() {}
|
||||
func (*RiceDeltaEncoding) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} }
|
||||
|
||||
// The metadata associated with a specific threat entry. The client is expected
|
||||
// to know the metadata key/value pairs associated with each threat type.
|
||||
type ThreatEntryMetadata struct {
|
||||
// The metadata entries.
|
||||
Entries []*ThreatEntryMetadata_MetadataEntry `protobuf:"bytes,1,rep,name=entries" json:"entries,omitempty"`
|
||||
}
|
||||
|
||||
func (m *ThreatEntryMetadata) Reset() { *m = ThreatEntryMetadata{} }
|
||||
func (m *ThreatEntryMetadata) String() string { return proto.CompactTextString(m) }
|
||||
func (*ThreatEntryMetadata) ProtoMessage() {}
|
||||
func (*ThreatEntryMetadata) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} }
|
||||
|
||||
func (m *ThreatEntryMetadata) GetEntries() []*ThreatEntryMetadata_MetadataEntry {
|
||||
if m != nil {
|
||||
return m.Entries
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// A single metadata entry.
|
||||
type ThreatEntryMetadata_MetadataEntry struct {
|
||||
// The metadata entry key.
|
||||
Key []byte `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"`
|
||||
// The metadata entry value.
|
||||
Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"`
|
||||
}
|
||||
|
||||
func (m *ThreatEntryMetadata_MetadataEntry) Reset() { *m = ThreatEntryMetadata_MetadataEntry{} }
|
||||
func (m *ThreatEntryMetadata_MetadataEntry) String() string { return proto.CompactTextString(m) }
|
||||
func (*ThreatEntryMetadata_MetadataEntry) ProtoMessage() {}
|
||||
func (*ThreatEntryMetadata_MetadataEntry) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor0, []int{15, 0}
|
||||
}
|
||||
|
||||
// Describes an individual threat list. A list is defined by three parameters:
|
||||
// the type of threat posed, the type of platform targeted by the threat, and
|
||||
// the type of entries in the list.
|
||||
type ThreatListDescriptor struct {
|
||||
// The threat type posed by the list's entries.
|
||||
ThreatType ThreatType `protobuf:"varint,1,opt,name=threat_type,json=threatType,enum=safebrowsing_proto.ThreatType" json:"threat_type,omitempty"`
|
||||
// The platform type targeted by the list's entries.
|
||||
PlatformType PlatformType `protobuf:"varint,2,opt,name=platform_type,json=platformType,enum=safebrowsing_proto.PlatformType" json:"platform_type,omitempty"`
|
||||
// The entry types contained in the list.
|
||||
ThreatEntryType ThreatEntryType `protobuf:"varint,3,opt,name=threat_entry_type,json=threatEntryType,enum=safebrowsing_proto.ThreatEntryType" json:"threat_entry_type,omitempty"`
|
||||
}
|
||||
|
||||
func (m *ThreatListDescriptor) Reset() { *m = ThreatListDescriptor{} }
|
||||
func (m *ThreatListDescriptor) String() string { return proto.CompactTextString(m) }
|
||||
func (*ThreatListDescriptor) ProtoMessage() {}
|
||||
func (*ThreatListDescriptor) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} }
|
||||
|
||||
// A collection of lists available for download by the client.
|
||||
type ListThreatListsResponse struct {
|
||||
// The lists available for download by the client.
|
||||
ThreatLists []*ThreatListDescriptor `protobuf:"bytes,1,rep,name=threat_lists,json=threatLists" json:"threat_lists,omitempty"`
|
||||
}
|
||||
|
||||
func (m *ListThreatListsResponse) Reset() { *m = ListThreatListsResponse{} }
|
||||
func (m *ListThreatListsResponse) String() string { return proto.CompactTextString(m) }
|
||||
func (*ListThreatListsResponse) ProtoMessage() {}
|
||||
func (*ListThreatListsResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} }
|
||||
|
||||
func (m *ListThreatListsResponse) GetThreatLists() []*ThreatListDescriptor {
|
||||
if m != nil {
|
||||
return m.ThreatLists
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// A Duration represents a signed, fixed-length span of time represented
|
||||
// as a count of seconds and fractions of seconds at nanosecond
|
||||
// resolution. It is independent of any calendar and concepts like "day"
|
||||
// or "month". It is related to Timestamp in that the difference between
|
||||
// two Timestamp values is a Duration and it can be added or subtracted
|
||||
// from a Timestamp. Range is approximately +-10,000 years.
|
||||
type Duration struct {
|
||||
// Signed seconds of the span of time. Must be from -315,576,000,000
|
||||
// to +315,576,000,000 inclusive.
|
||||
Seconds int64 `protobuf:"varint,1,opt,name=seconds" json:"seconds,omitempty"`
|
||||
// Signed fractions of a second at nanosecond resolution of the span
|
||||
// of time. Durations less than one second are represented with a 0
|
||||
// `seconds` field and a positive or negative `nanos` field. For durations
|
||||
// of one second or more, a non-zero value for the `nanos` field must be
|
||||
// of the same sign as the `seconds` field. Must be from -999,999,999
|
||||
// to +999,999,999 inclusive.
|
||||
Nanos int32 `protobuf:"varint,2,opt,name=nanos" json:"nanos,omitempty"`
|
||||
}
|
||||
|
||||
func (m *Duration) Reset() { *m = Duration{} }
|
||||
func (m *Duration) String() string { return proto.CompactTextString(m) }
|
||||
func (*Duration) ProtoMessage() {}
|
||||
func (*Duration) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} }
|
||||
|
||||
func init() {
|
||||
proto.RegisterType((*ThreatInfo)(nil), "safebrowsing_proto.ThreatInfo")
|
||||
proto.RegisterType((*ThreatMatch)(nil), "safebrowsing_proto.ThreatMatch")
|
||||
proto.RegisterType((*FindThreatMatchesRequest)(nil), "safebrowsing_proto.FindThreatMatchesRequest")
|
||||
proto.RegisterType((*FindThreatMatchesResponse)(nil), "safebrowsing_proto.FindThreatMatchesResponse")
|
||||
proto.RegisterType((*FetchThreatListUpdatesRequest)(nil), "safebrowsing_proto.FetchThreatListUpdatesRequest")
|
||||
proto.RegisterType((*FetchThreatListUpdatesRequest_ListUpdateRequest)(nil), "safebrowsing_proto.FetchThreatListUpdatesRequest.ListUpdateRequest")
|
||||
proto.RegisterType((*FetchThreatListUpdatesRequest_ListUpdateRequest_Constraints)(nil), "safebrowsing_proto.FetchThreatListUpdatesRequest.ListUpdateRequest.Constraints")
|
||||
proto.RegisterType((*FetchThreatListUpdatesResponse)(nil), "safebrowsing_proto.FetchThreatListUpdatesResponse")
|
||||
proto.RegisterType((*FetchThreatListUpdatesResponse_ListUpdateResponse)(nil), "safebrowsing_proto.FetchThreatListUpdatesResponse.ListUpdateResponse")
|
||||
proto.RegisterType((*FindFullHashesRequest)(nil), "safebrowsing_proto.FindFullHashesRequest")
|
||||
proto.RegisterType((*FindFullHashesResponse)(nil), "safebrowsing_proto.FindFullHashesResponse")
|
||||
proto.RegisterType((*ClientInfo)(nil), "safebrowsing_proto.ClientInfo")
|
||||
proto.RegisterType((*Checksum)(nil), "safebrowsing_proto.Checksum")
|
||||
proto.RegisterType((*ThreatEntry)(nil), "safebrowsing_proto.ThreatEntry")
|
||||
proto.RegisterType((*ThreatEntrySet)(nil), "safebrowsing_proto.ThreatEntrySet")
|
||||
proto.RegisterType((*RawIndices)(nil), "safebrowsing_proto.RawIndices")
|
||||
proto.RegisterType((*RawHashes)(nil), "safebrowsing_proto.RawHashes")
|
||||
proto.RegisterType((*RiceDeltaEncoding)(nil), "safebrowsing_proto.RiceDeltaEncoding")
|
||||
proto.RegisterType((*ThreatEntryMetadata)(nil), "safebrowsing_proto.ThreatEntryMetadata")
|
||||
proto.RegisterType((*ThreatEntryMetadata_MetadataEntry)(nil), "safebrowsing_proto.ThreatEntryMetadata.MetadataEntry")
|
||||
proto.RegisterType((*ThreatListDescriptor)(nil), "safebrowsing_proto.ThreatListDescriptor")
|
||||
proto.RegisterType((*ListThreatListsResponse)(nil), "safebrowsing_proto.ListThreatListsResponse")
|
||||
proto.RegisterType((*Duration)(nil), "safebrowsing_proto.Duration")
|
||||
proto.RegisterEnum("safebrowsing_proto.ThreatType", ThreatType_name, ThreatType_value)
|
||||
proto.RegisterEnum("safebrowsing_proto.PlatformType", PlatformType_name, PlatformType_value)
|
||||
proto.RegisterEnum("safebrowsing_proto.CompressionType", CompressionType_name, CompressionType_value)
|
||||
proto.RegisterEnum("safebrowsing_proto.ThreatEntryType", ThreatEntryType_name, ThreatEntryType_value)
|
||||
proto.RegisterEnum("safebrowsing_proto.FetchThreatListUpdatesResponse_ListUpdateResponse_ResponseType", FetchThreatListUpdatesResponse_ListUpdateResponse_ResponseType_name, FetchThreatListUpdatesResponse_ListUpdateResponse_ResponseType_value)
|
||||
}
|
||||
|
||||
var fileDescriptor0 = []byte{
|
||||
// 1628 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xcc, 0x58, 0x5b, 0x6f, 0xdb, 0x46,
|
||||
0x16, 0x5e, 0x49, 0xd6, 0xed, 0x48, 0xb2, 0xe5, 0xf1, 0x25, 0x8e, 0x13, 0x27, 0x59, 0x06, 0xd9,
|
||||
0x0d, 0x8c, 0x85, 0xb1, 0x48, 0x90, 0x64, 0x77, 0xb1, 0xd8, 0x2d, 0x2b, 0x51, 0x36, 0x11, 0x99,
|
||||
0x52, 0x47, 0x72, 0x1c, 0xb7, 0x0f, 0x04, 0x23, 0xd1, 0x31, 0x11, 0xdd, 0x42, 0x52, 0x71, 0xdc,
|
||||
0x9f, 0xd0, 0xa7, 0x02, 0x45, 0x9f, 0xdb, 0xe7, 0xbe, 0x16, 0xfd, 0x01, 0xfd, 0x19, 0x7d, 0xec,
|
||||
0x2f, 0x28, 0xfa, 0xdc, 0x97, 0x9e, 0x19, 0xce, 0x88, 0xd4, 0xc5, 0x8e, 0x12, 0xfb, 0x21, 0x6f,
|
||||
0x33, 0x67, 0xce, 0x7c, 0x73, 0xf8, 0xcd, 0xb9, 0x0d, 0x81, 0x78, 0xd6, 0xb1, 0xfd, 0xc2, 0xed,
|
||||
0x9f, 0x7a, 0x4e, 0xef, 0xe5, 0xce, 0xc0, 0xed, 0xfb, 0x7d, 0x32, 0x26, 0x33, 0xb9, 0x4c, 0xf9,
|
||||
0x31, 0x0e, 0xd0, 0x3c, 0x71, 0x6d, 0xcb, 0xd7, 0x7b, 0xc7, 0x7d, 0xa2, 0x42, 0xde, 0xe7, 0x33,
|
||||
0xd3, 0x3f, 0x1b, 0xd8, 0xde, 0x46, 0xec, 0x4e, 0xe2, 0xfe, 0xe2, 0x83, 0x5b, 0x3b, 0xd3, 0x3b,
|
||||
0x77, 0x82, 0x5d, 0x4d, 0x54, 0xa3, 0x39, 0x7f, 0x34, 0xf6, 0xc8, 0x2e, 0x2c, 0x0e, 0x3a, 0x96,
|
||||
0x7f, 0xdc, 0x77, 0xbb, 0x02, 0x24, 0xce, 0x41, 0xee, 0xcc, 0x02, 0xa9, 0x0b, 0x4d, 0x0e, 0x53,
|
||||
0x18, 0x44, 0x66, 0x1e, 0xf9, 0x0c, 0x88, 0xb0, 0xc5, 0xee, 0xf9, 0xee, 0x99, 0x00, 0x5b, 0xe0,
|
||||
0x60, 0x77, 0xcf, 0xb7, 0x48, 0x63, 0xca, 0x1c, 0xaf, 0xe8, 0x8f, 0x0b, 0x3c, 0x52, 0x81, 0xc5,
|
||||
0x08, 0xa4, 0x83, 0x70, 0x09, 0x84, 0xcb, 0x3d, 0xb8, 0xfd, 0x0e, 0x38, 0x5a, 0x08, 0xa1, 0x70,
|
||||
0x97, 0xf2, 0x73, 0x02, 0x72, 0xc1, 0xf2, 0xbe, 0xe5, 0xb7, 0x4e, 0xc8, 0xff, 0x21, 0x17, 0xa1,
|
||||
0x0d, 0x59, 0x8b, 0xcd, 0xc1, 0x1a, 0x84, 0xac, 0x11, 0x0d, 0x0a, 0x63, 0xa4, 0x21, 0x67, 0xb1,
|
||||
0xb9, 0x38, 0xcb, 0x47, 0x39, 0x23, 0x35, 0x58, 0x9e, 0xa2, 0x6c, 0x23, 0xc5, 0xa1, 0xe6, 0x62,
|
||||
0x6c, 0x69, 0x82, 0x31, 0xf2, 0x04, 0x52, 0x81, 0x08, 0x89, 0x8a, 0xcd, 0x43, 0x94, 0x50, 0x27,
|
||||
0x5f, 0xc0, 0xda, 0x98, 0x25, 0x5d, 0xdb, 0xb7, 0xda, 0x96, 0x6f, 0xe1, 0xfd, 0x31, 0x9c, 0xbf,
|
||||
0xbf, 0x03, 0x67, 0x5f, 0xa8, 0xd3, 0x15, 0x7f, 0x5a, 0x48, 0x4a, 0xb0, 0xd8, 0xb2, 0x5a, 0x27,
|
||||
0xb6, 0xd9, 0x1e, 0xba, 0x96, 0xef, 0xf4, 0x7b, 0x1b, 0x49, 0x8e, 0x7a, 0x73, 0x16, 0x6a, 0x59,
|
||||
0xe8, 0xd0, 0x02, 0xdf, 0x23, 0xa7, 0xca, 0x37, 0x31, 0xd8, 0xa8, 0x38, 0xbd, 0x76, 0xe4, 0x1e,
|
||||
0x6d, 0x8f, 0xda, 0xaf, 0x87, 0xb6, 0xe7, 0x93, 0xc7, 0x90, 0x6a, 0x75, 0x1c, 0x34, 0x9d, 0xdf,
|
||||
0x65, 0x6e, 0xf6, 0x5d, 0x96, 0xb8, 0x06, 0x8b, 0x1b, 0x2a, 0xb4, 0x23, 0x8e, 0xe0, 0xa0, 0x98,
|
||||
0xdf, 0x62, 0xee, 0x22, 0x47, 0xe0, 0x9b, 0x85, 0x23, 0xb0, 0xb1, 0xf2, 0x0c, 0xae, 0xcf, 0x30,
|
||||
0xca, 0x1b, 0xf4, 0x7b, 0x9e, 0x4d, 0xfe, 0x0d, 0xe9, 0x6e, 0x20, 0xe2, 0x81, 0x79, 0xe1, 0x75,
|
||||
0xf0, 0xbd, 0x54, 0xea, 0x2b, 0x3f, 0xa4, 0x60, 0xab, 0x62, 0xe3, 0x38, 0x58, 0xad, 0x3a, 0x9e,
|
||||
0x7f, 0x30, 0x40, 0x32, 0x2f, 0xff, 0xc9, 0x43, 0x58, 0xed, 0x20, 0x9a, 0x39, 0xe4, 0x70, 0xa6,
|
||||
0x1b, 0xc0, 0xc9, 0xc8, 0x2a, 0xcd, 0x42, 0xb9, 0xd0, 0x90, 0x9d, 0x50, 0x24, 0x24, 0x94, 0x74,
|
||||
0x26, 0x45, 0xde, 0xe6, 0x2f, 0x0b, 0xb0, 0x3c, 0xa5, 0xf9, 0x71, 0x07, 0x62, 0xf2, 0x12, 0x81,
|
||||
0xb8, 0x0a, 0x49, 0xcf, 0xc7, 0x0f, 0xe5, 0x71, 0x98, 0xa7, 0xc1, 0x84, 0xbc, 0x86, 0x5c, 0x0b,
|
||||
0x3d, 0xc3, 0x77, 0x2d, 0xa7, 0xe7, 0x7b, 0x22, 0xb6, 0x6a, 0x57, 0x40, 0xf9, 0x4e, 0x29, 0x84,
|
||||
0xa5, 0xd1, 0x33, 0x36, 0x7f, 0x8d, 0x41, 0x2e, 0xb2, 0x48, 0xfe, 0x01, 0xa4, 0x6b, 0xbd, 0x95,
|
||||
0xb7, 0x2f, 0xd3, 0x2a, 0x23, 0x3e, 0x49, 0x8b, 0xb8, 0x12, 0xc0, 0x8a, 0xc4, 0x49, 0xfe, 0x09,
|
||||
0xab, 0x4c, 0x9b, 0x45, 0xf1, 0x0b, 0xcb, 0x0b, 0xf5, 0xe3, 0x5c, 0x9f, 0x21, 0x95, 0xc5, 0x92,
|
||||
0xdc, 0xb1, 0x0e, 0x29, 0xd7, 0x7e, 0xc9, 0x62, 0x9c, 0x7d, 0x79, 0x96, 0x8a, 0x19, 0xf9, 0x1c,
|
||||
0xd6, 0xbd, 0xe1, 0x60, 0xd0, 0x77, 0x7d, 0xbb, 0x6d, 0xb6, 0xfa, 0xdd, 0x81, 0x6b, 0x7b, 0x1e,
|
||||
0x2e, 0x5c, 0x58, 0x21, 0x4a, 0xa1, 0x1e, 0xa7, 0x79, 0x6d, 0x04, 0x11, 0x59, 0xf1, 0x94, 0xaf,
|
||||
0xd3, 0x70, 0xeb, 0x3c, 0xc2, 0x44, 0x28, 0x9e, 0xc1, 0xda, 0xb8, 0xd7, 0x07, 0x72, 0x19, 0x98,
|
||||
0xda, 0xfb, 0xdc, 0x41, 0xb0, 0x75, 0xec, 0x12, 0x02, 0x11, 0x5d, 0xe9, 0x4c, 0xc9, 0x3c, 0x52,
|
||||
0x87, 0xb5, 0xae, 0xd3, 0x73, 0xba, 0xc3, 0xae, 0x79, 0x6a, 0x39, 0x7e, 0x98, 0x04, 0xe3, 0x73,
|
||||
0x24, 0xc1, 0x15, 0xb1, 0xf5, 0x10, 0x77, 0x4a, 0xe1, 0xe6, 0xf7, 0x49, 0x20, 0xd3, 0xa7, 0x5f,
|
||||
0x3e, 0x98, 0x66, 0x46, 0x41, 0xfc, 0x12, 0x51, 0x30, 0x15, 0x9d, 0x89, 0x0f, 0x8a, 0xce, 0x53,
|
||||
0x28, 0xc8, 0x0b, 0x0b, 0x60, 0x16, 0x38, 0x0c, 0xbd, 0x92, 0x4b, 0xdb, 0x91, 0x83, 0xe0, 0x60,
|
||||
0x37, 0x32, 0x23, 0x9f, 0x40, 0xd6, 0x6a, 0xb7, 0x1d, 0x9f, 0xfb, 0x69, 0x92, 0x7b, 0x8a, 0xf2,
|
||||
0x0e, 0x22, 0x1a, 0xb6, 0x4f, 0xc3, 0x4d, 0xe4, 0x7f, 0x90, 0x71, 0xed, 0x6e, 0xff, 0x8d, 0xd5,
|
||||
0xf1, 0xb0, 0xb0, 0xcf, 0x0b, 0x30, 0xda, 0x43, 0xee, 0x43, 0xb1, 0x67, 0x9f, 0x9a, 0x41, 0xee,
|
||||
0x36, 0x83, 0x94, 0x92, 0xe6, 0x29, 0x65, 0x11, 0xe5, 0x41, 0x7a, 0x6f, 0xf0, 0xdc, 0xf2, 0x2f,
|
||||
0xc8, 0x60, 0xe5, 0x68, 0xbd, 0xf2, 0x86, 0xdd, 0x8d, 0xcc, 0xf9, 0x9e, 0x55, 0x12, 0x3a, 0x74,
|
||||
0xa4, 0xad, 0x50, 0xc8, 0x47, 0x39, 0x20, 0x5b, 0x70, 0x9d, 0x6a, 0x8d, 0x7a, 0xcd, 0x68, 0x68,
|
||||
0x66, 0xf3, 0xa8, 0xae, 0x99, 0x07, 0x46, 0xa3, 0xae, 0x95, 0xf4, 0x8a, 0xae, 0x95, 0x8b, 0x7f,
|
||||
0x21, 0x04, 0x16, 0xeb, 0x2a, 0x6d, 0xea, 0x6a, 0xd5, 0x3c, 0xa8, 0x97, 0xd5, 0xa6, 0x56, 0x8c,
|
||||
0x91, 0x25, 0xc8, 0x55, 0x0e, 0xaa, 0x23, 0x41, 0x5c, 0xf9, 0x29, 0x06, 0x6b, 0xac, 0x30, 0x56,
|
||||
0x86, 0x9d, 0xce, 0x9e, 0xe5, 0x5d, 0x41, 0xa9, 0xbe, 0x0b, 0x85, 0x28, 0x0b, 0x41, 0x9b, 0x9a,
|
||||
0xa7, 0xf9, 0x56, 0xc8, 0x81, 0x37, 0x59, 0xcf, 0x13, 0xef, 0x5d, 0xcf, 0xff, 0x88, 0xc1, 0xfa,
|
||||
0xa4, 0xdd, 0x97, 0xae, 0xe6, 0x57, 0x9f, 0x02, 0x48, 0x13, 0xae, 0xf5, 0xec, 0x97, 0x38, 0x7e,
|
||||
0x63, 0x9b, 0x13, 0xbd, 0x55, 0x62, 0x0e, 0xcc, 0x35, 0xb9, 0xb9, 0x34, 0xd6, 0x63, 0xd5, 0x01,
|
||||
0x42, 0xe6, 0xc9, 0x0d, 0xc8, 0x0a, 0xc6, 0x9d, 0x36, 0xbf, 0xac, 0x2c, 0x3a, 0x4d, 0xb0, 0xdc,
|
||||
0x26, 0xf7, 0xb0, 0xa7, 0x0b, 0x16, 0xdf, 0xd8, 0xae, 0x27, 0xbf, 0x25, 0x4b, 0xc5, 0x25, 0x3d,
|
||||
0x0b, 0x84, 0x8a, 0x02, 0x19, 0xe9, 0x71, 0xac, 0x34, 0x78, 0x27, 0xd6, 0x83, 0x47, 0x8f, 0x39,
|
||||
0x58, 0x9e, 0x8a, 0x99, 0xf2, 0x50, 0x36, 0xe7, 0xdc, 0xff, 0xd1, 0xbf, 0x16, 0x4e, 0x90, 0x79,
|
||||
0xa1, 0xc4, 0xc7, 0xa4, 0x08, 0x89, 0xa1, 0xdb, 0x11, 0x47, 0xb0, 0xa1, 0xf2, 0x7b, 0x1c, 0x16,
|
||||
0xc7, 0xa3, 0x86, 0x18, 0x50, 0x8c, 0x14, 0x96, 0x68, 0x12, 0x9c, 0xab, 0xb8, 0x2c, 0xb5, 0xc6,
|
||||
0x05, 0xe4, 0xbf, 0x00, 0xae, 0x75, 0x6a, 0x9e, 0x70, 0x37, 0x10, 0x57, 0xb5, 0x35, 0x0b, 0x89,
|
||||
0x5a, 0xa7, 0xc2, 0x57, 0xb2, 0xae, 0x1c, 0x32, 0x57, 0x64, 0xbb, 0xd1, 0x97, 0x9c, 0x16, 0x7f,
|
||||
0xb8, 0x9c, 0xeb, 0x8a, 0xb8, 0x5d, 0x0f, 0xb4, 0x28, 0x3b, 0x50, 0x8c, 0xf1, 0xf1, 0x93, 0x73,
|
||||
0x71, 0x20, 0xcf, 0x0f, 0x9a, 0x85, 0x7b, 0x33, 0x01, 0x50, 0xad, 0x6c, 0x77, 0x7c, 0x4b, 0xeb,
|
||||
0xb5, 0xfa, 0x6d, 0x94, 0x23, 0x0e, 0x8a, 0x84, 0x21, 0x7b, 0x90, 0xe7, 0x38, 0xd2, 0x92, 0xe4,
|
||||
0xfb, 0x00, 0x71, 0x13, 0x84, 0x45, 0xca, 0xdf, 0x00, 0x42, 0x5b, 0xc9, 0x06, 0xa4, 0x25, 0x24,
|
||||
0x8b, 0x87, 0x24, 0x95, 0x53, 0xe5, 0x29, 0x64, 0x47, 0x94, 0x90, 0xdb, 0x90, 0x43, 0x56, 0x8f,
|
||||
0x9d, 0xb7, 0xa6, 0xe7, 0x7c, 0x69, 0x8b, 0x4e, 0x03, 0x02, 0x51, 0x03, 0x25, 0x98, 0x6e, 0x26,
|
||||
0x69, 0xce, 0x47, 0x78, 0x54, 0xbe, 0x8b, 0xc1, 0xf2, 0x94, 0x5d, 0x0c, 0xf5, 0xd8, 0x71, 0xb1,
|
||||
0xa0, 0x63, 0x96, 0x1c, 0x06, 0xa8, 0x09, 0x0a, 0x5c, 0xf4, 0x8c, 0x49, 0x98, 0x7f, 0xf2, 0xaf,
|
||||
0x1e, 0x58, 0xae, 0x85, 0x8f, 0x19, 0xdb, 0x15, 0x3d, 0x4b, 0x81, 0x49, 0xeb, 0x52, 0xc8, 0x70,
|
||||
0x7a, 0x18, 0x95, 0xe1, 0xf3, 0x92, 0x5b, 0x87, 0x22, 0xd9, 0xcf, 0xfc, 0x15, 0xf2, 0x36, 0x3b,
|
||||
0x14, 0xbb, 0x96, 0xd1, 0x7b, 0x28, 0x4f, 0x73, 0x42, 0xc6, 0xba, 0x1f, 0x66, 0xe1, 0xca, 0x8c,
|
||||
0xb7, 0x10, 0x96, 0xd3, 0x74, 0xd8, 0x5f, 0xb1, 0x84, 0xf1, 0x68, 0xce, 0x57, 0xd4, 0x8e, 0x1c,
|
||||
0x04, 0x6f, 0x34, 0x89, 0xb2, 0xf9, 0x04, 0x0a, 0x63, 0x2b, 0x2c, 0x2c, 0x5e, 0xd9, 0x67, 0x22,
|
||||
0x52, 0xd8, 0x90, 0xf5, 0x9d, 0x01, 0x23, 0x01, 0x8f, 0xc1, 0x44, 0xf9, 0x2d, 0x06, 0xab, 0x61,
|
||||
0x4d, 0x2c, 0xdb, 0x5e, 0xcb, 0x75, 0x06, 0x7e, 0xdf, 0xfd, 0xb8, 0xfb, 0xef, 0xc4, 0x87, 0x77,
|
||||
0x1e, 0xca, 0x31, 0x5c, 0x63, 0x9f, 0x1a, 0x7e, 0x74, 0x98, 0xc7, 0x9f, 0x8e, 0xfe, 0x99, 0xb0,
|
||||
0x6e, 0x4d, 0xde, 0xcd, 0xfd, 0xf3, 0x8f, 0x19, 0xe7, 0x4c, 0xfe, 0x3d, 0xe1, 0xa0, 0xca, 0x7f,
|
||||
0x20, 0x33, 0xca, 0xc9, 0x18, 0x10, 0x9e, 0x8d, 0xbd, 0x77, 0xdb, 0x13, 0xfe, 0x28, 0xa7, 0xec,
|
||||
0x56, 0x7a, 0x56, 0xaf, 0x2f, 0xfb, 0xe6, 0x60, 0xb2, 0xfd, 0x55, 0x4c, 0xfe, 0xcb, 0xe1, 0x1c,
|
||||
0xdc, 0x80, 0x6b, 0xcd, 0x3d, 0xaa, 0xa9, 0xcd, 0x59, 0x45, 0x37, 0x07, 0xe9, 0x7d, 0xb5, 0x7a,
|
||||
0xa8, 0x52, 0x56, 0x6d, 0xd7, 0x81, 0x34, 0x6a, 0x25, 0x56, 0x80, 0x35, 0x63, 0x57, 0x37, 0x34,
|
||||
0x8d, 0xea, 0xc6, 0x6e, 0x31, 0x4e, 0xd6, 0x60, 0xf9, 0xc0, 0x38, 0x54, 0x8d, 0xa6, 0x56, 0x36,
|
||||
0x1b, 0xb5, 0x4a, 0x93, 0xab, 0x27, 0xb0, 0x72, 0xde, 0xae, 0xd7, 0x9a, 0x9a, 0xc1, 0x4a, 0x76,
|
||||
0xf5, 0xc8, 0xdc, 0x53, 0xe9, 0x3e, 0x16, 0x6b, 0x53, 0xad, 0xd7, 0xab, 0x7a, 0x49, 0x6d, 0xea,
|
||||
0x35, 0xa3, 0xb8, 0xb0, 0xfd, 0x6d, 0x0c, 0xf2, 0xd1, 0x0b, 0x62, 0x5d, 0x40, 0xbd, 0xaa, 0x36,
|
||||
0x2b, 0x35, 0xba, 0x7f, 0x8e, 0x41, 0x87, 0xba, 0x51, 0xae, 0x1d, 0x36, 0xd0, 0xa0, 0x2c, 0x24,
|
||||
0xab, 0xba, 0x71, 0xf0, 0x1c, 0x6d, 0x40, 0xb9, 0x6a, 0x94, 0x69, 0x4d, 0x2f, 0xe3, 0xc9, 0x69,
|
||||
0x48, 0xd4, 0x1a, 0xcf, 0x8b, 0x0b, 0x6c, 0xa0, 0xd7, 0x1a, 0xc5, 0x24, 0x7a, 0x6c, 0x5e, 0x35,
|
||||
0x8e, 0x4c, 0x89, 0x5c, 0x4c, 0x91, 0x65, 0x28, 0xa0, 0x59, 0x23, 0x49, 0xa3, 0x98, 0x26, 0x00,
|
||||
0xa9, 0xd2, 0x1e, 0xad, 0xed, 0x6b, 0xc5, 0xcc, 0x76, 0x05, 0x96, 0x26, 0x12, 0x35, 0xb9, 0x03,
|
||||
0x37, 0x4b, 0xb5, 0xfd, 0x3a, 0xf6, 0x28, 0x0d, 0xb4, 0x7d, 0x96, 0x71, 0x78, 0x1c, 0x55, 0x0f,
|
||||
0xd1, 0xb0, 0x0c, 0x2c, 0x50, 0xbd, 0x84, 0x0d, 0xc9, 0xb6, 0x03, 0x4b, 0x13, 0x4e, 0x83, 0xa1,
|
||||
0xbd, 0x25, 0x08, 0x47, 0x6e, 0xe8, 0xd1, 0x39, 0xbd, 0xce, 0x01, 0x45, 0x9a, 0x9f, 0xcb, 0xd3,
|
||||
0x10, 0x13, 0x0d, 0xfe, 0x54, 0x37, 0x54, 0xdc, 0x50, 0xd6, 0x77, 0xb5, 0x46, 0x13, 0x3f, 0x3a,
|
||||
0x0f, 0x19, 0xbd, 0x6e, 0x52, 0xd5, 0xd8, 0x45, 0xbe, 0x5f, 0xa4, 0xb8, 0xf3, 0x3c, 0xfc, 0x33,
|
||||
0x00, 0x00, 0xff, 0xff, 0x08, 0xe3, 0x60, 0x1f, 0xd4, 0x13, 0x00, 0x00,
|
||||
}
|
467 vendor/github.com/google/safebrowsing/internal/safebrowsing_proto/safebrowsing.proto (generated, vendored, new file)
@@ -0,0 +1,467 @@
|
|||
// Copyright 2016 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package safebrowsing_proto;
|
||||
|
||||
// The Safe Browsing APIs enable clients to check web resources (most commonly
|
||||
// URLs) against Google's constantly updated lists of unsafe web resources. The
|
||||
// Safe Browsing APIs (v4) feature the Update API and the Lookup API.
|
||||
//
|
||||
// The Update API is designed for our larger clients and includes the
|
||||
// FindFullHashes and FetchThreatListUpdates methods. The Update API requires
|
||||
// that clients maintain downloaded threat lists in a local database.
|
||||
//
|
||||
// First, clients match against their local lists to determine the state (safe
|
||||
// or unsafe) of a given web resource. Most commonly, lists are comprised of
|
||||
// hash prefixes of blacklisted URL expressions. To check a URL, clients
|
||||
// generate the hashes of a given URL and check for prefix collisions in their
|
||||
// local lists; if a prefix match is found, the client obtains the full hashes
|
||||
// associated with the matched hash prefix via the FindFullHashes method. The
|
||||
// client then compares the local full hash with the returned full hashes; a match
|
||||
// indicates that the URL is unsafe.
|
||||
//
|
||||
// Second, clients obtain updates to their local databases via the
|
||||
// FetchThreatListUpdates method, which takes the current state of the client
|
||||
// and returns an updated client state along with the changes that the client
|
||||
// should apply to their local threat lists.
|
||||
//
|
||||
// The Lookup API is designed for our smaller clients and allows them to match
|
||||
// resources directly against the Safe Browsing threat lists via the
|
||||
// FindThreatMatches method.
|
||||
//
|
||||
// Clients using either the Update API or the Lookup API can obtain a list of
|
||||
// the Safe Browsing threat lists available for download via the ListThreatLists
|
||||
// method.
|
||||
|
||||
// The information regarding one or more threats that a client submits when
|
||||
// checking for matches in threat lists.
|
||||
message ThreatInfo {
|
||||
// The threat types to be checked.
|
||||
repeated ThreatType threat_types = 1;
|
||||
|
||||
// The platform types to be checked.
|
||||
repeated PlatformType platform_types = 2;
|
||||
|
||||
// The entry types to be checked.
|
||||
repeated ThreatEntryType threat_entry_types = 4;
|
||||
|
||||
// The threat entries to be checked.
|
||||
repeated ThreatEntry threat_entries = 3;
|
||||
}
|
||||
|
||||
// A match when checking a threat entry in the Safe Browsing threat lists.
|
||||
message ThreatMatch {
|
||||
// The threat type matching this threat.
|
||||
ThreatType threat_type = 1;
|
||||
|
||||
// The platform type matching this threat.
|
||||
PlatformType platform_type = 2;
|
||||
|
||||
// The threat entry type matching this threat.
|
||||
ThreatEntryType threat_entry_type = 6;
|
||||
|
||||
// The threat matching this threat.
|
||||
ThreatEntry threat = 3;
|
||||
|
||||
// Optional metadata associated with this threat.
|
||||
ThreatEntryMetadata threat_entry_metadata = 4;
|
||||
|
||||
// The cache lifetime for the returned match. Clients must not cache this
|
||||
// response for more than this duration to avoid false positives.
|
||||
Duration cache_duration = 5;
|
||||
}
|
||||
|
||||
// Request to check entries against lists.
|
||||
message FindThreatMatchesRequest {
|
||||
// The client metadata.
|
||||
ClientInfo client = 1;
|
||||
|
||||
// The lists and entries to be checked for matches.
|
||||
ThreatInfo threat_info = 2;
|
||||
}
|
||||
|
||||
// Response type for requests to find threat matches.
|
||||
message FindThreatMatchesResponse {
|
||||
// The threat list matches.
|
||||
repeated ThreatMatch matches = 1;
|
||||
}
|
||||
|
||||
// Describes a Safe Browsing API update request. Clients can request updates for
|
||||
// multiple lists in a single request.
|
||||
// NOTE: Field index 2 is unused.
|
||||
message FetchThreatListUpdatesRequest {
|
||||
// The client metadata.
|
||||
ClientInfo client = 1;
|
||||
|
||||
// A single list update request.
|
||||
message ListUpdateRequest {
|
||||
// The type of threat posed by entries present in the list.
|
||||
ThreatType threat_type = 1;
|
||||
|
||||
// The type of platform at risk by entries present in the list.
|
||||
PlatformType platform_type = 2;
|
||||
|
||||
// The types of entries present in the list.
|
||||
ThreatEntryType threat_entry_type = 5;
|
||||
|
||||
// The current state of the client for the requested list (the encrypted
|
||||
// ClientState that was sent to the client from the previous update
|
||||
// request).
|
||||
bytes state = 3;
|
||||
|
||||
// The constraints for this update.
|
||||
message Constraints {
|
||||
// The maximum size in number of entries. The update will not contain more
|
||||
// entries than this value. This should be a power of 2 between 2**10 and
|
||||
// 2**20. If zero, no update size limit is set.
|
||||
int32 max_update_entries = 1;
|
||||
|
||||
// Sets the maximum number of entries that the client is willing to have
|
||||
// in the local database. This should be a power of 2 between 2**10 and
|
||||
// 2**20. If zero, no database size limit is set.
|
||||
int32 max_database_entries = 2;
|
||||
|
||||
// Requests the list for a specific geographic location. If not set the
|
||||
// server may pick that value based on the user's IP address. Expects ISO
|
||||
// 3166-1 alpha-2 format.
|
||||
string region = 3;
|
||||
|
||||
// The compression types supported by the client.
|
||||
repeated CompressionType supported_compressions = 4;
|
||||
}
|
||||
|
||||
// The constraints associated with this request.
|
||||
Constraints constraints = 4;
|
||||
}
|
||||
|
||||
// Index 2 is unused.
|
||||
|
||||
// The requested threat list updates.
|
||||
repeated ListUpdateRequest list_update_requests = 3;
|
||||
}
|
||||
|
||||
// Response type for threat list update requests.
|
||||
message FetchThreatListUpdatesResponse {
|
||||
// An update to an individual list.
|
||||
message ListUpdateResponse {
|
||||
// The threat type for which data is returned.
|
||||
ThreatType threat_type = 1;
|
||||
|
||||
// The format of the threats.
|
||||
ThreatEntryType threat_entry_type = 2;
|
||||
|
||||
// The platform type for which data is returned.
|
||||
PlatformType platform_type = 3;
|
||||
|
||||
// The type of response sent to the client.
|
||||
enum ResponseType {
|
||||
// Unknown.
|
||||
RESPONSE_TYPE_UNSPECIFIED = 0;
|
||||
|
||||
// Partial updates are applied to the client's existing local database.
|
||||
PARTIAL_UPDATE = 1;
|
||||
|
||||
// Full updates replace the client's entire local database. This means
|
||||
// that either the client was seriously out-of-date or the client is
|
||||
// believed to be corrupt.
|
||||
FULL_UPDATE = 2;
|
||||
}
|
||||
|
||||
// The type of response. This may indicate that an action is required by the
|
||||
// client when the response is received.
|
||||
ResponseType response_type = 4;
|
||||
|
||||
// A set of entries to add to a local threat type's list. Repeated to allow
|
||||
// for a combination of compressed and raw data to be sent in a single
|
||||
// response.
|
||||
repeated ThreatEntrySet additions = 5;
|
||||
|
||||
// A set of entries to remove from a local threat type's list. Repeated for
|
||||
// the same reason as above.
|
||||
repeated ThreatEntrySet removals = 6;
|
||||
|
||||
// The new client state, in encrypted format. Opaque to clients.
|
||||
bytes new_client_state = 7;
|
||||
|
||||
// The expected SHA256 hash of the client state; that is, of the sorted list
|
||||
// of all hashes present in the database after applying the provided update.
|
||||
// If the client state doesn't match the expected state, the client must
|
||||
// disregard this update and retry later.
|
||||
Checksum checksum = 8;
|
||||
}
|
||||
|
||||
// The list updates requested by the clients.
|
||||
repeated ListUpdateResponse list_update_responses = 1;
|
||||
|
||||
// The minimum duration the client must wait before issuing any update
|
||||
// request. If this field is not set clients may update as soon as they want.
|
||||
Duration minimum_wait_duration = 2;
|
||||
}
|
||||
|
||||
// Request to return full hashes matched by the provided hash prefixes.
|
||||
message FindFullHashesRequest {
|
||||
// The client metadata.
|
||||
ClientInfo client = 1;
|
||||
|
||||
// The current client states for each of the client's local threat lists.
|
||||
repeated bytes client_states = 2;
|
||||
|
||||
// The lists and hashes to be checked.
|
||||
ThreatInfo threat_info = 3;
|
||||
}
|
||||
|
||||
// Response type for requests to find full hashes.
|
||||
message FindFullHashesResponse {
|
||||
// The full hashes that matched the requested prefixes.
|
||||
repeated ThreatMatch matches = 1;
|
||||
|
||||
// The minimum duration the client must wait before issuing any find hashes
|
||||
// request. If this field is not set, clients can issue a request as soon as
|
||||
// they want.
|
||||
Duration minimum_wait_duration = 2;
|
||||
|
||||
// For requested entities that did not match the threat list, how long to
|
||||
// cache the response.
|
||||
Duration negative_cache_duration = 3;
|
||||
}
|
||||
|
||||
|
||||
// Types of threats.
|
||||
enum ThreatType {
|
||||
// Unknown.
|
||||
THREAT_TYPE_UNSPECIFIED = 0;
|
||||
|
||||
// Malware threat type.
|
||||
MALWARE = 1;
|
||||
|
||||
// Social engineering threat type.
|
||||
SOCIAL_ENGINEERING = 2;
|
||||
|
||||
// Unwanted software threat type.
|
||||
UNWANTED_SOFTWARE = 3;
|
||||
|
||||
// Potentially harmful application threat type.
|
||||
POTENTIALLY_HARMFUL_APPLICATION = 4;
|
||||
}
|
||||
|
||||
// Types of platforms.
|
||||
enum PlatformType {
|
||||
// Unknown platform.
|
||||
PLATFORM_TYPE_UNSPECIFIED = 0;
|
||||
|
||||
// Threat posed to Windows.
|
||||
WINDOWS = 1;
|
||||
|
||||
// Threat posed to Linux.
|
||||
LINUX = 2;
|
||||
|
||||
// Threat posed to Android.
|
||||
ANDROID = 3;
|
||||
|
||||
// Threat posed to OSX.
|
||||
OSX = 4;
|
||||
|
||||
// Threat posed to iOS.
|
||||
IOS = 5;
|
||||
|
||||
// Threat posed to at least one of the defined platforms.
|
||||
ANY_PLATFORM = 6;
|
||||
|
||||
// Threat posed to all defined platforms.
|
||||
ALL_PLATFORMS = 7;
|
||||
|
||||
// Threat posed to Chrome.
|
||||
CHROME = 8;
|
||||
}
|
||||
|
||||
// The client metadata associated with Safe Browsing API requests.
|
||||
message ClientInfo {
|
||||
// A client ID that (hopefully) uniquely identifies the client implementation
|
||||
// of the Safe Browsing API.
|
||||
string client_id = 1;
|
||||
|
||||
// The version of the client implementation.
|
||||
string client_version = 2;
|
||||
}
|
||||
|
||||
// The expected state of a client's local database.
|
||||
message Checksum {
|
||||
// The SHA256 hash of the client state; that is, of the sorted list of all
|
||||
// hashes present in the database.
|
||||
bytes sha256 = 1;
|
||||
}
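The Checksum message is what lets an Update API client detect a corrupted local database after applying an update. Below is a minimal verification sketch in Go, assuming (per the comment above) that the digest is SHA-256 over the concatenation of all stored hashes in sorted order; the package and function names are illustrative, not part of the library:

```
package sbexample

import (
	"bytes"
	"crypto/sha256"
	"io"
	"sort"
)

// verifyChecksum reports whether the locally stored hash prefixes match the
// server-provided SHA-256 checksum. Assumption: the digest covers the
// concatenation of all prefixes in lexicographic (sorted) order.
func verifyChecksum(prefixes []string, want []byte) bool {
	sorted := append([]string(nil), prefixes...)
	sort.Strings(sorted)

	h := sha256.New()
	for _, p := range sorted {
		io.WriteString(h, p) // hash.Hash writes never fail
	}
	return bytes.Equal(h.Sum(nil), want)
}
```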
|
||||
|
||||
// The ways in which threat entry sets can be compressed.
|
||||
enum CompressionType {
|
||||
// Unknown.
|
||||
COMPRESSION_TYPE_UNSPECIFIED = 0;
|
||||
|
||||
// Raw, uncompressed data.
|
||||
RAW = 1;
|
||||
|
||||
// Rice-Golomb encoded data.
|
||||
RICE = 2;
|
||||
}
|
||||
|
||||
// An individual threat; for example, a malicious URL or its hash
|
||||
// representation. Only one of these fields should be set.
|
||||
message ThreatEntry {
|
||||
// A hash prefix, consisting of the most significant 4-32 bytes of a SHA256
|
||||
// hash.
|
||||
bytes hash = 1;
|
||||
|
||||
// A URL.
|
||||
string url = 2;
|
||||
}
|
||||
|
||||
// Types of entries that pose threats. Threat lists are collections of entries
|
||||
// of a single type.
|
||||
enum ThreatEntryType {
|
||||
// Unspecified.
|
||||
THREAT_ENTRY_TYPE_UNSPECIFIED = 0;
|
||||
|
||||
// A URL.
|
||||
URL = 1;
|
||||
|
||||
// An executable program.
|
||||
EXECUTABLE = 2;
|
||||
|
||||
// An IP range.
|
||||
IP_RANGE = 3;
|
||||
}
|
||||
|
||||
// A set of threats that should be added or removed from a client's local
|
||||
// database.
|
||||
message ThreatEntrySet {
|
||||
// The compression type for the entries in this set.
|
||||
CompressionType compression_type = 1;
|
||||
|
||||
// At most one of the following fields should be set.
|
||||
|
||||
// The raw SHA256-formatted entries.
|
||||
RawHashes raw_hashes = 2;
|
||||
|
||||
// The raw removal indices for a local list.
|
||||
RawIndices raw_indices = 3;
|
||||
|
||||
// The encoded 4-byte prefixes of SHA256-formatted entries, using a
|
||||
// Golomb-Rice encoding.
|
||||
RiceDeltaEncoding rice_hashes = 4;
|
||||
|
||||
// The encoded local, lexicographically-sorted list indices, using a
|
||||
// Golomb-Rice encoding. Used for sending compressed removal indices.
|
||||
RiceDeltaEncoding rice_indices = 5;
|
||||
}
|
||||
|
||||
// A set of raw indices to remove from a local list.
|
||||
message RawIndices {
|
||||
// The indices to remove from a lexicographically-sorted local list.
|
||||
repeated int32 indices = 1;
|
||||
}
|
||||
|
||||
// The uncompressed threat entries in hash format of a particular prefix length.
|
||||
// Hashes can be anywhere from 4 to 32 bytes in size. A large majority are 4
|
||||
// bytes, but some hashes are lengthened if they collide with the hash of a
|
||||
// popular URL.
|
||||
//
|
||||
// Used for sending ThreatEntrySet to clients that do not support compression,
|
||||
// or when sending non-4-byte hashes to clients that do support compression.
|
||||
message RawHashes {
|
||||
// The number of bytes for each prefix encoded below. This field can be
|
||||
// anywhere from 4 (shortest prefix) to 32 (full SHA256 hash).
|
||||
int32 prefix_size = 1;
|
||||
|
||||
// The hashes, all concatenated into one long string. Each hash has a prefix
|
||||
// size of |prefix_size| above. Hashes are sorted in lexicographic order.
|
||||
bytes raw_hashes = 2;
|
||||
}
|
||||
|
||||
// The Rice-Golomb encoded data. Used for sending compressed 4-byte hashes or
|
||||
// compressed removal indices.
|
||||
message RiceDeltaEncoding {
|
||||
// The offset of the first entry in the encoded data, or, if only a single
|
||||
// integer was encoded, that single integer's value.
|
||||
int64 first_value = 1;
|
||||
|
||||
// The Golomb-Rice parameter, which is a number between 2 and 28. This field
|
||||
// is missing (that is, zero) if num_entries is zero.
|
||||
int32 rice_parameter = 2;
|
||||
|
||||
// The number of entries that are delta encoded in the encoded data. If only a
|
||||
// single integer was encoded, this will be zero and the single value will be
|
||||
// stored in first_value.
|
||||
int32 num_entries = 3;
|
||||
|
||||
// The encoded deltas that are encoded using the Golomb-Rice coder.
|
||||
bytes encoded_data = 4;
|
||||
}
|
||||
|
||||
// The metadata associated with a specific threat entry. The client is expected
|
||||
// to know the metadata key/value pairs associated with each threat type.
|
||||
message ThreatEntryMetadata {
|
||||
// A single metadata entry.
|
||||
message MetadataEntry {
|
||||
// The metadata entry key.
|
||||
bytes key = 1;
|
||||
|
||||
// The metadata entry value.
|
||||
bytes value = 2;
|
||||
}
|
||||
|
||||
// The metadata entries.
|
||||
repeated MetadataEntry entries = 1;
|
||||
}
|
||||
|
||||
// Describes an individual threat list. A list is defined by three parameters:
|
||||
// the type of threat posed, the type of platform targeted by the threat, and
|
||||
// the type of entries in the list.
|
||||
message ThreatListDescriptor {
|
||||
// The threat type posed by the list's entries.
|
||||
ThreatType threat_type = 1;
|
||||
|
||||
// The platform type targeted by the list's entries.
|
||||
PlatformType platform_type = 2;
|
||||
|
||||
// The entry types contained in the list.
|
||||
ThreatEntryType threat_entry_type = 3;
|
||||
}
|
||||
|
||||
// A collection of lists available for download by the client.
|
||||
message ListThreatListsResponse {
|
||||
// The lists available for download by the client.
|
||||
repeated ThreatListDescriptor threat_lists = 1;
|
||||
}
|
||||
|
||||
// A Duration represents a signed, fixed-length span of time represented
|
||||
// as a count of seconds and fractions of seconds at nanosecond
|
||||
// resolution. It is independent of any calendar and concepts like "day"
|
||||
// or "month". It is related to Timestamp in that the difference between
|
||||
// two Timestamp values is a Duration and it can be added or subtracted
|
||||
// from a Timestamp. Range is approximately +-10,000 years.
|
||||
message Duration {
|
||||
|
||||
// Signed seconds of the span of time. Must be from -315,576,000,000
|
||||
// to +315,576,000,000 inclusive.
|
||||
int64 seconds = 1;
|
||||
|
||||
// Signed fractions of a second at nanosecond resolution of the span
|
||||
// of time. Durations less than one second are represented with a 0
|
||||
// `seconds` field and a positive or negative `nanos` field. For durations
|
||||
// of one second or more, a non-zero value for the `nanos` field must be
|
||||
// of the same sign as the `seconds` field. Must be from -999,999,999
|
||||
// to +999,999,999 inclusive.
|
||||
int32 nanos = 2;
|
||||
}
|
|
@@ -0,0 +1,519 @@
|
|||
// Copyright 2016 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// Package safebrowsing implements a client for the Safe Browsing API v4.
|
||||
//
|
||||
// API v4 emphasizes efficient usage of the network for bandwidth-constrained
|
||||
// applications such as mobile devices. It achieves this by maintaining a small
|
||||
// portion of the server state locally such that some queries can be answered
|
||||
// immediately without any network requests. Thus, fewer API calls are made,
// which means less bandwidth is used.
|
||||
//
|
||||
// At a high-level, the implementation does the following:
|
||||
//
|
||||
//            hash(query)
//                 |
//            _____V_____
//           |           |  No
//           | Database  |-----+
//           |___________|     |
//                 |           |
//                 | Maybe?    |
//            _____V_____      |
//       Yes |           | No  V
//    +-----|   Cache   |---->+
//    |     |___________|     |
//    |           |           |
//    |           | Maybe?    |
//    |      _____V_____      |
//    V  Yes |           | No V
//    +<----|    API    |---->+
//    |     |___________|     |
//    V                       V
// (Yes, unsafe)         (No, safe)
|
||||
//
|
||||
// Essentially the query is presented to three major components: The database,
|
||||
// the cache, and the API. Each of these may satisfy the query immediately,
|
||||
// or may say that it does not know and that the query should be satisfied by
|
||||
// the next component. The goal of the database and cache is to satisfy as many
|
||||
// queries as possible to avoid using the API.
|
||||
//
|
||||
// Starting with a user query, a hash of the query is computed to preserve
// privacy regarding the exact nature of the query. For example, if the query
|
||||
// was for a URL, then this would be the SHA256 hash of the URL in question.
|
||||
//
|
||||
// Given a query hash, we first check the local database (which is periodically
|
||||
// synced with the global Safe Browsing API servers). This database will either
|
||||
// tell us that the query is definitely safe, or that it does not have
|
||||
// enough information.
|
||||
//
|
||||
// If we are unsure about the query, we check the local cache, which can be used
|
||||
// to satisfy queries immediately if the same query had been made recently.
|
||||
// The cache will tell us that the query is either safe, unsafe, or unknown
|
||||
// (because it's not in the cache or the entry has expired).
|
||||
//
|
||||
// If we are still unsure about the query, then we finally query the API server,
|
||||
// which is guaranteed to return to us an authoritative answer, assuming no
|
||||
// networking failures.
|
||||
//
|
||||
// For more information, see the API developer's guide:
|
||||
// https://developers.google.com/safe-browsing/
|
||||
package safebrowsing
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
pb "github.com/google/safebrowsing/internal/safebrowsing_proto"
|
||||
)
|
||||
|
||||
const (
|
||||
// DefaultServerURL is the default URL for the Safe Browsing API.
|
||||
DefaultServerURL = "safebrowsing.googleapis.com"
|
||||
|
||||
// DefaultUpdatePeriod is the default period for how often SafeBrowser will
|
||||
// reload its blacklist database.
|
||||
DefaultUpdatePeriod = 30 * time.Minute
|
||||
|
||||
// DefaultID and DefaultVersion are the default client ID and Version
|
||||
// strings to send with every API call.
|
||||
DefaultID = "GoSafeBrowser"
|
||||
DefaultVersion = "1.0.0"
|
||||
)
|
||||
|
||||
// Errors specific to this package.
|
||||
var (
|
||||
errClosed = errors.New("safebrowsing: handler is closed")
|
||||
errStale = errors.New("safebrowsing: threat list is stale")
|
||||
)
|
||||
|
||||
// ThreatType is an enumeration type for threat classes. Examples of threat
|
||||
// classes are malware, social engineering, etc.
|
||||
type ThreatType uint16
|
||||
|
||||
func (tt ThreatType) String() string { return pb.ThreatType(tt).String() }
|
||||
|
||||
// List of ThreatType constants.
|
||||
const (
|
||||
ThreatType_Malware = ThreatType(pb.ThreatType_MALWARE)
|
||||
ThreatType_SocialEngineering = ThreatType(pb.ThreatType_SOCIAL_ENGINEERING)
|
||||
ThreatType_UnwantedSoftware = ThreatType(pb.ThreatType_UNWANTED_SOFTWARE)
|
||||
ThreatType_PotentiallyHarmfulApplication = ThreatType(pb.ThreatType_POTENTIALLY_HARMFUL_APPLICATION)
|
||||
)
|
||||
|
||||
// PlatformType is an enumeration type for platform classes. Examples of
|
||||
// platform classes are Windows, Linux, Android, etc.
|
||||
type PlatformType uint16
|
||||
|
||||
func (pt PlatformType) String() string { return pb.PlatformType(pt).String() }
|
||||
|
||||
// List of PlatformType constants.
|
||||
const (
|
||||
PlatformType_AnyPlatform = PlatformType(pb.PlatformType_ANY_PLATFORM)
|
||||
PlatformType_AllPlatforms = PlatformType(pb.PlatformType_ALL_PLATFORMS)
|
||||
|
||||
PlatformType_Windows = PlatformType(pb.PlatformType_WINDOWS)
|
||||
PlatformType_Linux = PlatformType(pb.PlatformType_LINUX)
|
||||
PlatformType_Android = PlatformType(pb.PlatformType_ANDROID)
|
||||
PlatformType_OSX = PlatformType(pb.PlatformType_OSX)
|
||||
PlatformType_iOS = PlatformType(pb.PlatformType_IOS)
|
||||
PlatformType_Chrome = PlatformType(pb.PlatformType_CHROME)
|
||||
)
|
||||
|
||||
// ThreatEntryType is an enumeration type for threat entries. Examples of
|
||||
// threat entries are URLs, binary digests, and IP address ranges.
|
||||
type ThreatEntryType uint16
|
||||
|
||||
func (tet ThreatEntryType) String() string { return pb.ThreatEntryType(tet).String() }
|
||||
|
||||
// List of ThreatEntryType constants.
|
||||
const (
|
||||
ThreatEntryType_URL = ThreatEntryType(pb.ThreatEntryType_URL)
|
||||
|
||||
// The entry types below are not supported yet.
|
||||
ThreatEntryType_Executable = ThreatEntryType(pb.ThreatEntryType_EXECUTABLE)
|
||||
ThreatEntryType_IPRange = ThreatEntryType(pb.ThreatEntryType_IP_RANGE)
|
||||
)
|
||||
|
||||
// DefaultThreatLists is the default list of threat lists that SafeBrowser
|
||||
// will maintain. Do not modify this variable.
|
||||
var DefaultThreatLists = []ThreatDescriptor{
|
||||
{ThreatType_Malware, PlatformType_AnyPlatform, ThreatEntryType_URL},
|
||||
{ThreatType_SocialEngineering, PlatformType_AnyPlatform, ThreatEntryType_URL},
|
||||
{ThreatType_UnwantedSoftware, PlatformType_AnyPlatform, ThreatEntryType_URL},
|
||||
}
|
||||
|
||||
// A ThreatDescriptor describes a given threat, which itself is composed of
|
||||
// several parameters along different dimensions: ThreatType, PlatformType, and
|
||||
// ThreatEntryType.
|
||||
type ThreatDescriptor struct {
|
||||
ThreatType ThreatType
|
||||
PlatformType PlatformType
|
||||
ThreatEntryType ThreatEntryType
|
||||
}
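Since Config.ThreatLists (below) takes a slice of these descriptors, a caller that wants more than the defaults can start from a copy of DefaultThreatLists and extend it. A small illustrative sketch using only identifiers defined in this file (the variable names are arbitrary):

```
package main

import (
	"fmt"

	sb "github.com/google/safebrowsing"
)

func main() {
	// Copy the defaults (the docs say not to modify the DefaultThreatLists
	// variable itself), then add the PHA list for Android.
	lists := append([]sb.ThreatDescriptor(nil), sb.DefaultThreatLists...)
	lists = append(lists, sb.ThreatDescriptor{
		ThreatType:      sb.ThreatType_PotentiallyHarmfulApplication,
		PlatformType:    sb.PlatformType_Android,
		ThreatEntryType: sb.ThreatEntryType_URL,
	})
	fmt.Println(len(lists), "threat lists configured")
}
```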
|
||||
|
||||
// A URLThreat is a specialized ThreatDescriptor for the URL threat
|
||||
// entry type.
|
||||
type URLThreat struct {
|
||||
Pattern string
|
||||
ThreatDescriptor
|
||||
}
|
||||
|
||||
// Config sets up the SafeBrowser object.
|
||||
type Config struct {
|
||||
// ServerURL is the URL for the Safe Browsing API server.
|
||||
// If empty, it defaults to DefaultServerURL.
|
||||
ServerURL string
|
||||
|
||||
// APIKey is the key used to authenticate with the Safe Browsing API
|
||||
// service. This field is required.
|
||||
APIKey string
|
||||
|
||||
// ID and Version are client metadata associated with each API request to
|
||||
// identify the specific implementation of the client.
|
||||
// They are similar in usage to the "User-Agent" in an HTTP request.
|
||||
// If empty, these default to DefaultID and DefaultVersion, respectively.
|
||||
ID string
|
||||
Version string
|
||||
|
||||
// DBPath is a path to a persistent database file.
|
||||
// If empty, SafeBrowser operates in a non-persistent manner.
|
||||
// This means that blacklist results will not be cached beyond the lifetime
|
||||
// of the SafeBrowser object.
|
||||
DBPath string
|
||||
|
||||
// UpdatePeriod determines how often we update the internal list database.
|
||||
// If zero value, it defaults to DefaultUpdatePeriod.
|
||||
UpdatePeriod time.Duration
|
||||
|
||||
// ThreatLists determines which threat lists SafeBrowser should
// subscribe to. The threats reported by LookupURLs will only be ones that
|
||||
// are specified by this list.
|
||||
// If empty, it defaults to DefaultThreatLists.
|
||||
ThreatLists []ThreatDescriptor
|
||||
|
||||
// Logger is an io.Writer that allows SafeBrowser to write debug information
|
||||
// intended for human consumption.
|
||||
// If empty, no logs will be written.
|
||||
Logger io.Writer
|
||||
|
||||
// compressionTypes indicates how the threat entry sets can be compressed.
|
||||
compressionTypes []pb.CompressionType
|
||||
|
||||
api api
|
||||
now func() time.Time
|
||||
}
|
||||
|
||||
// setDefaults configures Config to have default parameters.
|
||||
// It reports whether the current configuration is valid.
|
||||
func (c *Config) setDefaults() bool {
|
||||
if c.ServerURL == "" {
|
||||
c.ServerURL = DefaultServerURL
|
||||
}
|
||||
if len(c.ThreatLists) == 0 {
|
||||
c.ThreatLists = DefaultThreatLists
|
||||
}
|
||||
if c.UpdatePeriod <= 0 {
|
||||
c.UpdatePeriod = DefaultUpdatePeriod
|
||||
}
|
||||
if c.compressionTypes == nil {
|
||||
c.compressionTypes = []pb.CompressionType{pb.CompressionType_RAW, pb.CompressionType_RICE}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (c Config) copy() Config {
|
||||
c2 := c
|
||||
c2.ThreatLists = append([]ThreatDescriptor(nil), c.ThreatLists...)
|
||||
c2.compressionTypes = append([]pb.CompressionType(nil), c.compressionTypes...)
|
||||
return c2
|
||||
}
|
||||
|
||||
// SafeBrowser is a client implementation of API v4.
|
||||
//
|
||||
// It provides a set of lookup methods that allows the user to query whether
|
||||
// certain entries are considered a threat. The implementation manages all of
// the local database and caching that would normally be needed to interact
|
||||
// with the API server.
|
||||
type SafeBrowser struct {
|
||||
config Config
|
||||
stats Stats
|
||||
api api
|
||||
db database
|
||||
c cache
|
||||
|
||||
lists map[ThreatDescriptor]bool
|
||||
|
||||
log *log.Logger
|
||||
|
||||
closed uint32
|
||||
done chan bool // Signals that the updater routine should stop
|
||||
}
|
||||
|
||||
// Stats records statistics regarding SafeBrowser's operation.
|
||||
type Stats struct {
|
||||
QueriesByDatabase int64 // Number of queries satisfied by the database alone
|
||||
QueriesByCache int64 // Number of queries satisfied by the cache alone
|
||||
QueriesByAPI int64 // Number of queries satisfied by an API call
|
||||
QueriesFail int64 // Number of queries that could not be satisfied
|
||||
}
|
||||
|
||||
// NewSafeBrowser creates a new SafeBrowser.
|
||||
//
|
||||
// The conf struct allows the user to configure many aspects of the
|
||||
// SafeBrowser's operation.
|
||||
func NewSafeBrowser(conf Config) (*SafeBrowser, error) {
|
||||
conf = conf.copy()
|
||||
if !conf.setDefaults() {
|
||||
return nil, errors.New("safebrowsing: invalid configuration")
|
||||
}
|
||||
|
||||
// Create the SafeBrowsing object.
|
||||
if conf.api == nil {
|
||||
var err error
|
||||
conf.api, err = newNetAPI(conf.ServerURL, conf.APIKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
if conf.now == nil {
|
||||
conf.now = time.Now
|
||||
}
|
||||
sb := &SafeBrowser{
|
||||
config: conf,
|
||||
api: conf.api,
|
||||
c: cache{now: conf.now},
|
||||
}
|
||||
|
||||
// TODO: Verify that config.ThreatLists is a subset of the list obtained
|
||||
// by "/v4/threatLists" API endpoint.
|
||||
|
||||
// Convert threat lists slice to a map for O(1) lookup.
|
||||
sb.lists = make(map[ThreatDescriptor]bool)
|
||||
for _, td := range conf.ThreatLists {
|
||||
sb.lists[td] = true
|
||||
}
|
||||
|
||||
// Setup the logger.
|
||||
w := conf.Logger
|
||||
if conf.Logger == nil {
|
||||
w = ioutil.Discard
|
||||
}
|
||||
sb.log = log.New(w, "safebrowsing: ", log.Ldate|log.Ltime|log.Lshortfile)
|
||||
|
||||
// If database file is provided, use that to initialize.
|
||||
if !sb.db.Init(&sb.config, sb.log) {
|
||||
sb.db.Update(sb.api)
|
||||
}
|
||||
|
||||
// Start the background list updater.
|
||||
sb.done = make(chan bool)
|
||||
go sb.updater(conf.UpdatePeriod)
|
||||
return sb, nil
|
||||
}
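For reference, a minimal usage sketch of the client constructed above: build a Config, create the browser, look up a URL, and close it. The API key, database path, and URL below are placeholders; only APIKey is required, and everything else falls back to the defaults declared earlier in this file.

```
package main

import (
	"fmt"
	"log"
	"os"

	sb "github.com/google/safebrowsing"
)

func main() {
	browser, err := sb.NewSafeBrowser(sb.Config{
		APIKey: "YOUR_API_KEY_HERE",       // placeholder; required
		DBPath: "/tmp/safebrowsing.db",    // optional persistent database
		Logger: os.Stderr,                 // optional debug output
	})
	if err != nil {
		log.Fatal(err)
	}
	defer browser.Close()

	threats, err := browser.LookupURLs([]string{"http://example.com/"})
	if err != nil {
		log.Printf("lookup returned a best-effort result: %v", err)
	}
	// threats has one entry per input URL; a non-empty inner slice is a hit.
	if len(threats[0]) > 0 {
		fmt.Println("unsafe:", threats[0])
	} else {
		fmt.Println("no matches")
	}
}
```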
|
||||
|
||||
// Status reports the status of SafeBrowser. It returns some statistics
|
||||
// regarding the operation, and an error representing the status of its
|
||||
// internal state. Most errors are transient and will recover themselves
|
||||
// after some period.
|
||||
func (sb *SafeBrowser) Status() (Stats, error) {
|
||||
stats := Stats{
|
||||
QueriesByDatabase: atomic.LoadInt64(&sb.stats.QueriesByDatabase),
|
||||
QueriesByCache: atomic.LoadInt64(&sb.stats.QueriesByCache),
|
||||
QueriesByAPI: atomic.LoadInt64(&sb.stats.QueriesByAPI),
|
||||
QueriesFail: atomic.LoadInt64(&sb.stats.QueriesFail),
|
||||
}
|
||||
return stats, sb.db.Status()
|
||||
}
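A small self-contained sketch of how a caller might poll Status for monitoring; a non-nil error here typically means the local threat lists are stale and should clear on a later update. The helper name is illustrative:

```
package sbexample

import (
	"log"

	sb "github.com/google/safebrowsing"
)

// logStats reports which tier (database, cache, or API) answered past lookups
// and whether the local database is currently considered healthy.
func logStats(browser *sb.SafeBrowser) {
	stats, err := browser.Status()
	if err != nil {
		log.Printf("safebrowsing database not ready: %v", err)
	}
	log.Printf("lookups served: db=%d cache=%d api=%d failed=%d",
		stats.QueriesByDatabase, stats.QueriesByCache, stats.QueriesByAPI, stats.QueriesFail)
}
```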
|
||||
|
||||
// LookupURLs looks up the provided URLs. It returns a list of threats, one for
|
||||
// every URL requested, and an error if any occurred. It is safe to call this
|
||||
// method concurrently.
|
||||
//
|
||||
// The outer dimension is across all URLs requested, and will always have the
|
||||
// same length as urls regardless of whether an error occurs or not.
|
||||
// The inner dimension is across every fragment that a given URL produces.
|
||||
// For some URL at index i, one can check for a hit on any blacklist by
|
||||
// checking if len(threats[i]) > 0.
|
||||
// The ThreatEntryType field in the inner ThreatDescriptor will be set to
|
||||
// ThreatEntryType_URL as this is a URL lookup.
|
||||
//
|
||||
// If an error occurs, the caller should treat the threats list returned as a
|
||||
// best-effort response to the query. The results may be stale or be partial.
|
||||
func (sb *SafeBrowser) LookupURLs(urls []string) (threats [][]URLThreat, err error) {
|
||||
threats = make([][]URLThreat, len(urls))
|
||||
|
||||
if atomic.LoadUint32(&sb.closed) != 0 {
|
||||
return threats, errClosed
|
||||
}
|
||||
if err := sb.db.Status(); err != nil {
|
||||
sb.log.Printf("inconsistent database: %v", err)
|
||||
atomic.AddInt64(&sb.stats.QueriesFail, int64(len(urls)))
|
||||
return threats, err
|
||||
}
|
||||
|
||||
// TODO: There are some optimizations to be made here:
|
||||
// 1.) We could force a database update if it is in error.
|
||||
// However, we must ensure that we perform some form of rate-limiting.
|
||||
// 2.) We should batch all of the partial hashes together such that we
|
||||
// call api.HashLookup only once.
|
||||
|
||||
for i, url := range urls {
|
||||
hashes, err := generateHashes(url)
|
||||
if err != nil {
|
||||
sb.log.Printf("error generating hashes: %v", err)
|
||||
atomic.AddInt64(&sb.stats.QueriesFail, int64(len(urls)-i))
|
||||
return threats, err
|
||||
}
|
||||
|
||||
// Construct the follow-up request being made to the server.
|
||||
// In the request, we only ask for partial hashes for privacy reasons.
|
||||
req := &pb.FindFullHashesRequest{
|
||||
Client: &pb.ClientInfo{
|
||||
ClientId: sb.config.ID,
|
||||
ClientVersion: sb.config.Version,
|
||||
},
|
||||
ThreatInfo: &pb.ThreatInfo{},
|
||||
}
|
||||
ttm := make(map[pb.ThreatType]bool)
|
||||
ptm := make(map[pb.PlatformType]bool)
|
||||
tetm := make(map[pb.ThreatEntryType]bool)
|
||||
for fullHash, pattern := range hashes {
|
||||
// Lookup in database according to threat list.
|
||||
partialHash, unsureThreats := sb.db.Lookup(fullHash)
|
||||
if len(unsureThreats) == 0 {
|
||||
atomic.AddInt64(&sb.stats.QueriesByDatabase, 1)
|
||||
continue // There are definitely no threats for this full hash
|
||||
}
|
||||
|
||||
// Lookup in cache according to recently seen values.
|
||||
cachedThreats, cr := sb.c.Lookup(fullHash)
|
||||
switch cr {
|
||||
case positiveCacheHit:
|
||||
// The cache remembers this full hash as a threat.
|
||||
// The threats we return to the client is the set intersection
|
||||
// of unsureThreats and cachedThreats.
|
||||
for _, td := range unsureThreats {
|
||||
if _, ok := cachedThreats[td]; ok {
|
||||
threats[i] = append(threats[i], URLThreat{
|
||||
Pattern: pattern,
|
||||
ThreatDescriptor: td,
|
||||
})
|
||||
}
|
||||
}
|
||||
case negativeCacheHit:
|
||||
// This is cached as a non-threat.
|
||||
atomic.AddInt64(&sb.stats.QueriesByCache, 1)
|
||||
continue
|
||||
default:
|
||||
// The cache knows nothing about this full hash, so we must make
|
||||
// a request for it.
|
||||
for _, td := range unsureThreats {
|
||||
ttm[pb.ThreatType(td.ThreatType)] = true
|
||||
ptm[pb.PlatformType(td.PlatformType)] = true
|
||||
tetm[pb.ThreatEntryType(td.ThreatEntryType)] = true
|
||||
}
|
||||
req.ThreatInfo.ThreatEntries = append(req.ThreatInfo.ThreatEntries,
|
||||
&pb.ThreatEntry{Hash: []byte(partialHash)})
|
||||
}
|
||||
}
|
||||
for tt := range ttm {
|
||||
req.ThreatInfo.ThreatTypes = append(req.ThreatInfo.ThreatTypes, tt)
|
||||
}
|
||||
for pt := range ptm {
|
||||
req.ThreatInfo.PlatformTypes = append(req.ThreatInfo.PlatformTypes, pt)
|
||||
}
|
||||
for tet := range tetm {
|
||||
req.ThreatInfo.ThreatEntryTypes = append(req.ThreatInfo.ThreatEntryTypes, tet)
|
||||
}
|
||||
|
||||
// All results are known, so just continue.
|
||||
if len(req.ThreatInfo.ThreatEntries) == 0 {
|
||||
atomic.AddInt64(&sb.stats.QueriesByCache, 1)
|
||||
continue
|
||||
}
|
||||
|
||||
// Actually query the Safe Browsing API for exact full hash matches.
|
||||
resp, err := sb.api.HashLookup(req)
|
||||
if err != nil {
|
||||
sb.log.Printf("HashLookup failure: %v", err)
|
||||
atomic.AddInt64(&sb.stats.QueriesFail, int64(len(urls)-i))
|
||||
return threats, err
|
||||
}
|
||||
|
||||
// Update the cache.
|
||||
sb.c.Update(req, resp)
|
||||
|
||||
// Pull the information the client cares about out of the response.
|
||||
for _, tm := range resp.GetMatches() {
|
||||
fullHash := hashPrefix(tm.GetThreat().Hash)
|
||||
if !fullHash.IsFull() {
|
||||
continue
|
||||
}
|
||||
if pattern, ok := hashes[fullHash]; ok {
|
||||
td := ThreatDescriptor{
|
||||
ThreatType: ThreatType(tm.ThreatType),
|
||||
PlatformType: PlatformType(tm.PlatformType),
|
||||
ThreatEntryType: ThreatEntryType(tm.ThreatEntryType),
|
||||
}
|
||||
if !sb.lists[td] {
|
||||
continue
|
||||
}
|
||||
threats[i] = append(threats[i], URLThreat{
|
||||
Pattern: pattern,
|
||||
ThreatDescriptor: td,
|
||||
})
|
||||
}
|
||||
}
|
||||
atomic.AddInt64(&sb.stats.QueriesByAPI, 1)
|
||||
}
|
||||
return threats, nil
|
||||
}
|
||||
|
||||
// TODO: Add other types of lookup when available.
|
||||
// func (sb *SafeBrowser) LookupBinaries(digests []string) (threats []BinaryThreat, err error)
|
||||
// func (sb *SafeBrowser) LookupAddresses(addrs []string) (threats [][]AddressThreat, err error)
|
||||
|
||||
// updater is a blocking method that periodically updates the local database.
|
||||
// This should be run as a separate goroutine and will be automatically stopped
|
||||
// when sb.Close is called.
|
||||
func (sb *SafeBrowser) updater(period time.Duration) {
|
||||
ticker := time.NewTicker(period)
|
||||
defer ticker.Stop()
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-ticker.C:
|
||||
sb.log.Printf("background threat list update")
|
||||
sb.c.Purge()
|
||||
sb.db.Update(sb.api)
|
||||
case <-sb.done:
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Close cleans up all resources.
|
||||
// This method must not be called concurrently with other lookup methods.
|
||||
func (sb *SafeBrowser) Close() error {
|
||||
if atomic.LoadUint32(&sb.closed) == 0 {
|
||||
atomic.StoreUint32(&sb.closed, 1)
|
||||
close(sb.done)
|
||||
}
|
||||
return nil
|
||||
}
|
|
@@ -0,0 +1,494 @@
|
|||
// Copyright 2016 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package safebrowsing
|
||||
|
||||
// The logic below deals with extracting patterns from a URL.
|
||||
// Patterns are all the possible host-suffix and path-prefix fragments for
|
||||
// the input URL.
|
||||
//
|
||||
// For example, the patterns for the given URL are the following:
//	input:    "http://a.b.c/1/2.html?param=1/2"
//	patterns: [
//		"a.b.c/1/2.html?param=1/2",
//		"a.b.c/1/2.html",
//		"a.b.c/1/",
//		"a.b.c/",
//		"b.c/1/2.html?param=1/2",
//		"b.c/1/2.html",
//		"b.c/1/",
//		"b.c/"
//	]
|
||||
//
|
||||
// The process that Safe Browsing uses predates Chrome and many RFC standards
|
||||
// and is partly based on how legacy browsers typically parse URLs. Thus, we
|
||||
// parse URLs in a way that is not strictly standards compliant.
|
||||
//
|
||||
// The parsing policy is documented here:
|
||||
// https://developers.google.com/safe-browsing/
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/url"
|
||||
"path"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/net/idna"
|
||||
)
|
||||
|
||||
var (
|
||||
dotsRegexp = regexp.MustCompile("[.]+")
|
||||
portRegexp = regexp.MustCompile(`:\d+$`)
|
||||
possibleIPRegexp = regexp.MustCompile(`^(?i)((?:0x[0-9a-f]+|[0-9\.])+)$`)
|
||||
trailingSpaceRegexp = regexp.MustCompile(`^(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}) `)
|
||||
)
|
||||
|
||||
// generateHashes returns a set of full hashes for all patterns in the URL.
|
||||
func generateHashes(url string) (map[hashPrefix]string, error) {
|
||||
patterns, err := generatePatterns(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
hashes := make(map[hashPrefix]string)
|
||||
for _, p := range patterns {
|
||||
hashes[hashFromPattern(p)] = p
|
||||
}
|
||||
return hashes, nil
|
||||
}
|
||||
|
||||
// generatePatterns returns all possible host-suffix and path-prefix patterns
|
||||
// for the input URL.
|
||||
func generatePatterns(url string) ([]string, error) {
|
||||
hosts, err := generateLookupHosts(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
paths, err := generateLookupPaths(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var patterns []string
|
||||
for _, h := range hosts {
|
||||
for _, p := range paths {
|
||||
patterns = append(patterns, h+p)
|
||||
}
|
||||
}
|
||||
return patterns, nil
|
||||
}
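To make the comment at the top of this file concrete, here is a test-style sketch (it would have to live in this package, e.g. in a _test.go file, since generatePatterns is unexported) that exercises the documented example URL; the expected count of 8 follows from the 2 host suffixes times 4 path prefixes listed in that comment:

```
package safebrowsing

import (
	"fmt"
	"testing"
)

// TestGeneratePatternsExample is a sketch, not part of the library's tests.
func TestGeneratePatternsExample(t *testing.T) {
	patterns, err := generatePatterns("http://a.b.c/1/2.html?param=1/2")
	if err != nil {
		t.Fatal(err)
	}
	if len(patterns) != 8 {
		t.Errorf("got %d patterns, want 8 (4 path prefixes x 2 host suffixes)", len(patterns))
	}
	for _, p := range patterns {
		fmt.Println(p) // e.g. "a.b.c/1/2.html?param=1/2", ..., "b.c/"
	}
}
```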

// isHex reports whether c is a hexadecimal character.
func isHex(c byte) bool {
	switch {
	case '0' <= c && c <= '9':
		return true
	case 'a' <= c && c <= 'f':
		return true
	case 'A' <= c && c <= 'F':
		return true
	}
	return false
}

// unhex converts a hexadecimal character to a byte value in 0..15, inclusive.
func unhex(c byte) byte {
	switch {
	case '0' <= c && c <= '9':
		return c - '0'
	case 'a' <= c && c <= 'f':
		return c - 'a' + 10
	case 'A' <= c && c <= 'F':
		return c - 'A' + 10
	}
	return 0
}

// isUnicode reports whether s is a Unicode string.
func isUnicode(s string) bool {
	for _, c := range []byte(s) {
		// For legacy reasons, 0x80 is not considered a Unicode character.
		if c > 0x80 {
			return true
		}
	}
	return false
}

// split splits the string s around the delimiter c.
//
// Let string s be of the form:
//	"%s%s%s" % (t, c, u)
//
// Then split returns (t, u) if cutc is set; otherwise it returns (t, c+u).
// If c does not exist in s, then (s, "") is returned.
func split(s string, c string, cutc bool) (string, string) {
	i := strings.Index(s, c)
	if i < 0 {
		return s, ""
	}
	if cutc {
		return s[:i], s[i+len(c):]
	}
	return s[:i], s[i:]
}

// escape returns the percent-encoded form of the string s.
func escape(s string) string {
	var b bytes.Buffer
	for _, c := range []byte(s) {
		if c < 0x20 || c >= 0x7f || c == ' ' || c == '#' || c == '%' {
			b.WriteString(fmt.Sprintf("%%%02x", c))
		} else {
			b.WriteByte(c)
		}
	}
	return b.String()
}

// unescape returns the decoded form of a percent-encoded string s.
func unescape(s string) string {
	var b bytes.Buffer
	for len(s) > 0 {
		if len(s) >= 3 && s[0] == '%' && isHex(s[1]) && isHex(s[2]) {
			b.WriteByte(unhex(s[1])<<4 | unhex(s[2]))
			s = s[3:]
		} else {
			b.WriteByte(s[0])
			s = s[1:]
		}
	}
	return b.String()
}

// recursiveUnescape unescapes the string s repeatedly until it cannot be
// unescaped any further. It reports an error if the unescaping process seems
// to have no end.
func recursiveUnescape(s string) (string, error) {
	const maxDepth = 1024
	for i := 0; i < maxDepth; i++ {
		t := unescape(s)
		if t == s {
			return s, nil
		}
		s = t
	}
	return "", errors.New("safebrowsing: unescaping is too recursive")
}

// normalizeEscape performs a recursive unescape and then escapes the string
// exactly once. It reports an error if it was unable to unescape the string.
func normalizeEscape(s string) (string, error) {
	u, err := recursiveUnescape(s)
	if err != nil {
		return "", err
	}
	return escape(u), nil
}
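
// Editorial sketch (not part of the upstream library): a worked example of
// the escaping pipeline above. The function name is hypothetical and exists
// only for illustration. Double-encoded input is fully decoded and then
// re-encoded exactly once: "a%2520b" -> "a%20b" -> "a b" -> "a%20b".
func exampleNormalizeEscape() {
	out, err := normalizeEscape("a%2520b")
	if err != nil {
		fmt.Println("unexpected error:", err)
		return
	}
	fmt.Println(out) // prints "a%20b"
}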

// getScheme splits the url into (scheme, path) where scheme is the protocol.
// If the scheme cannot be determined, ("", url) is returned.
func getScheme(url string) (scheme, path string) {
	for i, c := range []byte(url) {
		switch {
		case 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z':
			// Do nothing.
		case '0' <= c && c <= '9' || c == '+' || c == '-' || c == '.':
			if i == 0 {
				return "", url
			}
		case c == ':':
			return url[:i], url[i+1:]
		default:
			// Invalid character, so there is no valid scheme.
			return "", url
		}
	}
	return "", url
}

// parseHost parses a string to extract the host by stripping the
// username, password, and port.
func parseHost(hostish string) (host string, err error) {
	i := strings.LastIndex(hostish, "@")
	if i < 0 {
		host = hostish
	} else {
		host = hostish[i+1:]
	}
	if strings.HasPrefix(host, "[") {
		// Parse an IP-Literal per RFC 3986 and RFC 6874.
		// For example: "[fe80::1]" or "[fe80::1%25en0]".
		i := strings.LastIndex(host, "]")
		if i < 0 {
			return "", errors.New("safebrowsing: missing ']' in host")
		}
	}
	// Remove the port if it is there.
	host = portRegexp.ReplaceAllString(host, "")

	// Convert internationalized hostnames to IDNA.
	u := unescape(host)
	if isUnicode(u) {
		host, err = idna.ToASCII(u)
		if err != nil {
			return "", err
		}
	}

	// Remove any superfluous '.' characters in the hostname.
	host = dotsRegexp.ReplaceAllString(host, ".")
	host = strings.Trim(host, ".")
	// Canonicalize IP addresses.
	if iphost := parseIPAddress(host); iphost != "" {
		host = iphost
	} else {
		host = strings.ToLower(host)
	}
	return host, nil
}

// parseURL parses urlStr as a url.URL and reports an error if not possible.
func parseURL(urlStr string) (parsedURL *url.URL, err error) {
	// For legacy reasons, this is a simplified version of the net/url logic.
	//
	// A few cases where net/url was not helpful:
	// 1. URLs are expected to have no escaped encoding in the host but to
	// be escaped in the path. Safe Browsing allows escaped characters in both.
	// 2. It also has different behavior with and without a scheme for absolute
	// paths. Safe Browsing tests web URLs only, and a scheme is optional.
	// If missing, we assume that it is "http".
	// 3. We strip off the fragment and the escaped query as they are not
	// required for building patterns for Safe Browsing.

	parsedURL = new(url.URL)
	// Remove the URL fragment.
	// Also, we decode and encode the URL.
	// The '#' in a fragment is not friendly to that.
	rest, _ := split(urlStr, "#", true)
	// Start by stripping any leading and trailing whitespace.
	rest = strings.TrimSpace(rest)
	// Remove any embedded tabs and CR/LF characters which aren't escaped.
	rest = strings.Replace(rest, "\t", "", -1)
	rest = strings.Replace(rest, "\r", "", -1)
	rest = strings.Replace(rest, "\n", "", -1)
	rest, err = normalizeEscape(rest)
	if err != nil {
		return nil, err
	}
	parsedURL.Scheme, rest = getScheme(rest)
	rest, parsedURL.RawQuery = split(rest, "?", true)

	// Add HTTP as the scheme if none is present.
	var hostish string
	if !strings.HasPrefix(rest, "//") && parsedURL.Scheme != "" {
		return nil, errors.New("safebrowsing: invalid path")
	}
	if parsedURL.Scheme == "" {
		parsedURL.Scheme = "http"
		hostish, rest = split(rest, "/", false)
	} else {
		hostish, rest = split(rest[2:], "/", false)
	}
	if hostish == "" {
		return nil, errors.New("safebrowsing: missing hostname")
	}

	parsedURL.Host, err = parseHost(hostish)
	if err != nil {
		return nil, err
	}
	// Format the path.
	p := path.Clean(rest)
	if p == "." {
		p = "/"
	} else if rest[len(rest)-1] == '/' && p[len(p)-1] != '/' {
		p += "/"
	}
	parsedURL.Path = p
	return parsedURL, nil
}

func parseIPAddress(iphostname string) string {
	// The Windows resolver allows a 4-part dotted decimal IP address to have a
	// space followed by any old rubbish, so long as the total length of the
	// string doesn't get above 15 characters. So, "10.192.95.89 xy" is
	// resolved to 10.192.95.89. If the string length is greater than 15
	// characters, e.g. "10.192.95.89 xy.wildcard.example.com", it will be
	// resolved through DNS.
	if len(iphostname) <= 15 {
		match := trailingSpaceRegexp.FindString(iphostname)
		if match != "" {
			iphostname = strings.TrimSpace(match)
		}
	}
	if !possibleIPRegexp.MatchString(iphostname) {
		return ""
	}
	parts := strings.Split(iphostname, ".")
	if len(parts) > 4 {
		return ""
	}
	ss := make([]string, len(parts))
	for i, n := range parts {
		if i == len(parts)-1 {
			ss[i] = canonicalNum(n, 5-len(parts))
		} else {
			ss[i] = canonicalNum(n, 1)
		}
		if ss[i] == "" {
			return ""
		}
	}
	return strings.Join(ss, ".")
}

// canonicalNum parses s as an integer and attempts to encode it as a '.'
// separated string where each element is the base-10 encoded value of each byte
// for the corresponding number, starting with the MSB. The result is one that
// is usable as an IP address.
//
// For example:
//	s:"01234",      n:2 => "2.156"
//	s:"0x10203040", n:4 => "16.32.48.64"
func canonicalNum(s string, n int) string {
	if n <= 0 || n > 4 {
		return ""
	}
	v, err := strconv.ParseUint(s, 0, 32)
	if err != nil {
		return ""
	}
	ss := make([]string, n)
	for i := n - 1; i >= 0; i-- {
		ss[i] = strconv.Itoa(int(v) & 0xff)
		v = v >> 8
	}
	return strings.Join(ss, ".")
}
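
// Editorial sketch (not part of the upstream library): parseIPAddress and
// canonicalNum together normalize the many legacy spellings of an IPv4
// address. The function name is hypothetical and exists only for
// illustration; the expected outputs follow from the code above.
func exampleIPCanonicalization() {
	fmt.Println(parseIPAddress("0x7f.0.0.1"))      // prints "127.0.0.1" (hex octet)
	fmt.Println(parseIPAddress("10.192.95.89 xy")) // prints "10.192.95.89" (trailing rubbish dropped)
	fmt.Println(canonicalNum("01234", 2))          // prints "2.156" (octal 01234 == 668 == 0x029c)
}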

// canonicalURL parses a URL string and returns it as scheme://hostname/path.
// It strips off fragments and queries.
func canonicalURL(u string) (string, error) {
	parsedURL, err := parseURL(u)
	if err != nil {
		return "", err
	}
	// Assemble the URL ourselves to skip encodings from the net/url package.
	u = parsedURL.Scheme + "://" + parsedURL.Host
	if parsedURL.Path == "" {
		return u + "/", nil
	}
	u += parsedURL.Path
	return u, nil
}
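
// Editorial sketch (not part of the upstream library): canonicalURL drops the
// fragment and query, cleans the path, and defaults the scheme to "http" when
// none is given. The function name is hypothetical and exists only for
// illustration.
func exampleCanonicalURL() {
	u1, err := canonicalURL("http://a.b.c/1/./2.html?param=1/2#frag")
	if err != nil {
		fmt.Println("unexpected error:", err)
		return
	}
	fmt.Println(u1) // prints "http://a.b.c/1/2.html"

	u2, err := canonicalURL("a.b.c/1.html")
	if err != nil {
		fmt.Println("unexpected error:", err)
		return
	}
	fmt.Println(u2) // prints "http://a.b.c/1.html"
}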

func canonicalHost(urlStr string) (string, error) {
	parsedURL, err := parseURL(urlStr)
	if err != nil {
		return "", err
	}

	return parsedURL.Host, nil
}

// generateLookupHosts returns a list of host-suffixes for the input URL.
func generateLookupHosts(urlStr string) ([]string, error) {
	// The Safe Browsing policy asks us to generate lookup hosts for the URL.
	// Those are formed by the domain and also up to 4 hostname suffixes.
	// The last component (or sometimes the last pair) isn't examined alone,
	// since it's the TLD or country code. The database for TLDs is here:
	//	https://publicsuffix.org/list/
	//
	// Note that we do not need to be clever about stopping at the "real" TLD.
	// We just check a few extra components regardless. It's not significantly
	// slower on the server side to check some extra hashes. Also, the client
	// does not need to keep a database of TLDs.
	const maxHostComponents = 7

	host, err := canonicalHost(urlStr)
	if err != nil {
		return nil, err
	}
	// Handle IPv4 and IPv6 addresses.
	u, err := url.Parse(urlStr)
	if err != nil {
		return nil, err
	}
	ip := net.ParseIP(strings.Trim(u.Host, "[]"))
	if ip != nil {
		return []string{u.Host}, nil
	}
	hostComponents := strings.Split(host, ".")

	numComponents := len(hostComponents) - maxHostComponents
	if numComponents < 1 {
		numComponents = 1
	}

	hosts := []string{host}
	for i := numComponents; i < len(hostComponents)-1; i++ {
		hosts = append(hosts, strings.Join(hostComponents[i:], "."))
	}
	return hosts, nil
}
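
// Editorial sketch (not part of the upstream library): the suffix expansion
// keeps the full host and then progressively shorter suffixes, never emitting
// the bare last component. The function name is hypothetical and the expected
// output follows from the loop above.
func exampleLookupHosts() {
	hosts, err := generateLookupHosts("http://a.b.c.d.e/index.html")
	if err != nil {
		fmt.Println("unexpected error:", err)
		return
	}
	fmt.Println(hosts) // prints [a.b.c.d.e b.c.d.e c.d.e d.e]
}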

func canonicalPath(urlStr string) (string, error) {
	// Note that this function is not used, but it remains to ensure that the
	// parsedURL.Path output matches the C++ implementation.
	parsedURL, err := parseURL(urlStr)
	if err != nil {
		return "", err
	}
	return parsedURL.Path, nil
}

// generateLookupPaths returns a list of path-prefixes for the input URL.
func generateLookupPaths(urlStr string) ([]string, error) {
	const maxPathComponents = 4

	parsedURL, err := parseURL(urlStr)
	if err != nil {
		return nil, err
	}
	path := parsedURL.Path

	paths := []string{"/"}
	var pathComponents []string
	for _, p := range strings.Split(path, "/") {
		if p != "" {
			pathComponents = append(pathComponents, p)
		}
	}

	numComponents := len(pathComponents)
	if numComponents > maxPathComponents {
		numComponents = maxPathComponents
	}

	for i := 1; i < numComponents; i++ {
		paths = append(paths, "/"+strings.Join(pathComponents[:i], "/")+"/")
	}
	if path != "/" {
		paths = append(paths, path)
	}
	if len(parsedURL.RawQuery) > 0 {
		paths = append(paths, path+"?"+parsedURL.RawQuery)
	}
	return paths, nil
}
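
// Editorial sketch (not part of the upstream library): the path expansion for
// the documented example URL. The function name is hypothetical and the
// expected output follows from the loops above.
func exampleLookupPaths() {
	paths, err := generateLookupPaths("http://a.b.c/1/2.html?param=1/2")
	if err != nil {
		fmt.Println("unexpected error:", err)
		return
	}
	fmt.Println(paths) // prints [/ /1/ /1/2.html /1/2.html?param=1/2]
}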