mirror of https://github.com/knative/pkg.git
				
				
				
			Update vegeta to a version supporting go modules (#1811)
This allows us to drop the explicit overrides in downstream repositories and it also looks so much nicer :). I'd hope the change is non-breaking to downstream repositories, given the interface is strictly widened here.
This commit is contained in:
		
							parent
							
								
									08db0e8fd7
								
							
						
					
					
						commit
						51839ea5e1
					
				
							
								
								
									
										2
									
								
								go.mod
								
								
								
								
							
							
						
						
									
										2
									
								
								go.mod
								
								
								
								
							|  | @ -31,7 +31,7 @@ require ( | ||||||
| 	github.com/prometheus/client_golang v1.6.0 | 	github.com/prometheus/client_golang v1.6.0 | ||||||
| 	github.com/prometheus/common v0.9.1 | 	github.com/prometheus/common v0.9.1 | ||||||
| 	github.com/spf13/pflag v1.0.5 | 	github.com/spf13/pflag v1.0.5 | ||||||
| 	github.com/tsenart/vegeta v12.7.1-0.20190725001342-b5f4fca92137+incompatible | 	github.com/tsenart/vegeta/v12 v12.8.4 | ||||||
| 	go.opencensus.io v0.22.4 | 	go.opencensus.io v0.22.4 | ||||||
| 	go.uber.org/atomic v1.6.0 | 	go.uber.org/atomic v1.6.0 | ||||||
| 	go.uber.org/automaxprocs v1.3.0 | 	go.uber.org/automaxprocs v1.3.0 | ||||||
|  |  | ||||||
							
								
								
									
										22
									
								
								go.sum
								
								
								
								
							
							
						
						
									
										22
									
								
								go.sum
								
								
								
								
							|  | @ -154,6 +154,7 @@ github.com/Shopify/sarama v1.23.1/go.mod h1:XLH1GYJnLVE0XCr6KdJGVJRTwY30moWNJ4sE | ||||||
| github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= | github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= | ||||||
| github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM= | github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM= | ||||||
| github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs= | github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs= | ||||||
|  | github.com/alecthomas/jsonschema v0.0.0-20180308105923-f2c93856175a/go.mod h1:qpebaTNSsyUn5rPSJMsfqEtDw71TTggXM6stUDI16HA= | ||||||
| github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= | github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= | ||||||
| github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM= | github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM= | ||||||
| github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= | github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= | ||||||
|  | @ -213,6 +214,7 @@ github.com/bugsnag/bugsnag-go v0.0.0-20141110184014-b1d153021fcd/go.mod h1:2oa8n | ||||||
| github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b/go.mod h1:obH5gd0BsqsP2LwDJ9aOkm/6J86V6lyAXCoQWGw3K50= | github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b/go.mod h1:obH5gd0BsqsP2LwDJ9aOkm/6J86V6lyAXCoQWGw3K50= | ||||||
| github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0/go.mod h1:D/8v3kj0zr8ZAKg1AQ6crr+5VwKN5eIywRkfhyM/+dE= | github.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0/go.mod h1:D/8v3kj0zr8ZAKg1AQ6crr+5VwKN5eIywRkfhyM/+dE= | ||||||
| github.com/bwmarrin/snowflake v0.0.0/go.mod h1:NdZxfVWX+oR6y2K0o6qAYv6gIOP9rjG0/E9WsDpxqwE= | github.com/bwmarrin/snowflake v0.0.0/go.mod h1:NdZxfVWX+oR6y2K0o6qAYv6gIOP9rjG0/E9WsDpxqwE= | ||||||
|  | github.com/c2h5oh/datasize v0.0.0-20171227191756-4eba002a5eae/go.mod h1:S/7n9copUssQ56c7aAgHqftWO4LTf4xY6CGWt8Bc+3M= | ||||||
| github.com/census-instrumentation/opencensus-proto v0.2.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= | github.com/census-instrumentation/opencensus-proto v0.2.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= | ||||||
| github.com/census-instrumentation/opencensus-proto v0.2.1 h1:glEXhBS5PSLLv4IXzLA5yPRVX4bilULVyxxbrfOtDAk= | github.com/census-instrumentation/opencensus-proto v0.2.1 h1:glEXhBS5PSLLv4IXzLA5yPRVX4bilULVyxxbrfOtDAk= | ||||||
| github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= | github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= | ||||||
|  | @ -271,8 +273,10 @@ github.com/denverdino/aliyungo v0.0.0-20190125010748-a747050bb1ba/go.mod h1:dV8l | ||||||
| github.com/devigned/tab v0.1.1/go.mod h1:XG9mPq0dFghrYvoBF3xdRrJzSTX1b7IQrvaL9mzjeJY= | github.com/devigned/tab v0.1.1/go.mod h1:XG9mPq0dFghrYvoBF3xdRrJzSTX1b7IQrvaL9mzjeJY= | ||||||
| github.com/dgrijalva/jwt-go v0.0.0-20170104182250-a601269ab70c/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= | github.com/dgrijalva/jwt-go v0.0.0-20170104182250-a601269ab70c/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= | ||||||
| github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= | github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= | ||||||
|  | github.com/dgryski/go-gk v0.0.0-20140819190930-201884a44051/go.mod h1:qm+vckxRlDt0aOla0RYJJVeqHZlWfOm2UIxHaqPB46E= | ||||||
| github.com/dgryski/go-gk v0.0.0-20200319235926-a69029f61654 h1:XOPLOMn/zT4jIgxfxSsoXPxkrzz0FaCHwp33x5POJ+Q= | github.com/dgryski/go-gk v0.0.0-20200319235926-a69029f61654 h1:XOPLOMn/zT4jIgxfxSsoXPxkrzz0FaCHwp33x5POJ+Q= | ||||||
| github.com/dgryski/go-gk v0.0.0-20200319235926-a69029f61654/go.mod h1:qm+vckxRlDt0aOla0RYJJVeqHZlWfOm2UIxHaqPB46E= | github.com/dgryski/go-gk v0.0.0-20200319235926-a69029f61654/go.mod h1:qm+vckxRlDt0aOla0RYJJVeqHZlWfOm2UIxHaqPB46E= | ||||||
|  | github.com/dgryski/go-lttb v0.0.0-20180810165845-318fcdf10a77/go.mod h1:Va5MyIzkU0rAM92tn3hb3Anb7oz7KcnixF49+2wOMe4= | ||||||
| github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8= | github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8= | ||||||
| github.com/djherbis/atime v1.0.0/go.mod h1:5W+KBIuTwVGcqjIfaTwt+KSYX1o6uep8dtevevQP/f8= | github.com/djherbis/atime v1.0.0/go.mod h1:5W+KBIuTwVGcqjIfaTwt+KSYX1o6uep8dtevevQP/f8= | ||||||
| github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= | github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= | ||||||
|  | @ -476,6 +480,15 @@ github.com/golangplus/bytes v0.0.0-20160111154220-45c989fe5450/go.mod h1:Bk6SMAO | ||||||
| github.com/golangplus/fmt v0.0.0-20150411045040-2a5d6d7d2995/go.mod h1:lJgMEyOkYFkPcDKwRXegd+iM6E7matEszMG5HhwytU8= | github.com/golangplus/fmt v0.0.0-20150411045040-2a5d6d7d2995/go.mod h1:lJgMEyOkYFkPcDKwRXegd+iM6E7matEszMG5HhwytU8= | ||||||
| github.com/golangplus/testing v0.0.0-20180327235837-af21d9c3145e/go.mod h1:0AA//k/eakGydO4jKRoRL2j92ZKSzTgj9tclaCrvXHk= | github.com/golangplus/testing v0.0.0-20180327235837-af21d9c3145e/go.mod h1:0AA//k/eakGydO4jKRoRL2j92ZKSzTgj9tclaCrvXHk= | ||||||
| github.com/gomodule/redigo v1.7.0/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4= | github.com/gomodule/redigo v1.7.0/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4= | ||||||
|  | github.com/gonum/blas v0.0.0-20181208220705-f22b278b28ac/go.mod h1:P32wAyui1PQ58Oce/KYkOqQv8cVw1zAapXOl+dRFGbc= | ||||||
|  | github.com/gonum/diff v0.0.0-20181124234638-500114f11e71/go.mod h1:22dM4PLscQl+Nzf64qNBurVJvfyvZELT0iRW2l/NN70= | ||||||
|  | github.com/gonum/floats v0.0.0-20181209220543-c233463c7e82/go.mod h1:PxC8OnwL11+aosOB5+iEPoV3picfs8tUpkVd0pDo+Kg= | ||||||
|  | github.com/gonum/integrate v0.0.0-20181209220457-a422b5c0fdf2/go.mod h1:pDgmNM6seYpwvPos3q+zxlXMsbve6mOIPucUnUOrI7Y= | ||||||
|  | github.com/gonum/internal v0.0.0-20181124074243-f884aa714029/go.mod h1:Pu4dmpkhSyOzRwuXkOgAvijx4o+4YMUJJo9OvPYMkks= | ||||||
|  | github.com/gonum/lapack v0.0.0-20181123203213-e4cdc5a0bff9/go.mod h1:XA3DeT6rxh2EAE789SSiSJNqxPaC0aE9J8NTOI0Jo/A= | ||||||
|  | github.com/gonum/mathext v0.0.0-20181121095525-8a4bf007ea55/go.mod h1:fmo8aiSEWkJeiGXUJf+sPvuDgEFgqIoZSs843ePKrGg= | ||||||
|  | github.com/gonum/matrix v0.0.0-20181209220409-c518dec07be9/go.mod h1:0EXg4mc1CNP0HCqCz+K4ts155PXIlUywf0wqN+GfPZw= | ||||||
|  | github.com/gonum/stat v0.0.0-20181125101827-41a0da705a5b/go.mod h1:Z4GIJBJO3Wa4gD4vbwQxXXZ+WHmW6E9ixmNrwvs0iZs= | ||||||
| github.com/google/btree v0.0.0-20180124185431-e89373fe6b4a/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= | github.com/google/btree v0.0.0-20180124185431-e89373fe6b4a/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= | ||||||
| github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= | github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= | ||||||
| github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= | github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= | ||||||
|  | @ -619,6 +632,7 @@ github.com/imdario/mergo v0.3.9 h1:UauaLniWCFHWd+Jp9oCEkTBj8VO/9DKg3PV3VCNMDIg= | ||||||
| github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= | github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= | ||||||
| github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= | github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= | ||||||
| github.com/influxdata/influxdb v0.0.0-20161215172503-049f9b42e9a5/go.mod h1:qZna6X/4elxqT3yI9iZYdZrWWdeFOOprn86kgg4+IzY= | github.com/influxdata/influxdb v0.0.0-20161215172503-049f9b42e9a5/go.mod h1:qZna6X/4elxqT3yI9iZYdZrWWdeFOOprn86kgg4+IzY= | ||||||
|  | github.com/influxdata/tdigest v0.0.0-20180711151920-a7d76c6f093a/go.mod h1:9GkyshztGufsdPQWjH+ifgnIr3xNUL5syI70g2dzU1o= | ||||||
| github.com/influxdata/tdigest v0.0.0-20181121200506-bf2b5ad3c0a9 h1:MHTrDWmQpHq/hkq+7cw9oYAt2PqUw52TZazRA0N7PGE= | github.com/influxdata/tdigest v0.0.0-20181121200506-bf2b5ad3c0a9 h1:MHTrDWmQpHq/hkq+7cw9oYAt2PqUw52TZazRA0N7PGE= | ||||||
| github.com/influxdata/tdigest v0.0.0-20181121200506-bf2b5ad3c0a9/go.mod h1:Js0mqiSBE6Ffsg94weZZ2c+v/ciT8QRHFOap7EKDrR0= | github.com/influxdata/tdigest v0.0.0-20181121200506-bf2b5ad3c0a9/go.mod h1:Js0mqiSBE6Ffsg94weZZ2c+v/ciT8QRHFOap7EKDrR0= | ||||||
| github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= | github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= | ||||||
|  | @ -733,6 +747,7 @@ github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0j | ||||||
| github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= | github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= | ||||||
| github.com/maxbrunsfeld/counterfeiter/v6 v6.2.2/go.mod h1:eD9eIE7cdwcMi9rYluz88Jz2VyhSmden33/aXg4oVIY= | github.com/maxbrunsfeld/counterfeiter/v6 v6.2.2/go.mod h1:eD9eIE7cdwcMi9rYluz88Jz2VyhSmden33/aXg4oVIY= | ||||||
| github.com/mholt/archiver/v3 v3.3.0/go.mod h1:YnQtqsp+94Rwd0D/rk5cnLrxusUBUXg+08Ebtr1Mqao= | github.com/mholt/archiver/v3 v3.3.0/go.mod h1:YnQtqsp+94Rwd0D/rk5cnLrxusUBUXg+08Ebtr1Mqao= | ||||||
|  | github.com/miekg/dns v1.1.17/go.mod h1:WgzbA6oji13JREwiNsRDNfl7jYdPnmz+VEuLrA+/48M= | ||||||
| github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= | github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= | ||||||
| github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= | github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= | ||||||
| github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= | github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= | ||||||
|  | @ -958,8 +973,11 @@ github.com/tektoncd/plumbing/pipelinerun-logs v0.0.0-20191206114338-712d544c2c21 | ||||||
| github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= | github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= | ||||||
| github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= | github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= | ||||||
| github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= | github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= | ||||||
|  | github.com/tsenart/go-tsz v0.0.0-20180814232043-cdeb9e1e981e/go.mod h1:SWZznP1z5Ki7hDT2ioqiFKEse8K9tU2OUvaRI0NeGQo= | ||||||
| github.com/tsenart/vegeta v12.7.1-0.20190725001342-b5f4fca92137+incompatible h1:ErZrHhRveAoznVW80gbrxz+qxJNydpA2fcQxTPHkZbU= | github.com/tsenart/vegeta v12.7.1-0.20190725001342-b5f4fca92137+incompatible h1:ErZrHhRveAoznVW80gbrxz+qxJNydpA2fcQxTPHkZbU= | ||||||
| github.com/tsenart/vegeta v12.7.1-0.20190725001342-b5f4fca92137+incompatible/go.mod h1:Smz/ZWfhKRcyDDChZkG3CyTHdj87lHzio/HOCkbndXM= | github.com/tsenart/vegeta v12.7.1-0.20190725001342-b5f4fca92137+incompatible/go.mod h1:Smz/ZWfhKRcyDDChZkG3CyTHdj87lHzio/HOCkbndXM= | ||||||
|  | github.com/tsenart/vegeta/v12 v12.8.4 h1:UQ7tG7WkDorKj0wjx78Z4/vsMBP8RJQMGJqRVrkvngg= | ||||||
|  | github.com/tsenart/vegeta/v12 v12.8.4/go.mod h1:ZiJtwLn/9M4fTPdMY7bdbIeyNeFVE8/AHbWFqCsUuho= | ||||||
| github.com/ugorji/go v1.1.1/go.mod h1:hnLbHMwcvSihnDhEfx2/BzKp2xb0Y+ErdfYcrs9tkJQ= | github.com/ugorji/go v1.1.1/go.mod h1:hnLbHMwcvSihnDhEfx2/BzKp2xb0Y+ErdfYcrs9tkJQ= | ||||||
| github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= | github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= | ||||||
| github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= | github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= | ||||||
|  | @ -1052,6 +1070,7 @@ golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8U | ||||||
| golang.org/x/crypto v0.0.0-20190617133340-57b3e21c3d56/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | golang.org/x/crypto v0.0.0-20190617133340-57b3e21c3d56/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | ||||||
| golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | ||||||
| golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | ||||||
|  | golang.org/x/crypto v0.0.0-20190829043050-9756ffdc2472/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | ||||||
| golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | ||||||
| golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550 h1:ObdrDkeb4kJdCP557AjRjq69pTHfNouLtWZG7j9rPN8= | golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550 h1:ObdrDkeb4kJdCP557AjRjq69pTHfNouLtWZG7j9rPN8= | ||||||
| golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | ||||||
|  | @ -1278,6 +1297,7 @@ golang.org/x/tools v0.0.0-20190706070813-72ffa07ba3db/go.mod h1:jcCCGcm9btYwXyDq | ||||||
| golang.org/x/tools v0.0.0-20190729092621-ff9f1409240a/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI= | golang.org/x/tools v0.0.0-20190729092621-ff9f1409240a/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI= | ||||||
| golang.org/x/tools v0.0.0-20190807223507-b346f7fd45de/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | golang.org/x/tools v0.0.0-20190807223507-b346f7fd45de/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | ||||||
| golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | ||||||
|  | golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | ||||||
| golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | ||||||
| golang.org/x/tools v0.0.0-20190920225731-5eefd052ad72/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | golang.org/x/tools v0.0.0-20190920225731-5eefd052ad72/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | ||||||
| golang.org/x/tools v0.0.0-20190927191325-030b2cf1153e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | golang.org/x/tools v0.0.0-20190927191325-030b2cf1153e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= | ||||||
|  | @ -1619,6 +1639,8 @@ knative.dev/test-infra v0.0.0-20201013100530-45e0761df397/go.mod h1:PwM5WCaFn0Hl | ||||||
| mvdan.cc/xurls/v2 v2.0.0/go.mod h1:2/webFPYOXN9jp/lzuj0zuAVlF+9g4KPFJANH1oJhRU= | mvdan.cc/xurls/v2 v2.0.0/go.mod h1:2/webFPYOXN9jp/lzuj0zuAVlF+9g4KPFJANH1oJhRU= | ||||||
| pack.ag/amqp v0.11.0/go.mod h1:4/cbmt4EJXSKlG6LCfWHoqmN0uFdy5i/+YFz+fTfhV4= | pack.ag/amqp v0.11.0/go.mod h1:4/cbmt4EJXSKlG6LCfWHoqmN0uFdy5i/+YFz+fTfhV4= | ||||||
| pack.ag/amqp v0.11.2/go.mod h1:4/cbmt4EJXSKlG6LCfWHoqmN0uFdy5i/+YFz+fTfhV4= | pack.ag/amqp v0.11.2/go.mod h1:4/cbmt4EJXSKlG6LCfWHoqmN0uFdy5i/+YFz+fTfhV4= | ||||||
|  | pgregory.net/rapid v0.3.3 h1:jCjBsY4ln4Atz78QoBWxUEvAHaFyNDQg9+WU62aCn1U= | ||||||
|  | pgregory.net/rapid v0.3.3/go.mod h1:UYpPVyjFHzYBGHIxLFoupi8vwk6rXNzRY9OMvVxFIOU= | ||||||
| rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= | rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= | ||||||
| rsc.io/letsencrypt v0.0.3/go.mod h1:buyQKZ6IXrRnB7TdkHP0RyEybLx18HHyOSoTyoOLqNY= | rsc.io/letsencrypt v0.0.3/go.mod h1:buyQKZ6IXrRnB7TdkHP0RyEybLx18HHyOSoTyoOLqNY= | ||||||
| rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= | rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= | ||||||
|  |  | ||||||
|  | @ -22,7 +22,7 @@ import ( | ||||||
| 	"strings" | 	"strings" | ||||||
| 	"time" | 	"time" | ||||||
| 
 | 
 | ||||||
| 	vegeta "github.com/tsenart/vegeta/lib" | 	vegeta "github.com/tsenart/vegeta/v12/lib" | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| // combinedPacer is a Pacer that combines multiple Pacers and runs them sequentially when being used for attack.
 | // combinedPacer is a Pacer that combines multiple Pacers and runs them sequentially when being used for attack.
 | ||||||
|  | @ -103,6 +103,12 @@ func (cp *combinedPacer) Pace(elapsedTime time.Duration, elapsedHits uint64) (ti | ||||||
| 	return curPacer.Pace(curElapsedTime, curElapsedHits) | 	return curPacer.Pace(curElapsedTime, curElapsedHits) | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | func (cp *combinedPacer) Rate(elapsedTime time.Duration) float64 { | ||||||
|  | 	curPacer := cp.pacers[cp.curPacerIndex] | ||||||
|  | 	curElapsedTime := time.Duration(uint64(elapsedTime) - cp.prevElapsedTime) | ||||||
|  | 	return curPacer.Rate(curElapsedTime) | ||||||
|  | } | ||||||
|  | 
 | ||||||
| // pacerIndex returns the index of pacer that pacerTimeOffset falls into
 | // pacerIndex returns the index of pacer that pacerTimeOffset falls into
 | ||||||
| func (cp *combinedPacer) pacerIndex(pacerTimeOffset uint64) uint { | func (cp *combinedPacer) pacerIndex(pacerTimeOffset uint64) uint { | ||||||
| 	i, j := 0, len(cp.stepDurations) | 	i, j := 0, len(cp.stepDurations) | ||||||
|  |  | ||||||
|  | @ -20,7 +20,7 @@ import ( | ||||||
| 	"testing" | 	"testing" | ||||||
| 	"time" | 	"time" | ||||||
| 
 | 
 | ||||||
| 	vegeta "github.com/tsenart/vegeta/lib" | 	vegeta "github.com/tsenart/vegeta/v12/lib" | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| func TestCombinedPacer(t *testing.T) { | func TestCombinedPacer(t *testing.T) { | ||||||
|  |  | ||||||
|  | @ -22,7 +22,7 @@ import ( | ||||||
| 	"math" | 	"math" | ||||||
| 	"time" | 	"time" | ||||||
| 
 | 
 | ||||||
| 	vegeta "github.com/tsenart/vegeta/lib" | 	vegeta "github.com/tsenart/vegeta/v12/lib" | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| // steadyUpPacer is a Pacer that describes attack request rates that increases in the beginning then becomes steady.
 | // steadyUpPacer is a Pacer that describes attack request rates that increases in the beginning then becomes steady.
 | ||||||
|  | @ -107,6 +107,12 @@ func (sup *steadyUpPacer) Pace(elapsedTime time.Duration, elapsedHits uint64) (t | ||||||
| 	return nextHitIn, false | 	return nextHitIn, false | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | // Rate returns a Pacer's instantaneous hit rate (per seconds) at the given elapsed
 | ||||||
|  | // duration of an attack.
 | ||||||
|  | func (sup *steadyUpPacer) Rate(elapsedTime time.Duration) float64 { | ||||||
|  | 	return sup.hitsPerNs(elapsedTime) * 1e9 | ||||||
|  | } | ||||||
|  | 
 | ||||||
| // hits returns the number of expected hits for this pacer during the given time.
 | // hits returns the number of expected hits for this pacer during the given time.
 | ||||||
| func (sup *steadyUpPacer) hits(t time.Duration) float64 { | func (sup *steadyUpPacer) hits(t time.Duration) float64 { | ||||||
| 	// If t is smaller than the upDuration, calculate the hits as a trapezoid.
 | 	// If t is smaller than the upDuration, calculate the hits as a trapezoid.
 | ||||||
|  |  | ||||||
|  | @ -20,7 +20,7 @@ import ( | ||||||
| 	"testing" | 	"testing" | ||||||
| 	"time" | 	"time" | ||||||
| 
 | 
 | ||||||
| 	vegeta "github.com/tsenart/vegeta/lib" | 	vegeta "github.com/tsenart/vegeta/v12/lib" | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| func TestSteadyUpPacer(t *testing.T) { | func TestSteadyUpPacer(t *testing.T) { | ||||||
|  |  | ||||||
|  | @ -1,6 +1,6 @@ | ||||||
| The MIT License (MIT) | The MIT License (MIT) | ||||||
| 
 | 
 | ||||||
| Copyright (c) 2013-2016 Tomás Senart | Copyright (c) 2013-2020 Tomás Senart | ||||||
| 
 | 
 | ||||||
| Permission is hereby granted, free of charge, to any person obtaining a copy of | Permission is hereby granted, free of charge, to any person obtaining a copy of | ||||||
| this software and associated documentation files (the "Software"), to deal in | this software and associated documentation files (the "Software"), to deal in | ||||||
|  | @ -0,0 +1,5 @@ | ||||||
|  | .root | ||||||
|  | *_easyjson.go | ||||||
|  | *.iml | ||||||
|  | .idea | ||||||
|  | *.swp | ||||||
|  | @ -0,0 +1,12 @@ | ||||||
|  | language: go | ||||||
|  | 
 | ||||||
|  | go: | ||||||
|  |   - tip | ||||||
|  |   - stable | ||||||
|  | 
 | ||||||
|  | matrix: | ||||||
|  |   allow_failures: | ||||||
|  |     - go: tip | ||||||
|  | 
 | ||||||
|  | install: | ||||||
|  |   - go get golang.org/x/lint/golint | ||||||
|  | @ -0,0 +1,54 @@ | ||||||
|  | all: test | ||||||
|  | 
 | ||||||
|  | clean: | ||||||
|  | 	rm -rf bin | ||||||
|  | 	rm -rf tests/*_easyjson.go | ||||||
|  | 	rm -rf benchmark/*_easyjson.go | ||||||
|  | 
 | ||||||
|  | build: | ||||||
|  | 	go build -i -o ./bin/easyjson ./easyjson | ||||||
|  | 
 | ||||||
|  | generate: build | ||||||
|  | 	bin/easyjson -stubs \
 | ||||||
|  | 		./tests/snake.go \
 | ||||||
|  | 		./tests/data.go \
 | ||||||
|  | 		./tests/omitempty.go \
 | ||||||
|  | 		./tests/nothing.go \
 | ||||||
|  | 		./tests/named_type.go \
 | ||||||
|  | 		./tests/custom_map_key_type.go \
 | ||||||
|  | 		./tests/embedded_type.go \
 | ||||||
|  | 		./tests/reference_to_pointer.go \
 | ||||||
|  | 		./tests/html.go \
 | ||||||
|  | 
 | ||||||
|  | 	bin/easyjson -all ./tests/data.go | ||||||
|  | 	bin/easyjson -all ./tests/nothing.go | ||||||
|  | 	bin/easyjson -all ./tests/errors.go | ||||||
|  | 	bin/easyjson -all ./tests/html.go | ||||||
|  | 	bin/easyjson -snake_case ./tests/snake.go | ||||||
|  | 	bin/easyjson -omit_empty ./tests/omitempty.go | ||||||
|  | 	bin/easyjson -build_tags=use_easyjson ./benchmark/data.go | ||||||
|  | 	bin/easyjson ./tests/nested_easy.go | ||||||
|  | 	bin/easyjson ./tests/named_type.go | ||||||
|  | 	bin/easyjson ./tests/custom_map_key_type.go | ||||||
|  | 	bin/easyjson ./tests/embedded_type.go | ||||||
|  | 	bin/easyjson ./tests/reference_to_pointer.go | ||||||
|  | 	bin/easyjson ./tests/key_marshaler_map.go | ||||||
|  | 	bin/easyjson -disallow_unknown_fields ./tests/disallow_unknown.go | ||||||
|  | 
 | ||||||
|  | test: generate | ||||||
|  | 	go test \
 | ||||||
|  | 		./tests \
 | ||||||
|  | 		./jlexer \
 | ||||||
|  | 		./gen \
 | ||||||
|  | 		./buffer | ||||||
|  | 	cd benchmark && go test -benchmem -tags use_easyjson -bench . | ||||||
|  | 	golint -set_exit_status ./tests/*_easyjson.go | ||||||
|  | 
 | ||||||
|  | bench-other: generate | ||||||
|  | 	cd benchmark && make | ||||||
|  | 
 | ||||||
|  | bench-python: | ||||||
|  | 	benchmark/ujson.sh | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | .PHONY: clean generate test build | ||||||
|  | @ -0,0 +1,336 @@ | ||||||
|  | # easyjson [](https://travis-ci.org/mailru/easyjson) [](https://goreportcard.com/report/github.com/mailru/easyjson) | ||||||
|  | 
 | ||||||
|  | Package easyjson provides a fast and easy way to marshal/unmarshal Go structs | ||||||
|  | to/from JSON without the use of reflection. In performance tests, easyjson | ||||||
|  | outperforms the standard `encoding/json` package by a factor of 4-5x, and other | ||||||
|  | JSON encoding packages by a factor of 2-3x. | ||||||
|  | 
 | ||||||
|  | easyjson aims to keep generated Go code simple enough so that it can be easily | ||||||
|  | optimized or fixed. Another goal is to provide users with the ability to | ||||||
|  | customize the generated code by providing options not available with the | ||||||
|  | standard `encoding/json` package, such as generating "snake_case" names or | ||||||
|  | enabling `omitempty` behavior by default. | ||||||
|  | 
 | ||||||
|  | ## Usage | ||||||
|  | ```sh | ||||||
|  | # install | ||||||
|  | go get -u github.com/mailru/easyjson/... | ||||||
|  | 
 | ||||||
|  | # run | ||||||
|  | easyjson -all <file>.go | ||||||
|  | ``` | ||||||
|  | 
 | ||||||
|  | The above will generate `<file>_easyjson.go` containing the appropriate marshaler and | ||||||
|  | unmarshaler funcs for all structs contained in `<file>.go`. | ||||||
|  | 
 | ||||||
|  | Please note that easyjson requires a full Go build environment and the `GOPATH` | ||||||
|  | environment variable to be set. This is because easyjson code generation | ||||||
|  | invokes `go run` on a temporary file (an approach to code generation borrowed | ||||||
|  | from [ffjson](https://github.com/pquerna/ffjson)). | ||||||
|  | 
 | ||||||
|  | ## Options | ||||||
|  | ```txt | ||||||
|  | Usage of easyjson: | ||||||
|  |   -all | ||||||
|  |     	generate marshaler/unmarshalers for all structs in a file | ||||||
|  |   -build_tags string | ||||||
|  |     	build tags to add to generated file | ||||||
|  |   -leave_temps | ||||||
|  |     	do not delete temporary files | ||||||
|  |   -no_std_marshalers | ||||||
|  |     	don't generate MarshalJSON/UnmarshalJSON funcs | ||||||
|  |   -noformat | ||||||
|  |     	do not run 'gofmt -w' on output file | ||||||
|  |   -omit_empty | ||||||
|  |     	omit empty fields by default | ||||||
|  |   -output_filename string | ||||||
|  |     	specify the filename of the output | ||||||
|  |   -pkg | ||||||
|  |     	process the whole package instead of just the given file | ||||||
|  |   -snake_case | ||||||
|  |     	use snake_case names instead of CamelCase by default | ||||||
|  |   -lower_camel_case | ||||||
|  |         use lowerCamelCase instead of CamelCase by default | ||||||
|  |   -stubs | ||||||
|  |     	only generate stubs for marshaler/unmarshaler funcs | ||||||
|  |   -disallow_unknown_fields | ||||||
|  |         return error if some unknown field in json appeared | ||||||
|  | ``` | ||||||
|  | 
 | ||||||
|  | Using `-all` will generate marshalers/unmarshalers for all Go structs in the | ||||||
|  | file. If `-all` is not provided, then only those structs whose preceding | ||||||
|  | comment starts with `easyjson:json` will have marshalers/unmarshalers | ||||||
|  | generated. For example: | ||||||
|  | 
 | ||||||
|  | ```go | ||||||
|  | //easyjson:json | ||||||
|  | type A struct {} | ||||||
|  | ``` | ||||||
|  | 
 | ||||||
|  | Additional option notes: | ||||||
|  | 
 | ||||||
|  | * `-snake_case` tells easyjson to generate snake\_case field names by default | ||||||
|  |   (unless overridden by a field tag). The CamelCase to snake\_case conversion | ||||||
|  |   algorithm should work in most cases (ie, HTTPVersion will be converted to | ||||||
|  |   "http_version"). | ||||||
|  | 
 | ||||||
|  | * `-build_tags` will add the specified build tags to generated Go sources. | ||||||
|  | 
 | ||||||
|  | ## Generated Marshaler/Unmarshaler Funcs | ||||||
|  | 
 | ||||||
|  | For Go struct types, easyjson generates the funcs `MarshalEasyJSON` / | ||||||
|  | `UnmarshalEasyJSON` for marshaling/unmarshaling JSON. In turn, these satisify | ||||||
|  | the `easyjson.Marshaler` and `easyjson.Unmarshaler` interfaces and when used in | ||||||
|  | conjunction with `easyjson.Marshal` / `easyjson.Unmarshal` avoid unnecessary | ||||||
|  | reflection / type assertions during marshaling/unmarshaling to/from JSON for Go | ||||||
|  | structs. | ||||||
|  | 
 | ||||||
|  | easyjson also generates `MarshalJSON` and `UnmarshalJSON` funcs for Go struct | ||||||
|  | types compatible with the standard `json.Marshaler` and `json.Unmarshaler` | ||||||
|  | interfaces. Please be aware that using the standard `json.Marshal` / | ||||||
|  | `json.Unmarshal` for marshaling/unmarshaling will incur a significant | ||||||
|  | performance penalty when compared to using `easyjson.Marshal` / | ||||||
|  | `easyjson.Unmarshal`. | ||||||
|  | 
 | ||||||
|  | Additionally, easyjson exposes utility funcs that use the `MarshalEasyJSON` and | ||||||
|  | `UnmarshalEasyJSON` for marshaling/unmarshaling to and from standard readers | ||||||
|  | and writers. For example, easyjson provides `easyjson.MarshalToHTTPResponseWriter` | ||||||
|  | which marshals to the standard `http.ResponseWriter`. Please see the [GoDoc | ||||||
|  | listing](https://godoc.org/github.com/mailru/easyjson) for the full listing of | ||||||
|  | utility funcs that are available. | ||||||
|  | 
 | ||||||
|  | ## Controlling easyjson Marshaling and Unmarshaling Behavior | ||||||
|  | 
 | ||||||
|  | Go types can provide their own `MarshalEasyJSON` and `UnmarshalEasyJSON` funcs | ||||||
|  | that satisfy the `easyjson.Marshaler` / `easyjson.Unmarshaler` interfaces. | ||||||
|  | These will be used by `easyjson.Marshal` and `easyjson.Unmarshal` when defined | ||||||
|  | for a Go type. | ||||||
|  | 
 | ||||||
|  | Go types can also satisfy the `easyjson.Optional` interface, which allows the | ||||||
|  | type to define its own `omitempty` logic. | ||||||
|  | 
 | ||||||
|  | ## Type Wrappers | ||||||
|  | 
 | ||||||
|  | easyjson provides additional type wrappers defined in the `easyjson/opt` | ||||||
|  | package. These wrap the standard Go primitives and in turn satisfy the | ||||||
|  | easyjson interfaces. | ||||||
|  | 
 | ||||||
|  | The `easyjson/opt` type wrappers are useful when needing to distinguish between | ||||||
|  | a missing value and/or when needing to specify a default value. Type | ||||||
|  | wrappers allow easyjson to avoid additional pointers and heap allocations and | ||||||
|  | can significantly increase performance when used properly. | ||||||
|  | 
 | ||||||
|  | ## Memory Pooling | ||||||
|  | 
 | ||||||
|  | easyjson uses a buffer pool that allocates data in increasing chunks from 128 | ||||||
|  | to 32768 bytes. Chunks of 512 bytes and larger will be reused with the help of | ||||||
|  | `sync.Pool`. The maximum size of a chunk is bounded to reduce redundant memory | ||||||
|  | allocation and to allow larger reusable buffers. | ||||||
|  | 
 | ||||||
|  | easyjson's custom allocation buffer pool is defined in the `easyjson/buffer` | ||||||
|  | package, and the default pool behavior can be modified (if necessary) | ||||||
|  | through a call to `buffer.Init()` prior to any marshaling or unmarshaling. | ||||||
|  | Please see the [GoDoc listing](https://godoc.org/github.com/mailru/easyjson/buffer) | ||||||
|  | for more information. | ||||||
|  | 
 | ||||||
|  | ## Issues, Notes, and Limitations | ||||||
|  | 
 | ||||||
|  | * easyjson is still early in its development. As such, there are likely to be | ||||||
|  |   bugs and missing features when compared to `encoding/json`. In the case of a | ||||||
|  |   missing feature or bug, please create a GitHub issue. Pull requests are | ||||||
|  |   welcome! | ||||||
|  | 
 | ||||||
|  | * Unlike `encoding/json`, object keys are case-sensitive. Case-insensitive | ||||||
|  |   matching is not currently provided due to the significant performance hit | ||||||
|  |   when doing case-insensitive key matching. In the future, case-insensitive | ||||||
|  |   object key matching may be provided via an option to the generator. | ||||||
|  | 
 | ||||||
|  | * easyjson makes use of `unsafe`, which simplifies the code and | ||||||
|  |   provides significant performance benefits by allowing no-copy | ||||||
|  |   conversion from `[]byte` to `string`. That said, `unsafe` is used | ||||||
|  |   only when unmarshaling and parsing JSON, and any `unsafe` operations | ||||||
|  |   / memory allocations done will be safely deallocated by | ||||||
|  |   easyjson. Set the build tag `easyjson_nounsafe` to compile it | ||||||
|  |   without `unsafe`. | ||||||
|  | 
 | ||||||
|  | * easyjson is compatible with Google App Engine. The `appengine` build | ||||||
|  |   tag (set by App Engine's environment) will automatically disable the | ||||||
|  |   use of `unsafe`, which is not allowed in App Engine's Standard | ||||||
|  |   Environment. Note that the use with App Engine is still experimental. | ||||||
|  | 
 | ||||||
|  | * Floats are formatted using the default precision from Go's `strconv` package. | ||||||
|  |   As such, easyjson will not correctly handle high precision floats when | ||||||
|  |   marshaling/unmarshaling JSON. Note, however, that there are very few/limited | ||||||
|  |   uses where this behavior is not sufficient for general use. That said, a | ||||||
|  |   different package may be needed if precise marshaling/unmarshaling of high | ||||||
|  |   precision floats to/from JSON is required. | ||||||
|  | 
 | ||||||
|  | * While unmarshaling, the JSON parser does the minimal amount of work needed to | ||||||
|  |   skip over unmatching parens, and as such full validation is not done for the | ||||||
|  |   entire JSON value being unmarshaled/parsed. | ||||||
|  | 
 | ||||||
|  | * Currently there is no true streaming support for encoding/decoding as | ||||||
|  |   typically for many uses/protocols the final, marshaled length of the JSON | ||||||
|  |   needs to be known prior to sending the data. Currently this is not possible | ||||||
|  |   with easyjson's architecture. | ||||||
|  |    | ||||||
|  | * easyjson's parser and codegen are based on reflection, so they won't work on `package main`  | ||||||
|  |   files, because those cannot be imported by the parser. | ||||||
|  | 
 | ||||||
|  | ## Benchmarks | ||||||
|  | 
 | ||||||
|  | Most benchmarks were done using the example | ||||||
|  | [13kB example JSON](https://dev.twitter.com/rest/reference/get/search/tweets) | ||||||
|  | (9k after eliminating whitespace). This example is similar to real-world data, | ||||||
|  | is well-structured, and contains a healthy variety of different types, making | ||||||
|  | it ideal for JSON serialization benchmarks. | ||||||
|  | 
 | ||||||
|  | Note: | ||||||
|  | 
 | ||||||
|  | * For small request benchmarks, an 80 byte portion of the above example was | ||||||
|  |   used. | ||||||
|  | 
 | ||||||
|  | * For large request marshaling benchmarks, a struct containing 50 regular | ||||||
|  |   samples was used, making a ~500kB output JSON. | ||||||
|  | 
 | ||||||
|  | * Benchmarks are showing the results of easyjson's default behaviour, | ||||||
|  |   which makes use of `unsafe`. | ||||||
|  | 
 | ||||||
|  | Benchmarks are available in the repository and can be run by invoking `make`. | ||||||
|  | 
 | ||||||
|  | ### easyjson vs. encoding/json | ||||||
|  | 
 | ||||||
|  | easyjson is roughly 5-6 times faster than the standard `encoding/json` for | ||||||
|  | unmarshaling, and 3-4 times faster for non-concurrent marshaling. Concurrent | ||||||
|  | marshaling is 6-7x faster if marshaling to a writer. | ||||||
|  | 
 | ||||||
|  | ### easyjson vs. ffjson | ||||||
|  | 
 | ||||||
|  | easyjson uses the same approach for JSON marshaling as | ||||||
|  | [ffjson](https://github.com/pquerna/ffjson), but takes a significantly | ||||||
|  | different approach to lexing and parsing JSON during unmarshaling. This means | ||||||
|  | easyjson is roughly 2-3x faster for unmarshaling and 1.5-2x faster for | ||||||
|  | non-concurrent unmarshaling. | ||||||
|  | 
 | ||||||
|  | As of this writing, `ffjson` seems to have issues when used concurrently: | ||||||
|  | specifically, large request pooling hurts `ffjson`'s performance and causes | ||||||
|  | scalability issues. These issues with `ffjson` can likely be fixed, but as of | ||||||
|  | writing remain outstanding/known issues with `ffjson`. | ||||||
|  | 
 | ||||||
|  | easyjson and `ffjson` have similar performance for small requests, however | ||||||
|  | easyjson outperforms `ffjson` by roughly 2-5x times for large requests when | ||||||
|  | used with a writer. | ||||||
|  | 
 | ||||||
|  | ### easyjson vs. go/codec | ||||||
|  | 
 | ||||||
|  | [go/codec](https://github.com/ugorji/go) provides | ||||||
|  | compile-time helpers for JSON generation. In this case, helpers do not work | ||||||
|  | like marshalers as they are encoding-independent. | ||||||
|  | 
 | ||||||
|  | easyjson is generally 2x faster than `go/codec` for non-concurrent benchmarks | ||||||
|  | and about 3x faster for concurrent encoding (without marshaling to a writer). | ||||||
|  | 
 | ||||||
|  | In an attempt to measure marshaling performance of `go/codec` (as opposed to | ||||||
|  | allocations/memcpy/writer interface invocations), a benchmark was done with | ||||||
|  | resetting length of a byte slice rather than resetting the whole slice to nil. | ||||||
|  | However, the optimization in this exact form may not be applicable in practice, | ||||||
|  | since the memory is not freed between marshaling operations. | ||||||
|  | 
 | ||||||
|  | ### easyjson vs 'ujson' python module | ||||||
|  | 
 | ||||||
|  | [ujson](https://github.com/esnme/ultrajson) is using C code for parsing, so it | ||||||
|  | is interesting to see how plain golang compares to that. It is important to note | ||||||
|  | that the resulting object for python is slower to access, since the library | ||||||
|  | parses JSON object into dictionaries. | ||||||
|  | 
 | ||||||
|  | easyjson is slightly faster for unmarshaling and 2-3x faster than `ujson` for | ||||||
|  | marshaling. | ||||||
|  | 
 | ||||||
|  | ### Benchmark Results | ||||||
|  | 
 | ||||||
|  | `ffjson` results are from February 4th, 2016, using the latest `ffjson` and go1.6. | ||||||
|  | `go/codec` results are from March 4th, 2016, using the latest `go/codec` and go1.6. | ||||||
|  | 
 | ||||||
|  | #### Unmarshaling | ||||||
|  | 
 | ||||||
|  | | lib      | json size | MB/s | allocs/op | B/op  | | ||||||
|  | |:---------|:----------|-----:|----------:|------:| | ||||||
|  | | standard | regular   | 22   | 218       | 10229 | | ||||||
|  | | standard | small     | 9.7  | 14        | 720   | | ||||||
|  | |          |           |      |           |       | | ||||||
|  | | easyjson | regular   | 125  | 128       | 9794  | | ||||||
|  | | easyjson | small     | 67   | 3         | 128   | | ||||||
|  | |          |           |      |           |       | | ||||||
|  | | ffjson   | regular   | 66   | 141       | 9985  | | ||||||
|  | | ffjson   | small     | 17.6 | 10        | 488   | | ||||||
|  | |          |           |      |           |       | | ||||||
|  | | codec    | regular   | 55   | 434       | 19299 | | ||||||
|  | | codec    | small     | 29   | 7         | 336   | | ||||||
|  | |          |           |      |           |       | | ||||||
|  | | ujson    | regular   | 103  | N/A       | N/A   | | ||||||
|  | 
 | ||||||
|  | #### Marshaling, one goroutine. | ||||||
|  | 
 | ||||||
|  | | lib       | json size | MB/s | allocs/op | B/op  | | ||||||
|  | |:----------|:----------|-----:|----------:|------:| | ||||||
|  | | standard  | regular   | 75   | 9         | 23256 | | ||||||
|  | | standard  | small     | 32   | 3         | 328   | | ||||||
|  | | standard  | large     | 80   | 17        | 1.2M  | | ||||||
|  | |           |           |      |           |       | | ||||||
|  | | easyjson  | regular   | 213  | 9         | 10260 | | ||||||
|  | | easyjson* | regular   | 263  | 8         | 742   | | ||||||
|  | | easyjson  | small     | 125  | 1         | 128   | | ||||||
|  | | easyjson  | large     | 212  | 33        | 490k  | | ||||||
|  | | easyjson* | large     | 262  | 25        | 2879  | | ||||||
|  | |           |           |      |           |       | | ||||||
|  | | ffjson    | regular   | 122  | 153       | 21340 | | ||||||
|  | | ffjson**  | regular   | 146  | 152       | 4897  | | ||||||
|  | | ffjson    | small     | 36   | 5         | 384   | | ||||||
|  | | ffjson**  | small     | 64   | 4         | 128   | | ||||||
|  | | ffjson    | large     | 134  | 7317      | 818k  | | ||||||
|  | | ffjson**  | large     | 125  | 7320      | 827k  | | ||||||
|  | |           |           |      |           |       | | ||||||
|  | | codec     | regular   | 80   | 17        | 33601 | | ||||||
|  | | codec***  | regular   | 108  | 9         | 1153  | | ||||||
|  | | codec     | small     | 42   | 3         | 304   | | ||||||
|  | | codec***  | small     | 56   | 1         | 48    | | ||||||
|  | | codec     | large     | 73   | 483       | 2.5M  | | ||||||
|  | | codec***  | large     | 103  | 451       | 66007 | | ||||||
|  | |           |           |      |           |       | | ||||||
|  | | ujson     | regular   | 92   | N/A       | N/A   | | ||||||
|  | 
 | ||||||
|  | \* marshaling to a writer, | ||||||
|  | \*\* using `ffjson.Pool()`, | ||||||
|  | \*\*\* reusing output slice instead of resetting it to nil | ||||||
|  | 
 | ||||||
|  | #### Marshaling, concurrent. | ||||||
|  | 
 | ||||||
|  | | lib       | json size | MB/s | allocs/op | B/op  | | ||||||
|  | |:----------|:----------|-----:|----------:|------:| | ||||||
|  | | standard  | regular   | 252  | 9         | 23257 | | ||||||
|  | | standard  | small     | 124  | 3         | 328   | | ||||||
|  | | standard  | large     | 289  | 17        | 1.2M  | | ||||||
|  | |           |           |      |           |       | | ||||||
|  | | easyjson  | regular   | 792  | 9         | 10597 | | ||||||
|  | | easyjson* | regular   | 1748 | 8         | 779   | | ||||||
|  | | easyjson  | small     | 333  | 1         | 128   | | ||||||
|  | | easyjson  | large     | 718  | 36        | 548k  | | ||||||
|  | | easyjson* | large     | 2134 | 25        | 4957  | | ||||||
|  | |           |           |      |           |       | | ||||||
|  | | ffjson    | regular   | 301  | 153       | 21629 | | ||||||
|  | | ffjson**  | regular   | 707  | 152       | 5148  | | ||||||
|  | | ffjson    | small     | 62   | 5         | 384   | | ||||||
|  | | ffjson**  | small     | 282  | 4         | 128   | | ||||||
|  | | ffjson    | large     | 438  | 7330      | 1.0M  | | ||||||
|  | | ffjson**  | large     | 131  | 7319      | 820k  | | ||||||
|  | |           |           |      |           |       | | ||||||
|  | | codec     | regular   | 183  | 17        | 33603 | | ||||||
|  | | codec***  | regular   | 671  | 9         | 1157  | | ||||||
|  | | codec     | small     | 147  | 3         | 304   | | ||||||
|  | | codec***  | small     | 299  | 1         | 48    | | ||||||
|  | | codec     | large     | 190  | 483       | 2.5M  | | ||||||
|  | | codec***  | large     | 752  | 451       | 77574 | | ||||||
|  | 
 | ||||||
|  | \* marshaling to a writer, | ||||||
|  | \*\* using `ffjson.Pool()`, | ||||||
|  | \*\*\* reusing output slice instead of resetting it to nil | ||||||
|  | @ -0,0 +1,3 @@ | ||||||
|  | module github.com/mailru/easyjson | ||||||
|  | 
 | ||||||
|  | go 1.12 | ||||||
|  | @ -0,0 +1,78 @@ | ||||||
|  | // Package easyjson contains marshaler/unmarshaler interfaces and helper functions.
 | ||||||
|  | package easyjson | ||||||
|  | 
 | ||||||
|  | import ( | ||||||
|  | 	"io" | ||||||
|  | 	"io/ioutil" | ||||||
|  | 	"net/http" | ||||||
|  | 	"strconv" | ||||||
|  | 
 | ||||||
|  | 	"github.com/mailru/easyjson/jlexer" | ||||||
|  | 	"github.com/mailru/easyjson/jwriter" | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | // Marshaler is an easyjson-compatible marshaler interface.
 | ||||||
|  | type Marshaler interface { | ||||||
|  | 	MarshalEasyJSON(w *jwriter.Writer) | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // Unmarshaler is an easyjson-compatible unmarshaler interface.
 | ||||||
|  | type Unmarshaler interface { | ||||||
|  | 	UnmarshalEasyJSON(w *jlexer.Lexer) | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // Optional defines an undefined-test method for a type to integrate with 'omitempty' logic.
 | ||||||
|  | type Optional interface { | ||||||
|  | 	IsDefined() bool | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // Marshal returns data as a single byte slice. Method is suboptimal as the data is likely to be copied
 | ||||||
|  | // from a chain of smaller chunks.
 | ||||||
|  | func Marshal(v Marshaler) ([]byte, error) { | ||||||
|  | 	w := jwriter.Writer{} | ||||||
|  | 	v.MarshalEasyJSON(&w) | ||||||
|  | 	return w.BuildBytes() | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // MarshalToWriter marshals the data to an io.Writer.
 | ||||||
|  | func MarshalToWriter(v Marshaler, w io.Writer) (written int, err error) { | ||||||
|  | 	jw := jwriter.Writer{} | ||||||
|  | 	v.MarshalEasyJSON(&jw) | ||||||
|  | 	return jw.DumpTo(w) | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // MarshalToHTTPResponseWriter sets Content-Length and Content-Type headers for the
 | ||||||
|  | // http.ResponseWriter, and send the data to the writer. started will be equal to
 | ||||||
|  | // false if an error occurred before any http.ResponseWriter methods were actually
 | ||||||
|  | // invoked (in this case a 500 reply is possible).
 | ||||||
|  | func MarshalToHTTPResponseWriter(v Marshaler, w http.ResponseWriter) (started bool, written int, err error) { | ||||||
|  | 	jw := jwriter.Writer{} | ||||||
|  | 	v.MarshalEasyJSON(&jw) | ||||||
|  | 	if jw.Error != nil { | ||||||
|  | 		return false, 0, jw.Error | ||||||
|  | 	} | ||||||
|  | 	w.Header().Set("Content-Type", "application/json") | ||||||
|  | 	w.Header().Set("Content-Length", strconv.Itoa(jw.Size())) | ||||||
|  | 
 | ||||||
|  | 	started = true | ||||||
|  | 	written, err = jw.DumpTo(w) | ||||||
|  | 	return | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // Unmarshal decodes the JSON in data into the object.
 | ||||||
|  | func Unmarshal(data []byte, v Unmarshaler) error { | ||||||
|  | 	l := jlexer.Lexer{Data: data} | ||||||
|  | 	v.UnmarshalEasyJSON(&l) | ||||||
|  | 	return l.Error() | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // UnmarshalFromReader reads all the data in the reader and decodes as JSON into the object.
 | ||||||
|  | func UnmarshalFromReader(r io.Reader, v Unmarshaler) error { | ||||||
|  | 	data, err := ioutil.ReadAll(r) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  | 	l := jlexer.Lexer{Data: data} | ||||||
|  | 	v.UnmarshalEasyJSON(&l) | ||||||
|  | 	return l.Error() | ||||||
|  | } | ||||||
|  | @ -0,0 +1,45 @@ | ||||||
|  | package easyjson | ||||||
|  | 
 | ||||||
|  | import ( | ||||||
|  | 	"github.com/mailru/easyjson/jlexer" | ||||||
|  | 	"github.com/mailru/easyjson/jwriter" | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | // RawMessage is a raw piece of JSON (number, string, bool, object, array or
 | ||||||
|  | // null) that is extracted without parsing and output as is during marshaling.
 | ||||||
|  | type RawMessage []byte | ||||||
|  | 
 | ||||||
|  | // MarshalEasyJSON does JSON marshaling using easyjson interface.
 | ||||||
|  | func (v *RawMessage) MarshalEasyJSON(w *jwriter.Writer) { | ||||||
|  | 	if len(*v) == 0 { | ||||||
|  | 		w.RawString("null") | ||||||
|  | 	} else { | ||||||
|  | 		w.Raw(*v, nil) | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // UnmarshalEasyJSON does JSON unmarshaling using easyjson interface.
 | ||||||
|  | func (v *RawMessage) UnmarshalEasyJSON(l *jlexer.Lexer) { | ||||||
|  | 	*v = RawMessage(l.Raw()) | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // UnmarshalJSON implements encoding/json.Unmarshaler interface.
 | ||||||
|  | func (v *RawMessage) UnmarshalJSON(data []byte) error { | ||||||
|  | 	*v = data | ||||||
|  | 	return nil | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | var nullBytes = []byte("null") | ||||||
|  | 
 | ||||||
|  | // MarshalJSON implements encoding/json.Marshaler interface.
 | ||||||
|  | func (v RawMessage) MarshalJSON() ([]byte, error) { | ||||||
|  | 	if len(v) == 0 { | ||||||
|  | 		return nullBytes, nil | ||||||
|  | 	} | ||||||
|  | 	return v, nil | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // IsDefined is required for integration with omitempty easyjson logic.
 | ||||||
|  | func (v *RawMessage) IsDefined() bool { | ||||||
|  | 	return len(*v) > 0 | ||||||
|  | } | ||||||
|  | @ -1,165 +0,0 @@ | ||||||
| // This file has been modified from the original generated code to make it work with
 |  | ||||||
| // type alias jsonResult so that the methods aren't exposed in Result.
 |  | ||||||
| 
 |  | ||||||
| package vegeta |  | ||||||
| 
 |  | ||||||
| import ( |  | ||||||
| 	"time" |  | ||||||
| 
 |  | ||||||
| 	"github.com/mailru/easyjson/jlexer" |  | ||||||
| 	"github.com/mailru/easyjson/jwriter" |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| type jsonResult Result |  | ||||||
| 
 |  | ||||||
| func (r *jsonResult) decode(in *jlexer.Lexer) { |  | ||||||
| 	isTopLevel := in.IsStart() |  | ||||||
| 	if in.IsNull() { |  | ||||||
| 		if isTopLevel { |  | ||||||
| 			in.Consumed() |  | ||||||
| 		} |  | ||||||
| 		in.Skip() |  | ||||||
| 		return |  | ||||||
| 	} |  | ||||||
| 	in.Delim('{') |  | ||||||
| 	for !in.IsDelim('}') { |  | ||||||
| 		key := in.UnsafeString() |  | ||||||
| 		in.WantColon() |  | ||||||
| 		if in.IsNull() { |  | ||||||
| 			in.Skip() |  | ||||||
| 			in.WantComma() |  | ||||||
| 			continue |  | ||||||
| 		} |  | ||||||
| 		switch key { |  | ||||||
| 		case "attack": |  | ||||||
| 			r.Attack = string(in.String()) |  | ||||||
| 		case "seq": |  | ||||||
| 			r.Seq = uint64(in.Uint64()) |  | ||||||
| 		case "code": |  | ||||||
| 			r.Code = uint16(in.Uint16()) |  | ||||||
| 		case "timestamp": |  | ||||||
| 			if data := in.Raw(); in.Ok() { |  | ||||||
| 				in.AddError((r.Timestamp).UnmarshalJSON(data)) |  | ||||||
| 			} |  | ||||||
| 		case "latency": |  | ||||||
| 			r.Latency = time.Duration(in.Int64()) |  | ||||||
| 		case "bytes_out": |  | ||||||
| 			r.BytesOut = uint64(in.Uint64()) |  | ||||||
| 		case "bytes_in": |  | ||||||
| 			r.BytesIn = uint64(in.Uint64()) |  | ||||||
| 		case "error": |  | ||||||
| 			r.Error = string(in.String()) |  | ||||||
| 		case "body": |  | ||||||
| 			if in.IsNull() { |  | ||||||
| 				in.Skip() |  | ||||||
| 				r.Body = nil |  | ||||||
| 			} else { |  | ||||||
| 				r.Body = in.Bytes() |  | ||||||
| 			} |  | ||||||
| 		default: |  | ||||||
| 			in.SkipRecursive() |  | ||||||
| 		} |  | ||||||
| 		in.WantComma() |  | ||||||
| 	} |  | ||||||
| 	in.Delim('}') |  | ||||||
| 	if isTopLevel { |  | ||||||
| 		in.Consumed() |  | ||||||
| 	} |  | ||||||
| } |  | ||||||
| 
 |  | ||||||
| func (r jsonResult) encode(out *jwriter.Writer) { |  | ||||||
| 	out.RawByte('{') |  | ||||||
| 	first := true |  | ||||||
| 	_ = first |  | ||||||
| 	{ |  | ||||||
| 		const prefix string = ",\"attack\":" |  | ||||||
| 		if first { |  | ||||||
| 			first = false |  | ||||||
| 			out.RawString(prefix[1:]) |  | ||||||
| 		} else { |  | ||||||
| 			out.RawString(prefix) |  | ||||||
| 		} |  | ||||||
| 		out.String(string(r.Attack)) |  | ||||||
| 	} |  | ||||||
| 	{ |  | ||||||
| 		const prefix string = ",\"seq\":" |  | ||||||
| 		if first { |  | ||||||
| 			first = false |  | ||||||
| 			out.RawString(prefix[1:]) |  | ||||||
| 		} else { |  | ||||||
| 			out.RawString(prefix) |  | ||||||
| 		} |  | ||||||
| 		out.Uint64(uint64(r.Seq)) |  | ||||||
| 	} |  | ||||||
| 	{ |  | ||||||
| 		const prefix string = ",\"code\":" |  | ||||||
| 		if first { |  | ||||||
| 			first = false |  | ||||||
| 			out.RawString(prefix[1:]) |  | ||||||
| 		} else { |  | ||||||
| 			out.RawString(prefix) |  | ||||||
| 		} |  | ||||||
| 		out.Uint16(uint16(r.Code)) |  | ||||||
| 	} |  | ||||||
| 	{ |  | ||||||
| 		const prefix string = ",\"timestamp\":" |  | ||||||
| 		if first { |  | ||||||
| 			first = false |  | ||||||
| 			out.RawString(prefix[1:]) |  | ||||||
| 		} else { |  | ||||||
| 			out.RawString(prefix) |  | ||||||
| 		} |  | ||||||
| 		out.Raw((r.Timestamp).MarshalJSON()) |  | ||||||
| 	} |  | ||||||
| 	{ |  | ||||||
| 		const prefix string = ",\"latency\":" |  | ||||||
| 		if first { |  | ||||||
| 			first = false |  | ||||||
| 			out.RawString(prefix[1:]) |  | ||||||
| 		} else { |  | ||||||
| 			out.RawString(prefix) |  | ||||||
| 		} |  | ||||||
| 		out.Int64(int64(r.Latency)) |  | ||||||
| 	} |  | ||||||
| 	{ |  | ||||||
| 		const prefix string = ",\"bytes_out\":" |  | ||||||
| 		if first { |  | ||||||
| 			first = false |  | ||||||
| 			out.RawString(prefix[1:]) |  | ||||||
| 		} else { |  | ||||||
| 			out.RawString(prefix) |  | ||||||
| 		} |  | ||||||
| 		out.Uint64(uint64(r.BytesOut)) |  | ||||||
| 	} |  | ||||||
| 	{ |  | ||||||
| 		const prefix string = ",\"bytes_in\":" |  | ||||||
| 		if first { |  | ||||||
| 			first = false |  | ||||||
| 			out.RawString(prefix[1:]) |  | ||||||
| 		} else { |  | ||||||
| 			out.RawString(prefix) |  | ||||||
| 		} |  | ||||||
| 		out.Uint64(uint64(r.BytesIn)) |  | ||||||
| 	} |  | ||||||
| 	{ |  | ||||||
| 		const prefix string = ",\"error\":" |  | ||||||
| 		if first { |  | ||||||
| 			first = false |  | ||||||
| 			out.RawString(prefix[1:]) |  | ||||||
| 		} else { |  | ||||||
| 			out.RawString(prefix) |  | ||||||
| 		} |  | ||||||
| 		out.String(string(r.Error)) |  | ||||||
| 	} |  | ||||||
| 	{ |  | ||||||
| 		const prefix string = ",\"body\":" |  | ||||||
| 		if first { |  | ||||||
| 			first = false |  | ||||||
| 			out.RawString(prefix[1:]) |  | ||||||
| 		} else { |  | ||||||
| 			out.RawString(prefix) |  | ||||||
| 		} |  | ||||||
| 		out.Base64Bytes(r.Body) |  | ||||||
| 	} |  | ||||||
| 	out.RawByte('}') |  | ||||||
| } |  | ||||||
|  | @ -1,6 +1,6 @@ | ||||||
| The MIT License (MIT) | The MIT License (MIT) | ||||||
| 
 | 
 | ||||||
| Copyright (c) 2013-2016 Tomás Senart | Copyright (c) 2013-2020 Tomás Senart | ||||||
| 
 | 
 | ||||||
| Permission is hereby granted, free of charge, to any person obtaining a copy of | Permission is hereby granted, free of charge, to any person obtaining a copy of | ||||||
| this software and associated documentation files (the "Software"), to deal in | this software and associated documentation files (the "Software"), to deal in | ||||||
|  | @ -10,6 +10,7 @@ import ( | ||||||
| 	"net" | 	"net" | ||||||
| 	"net/http" | 	"net/http" | ||||||
| 	"net/url" | 	"net/url" | ||||||
|  | 	"strconv" | ||||||
| 	"sync" | 	"sync" | ||||||
| 	"time" | 	"time" | ||||||
| 
 | 
 | ||||||
|  | @ -28,6 +29,7 @@ type Attacker struct { | ||||||
| 	seqmu      sync.Mutex | 	seqmu      sync.Mutex | ||||||
| 	seq        uint64 | 	seq        uint64 | ||||||
| 	began      time.Time | 	began      time.Time | ||||||
|  | 	chunked    bool | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| const ( | const ( | ||||||
|  | @ -40,6 +42,9 @@ const ( | ||||||
| 	// DefaultConnections is the default amount of max open idle connections per
 | 	// DefaultConnections is the default amount of max open idle connections per
 | ||||||
| 	// target host.
 | 	// target host.
 | ||||||
| 	DefaultConnections = 10000 | 	DefaultConnections = 10000 | ||||||
|  | 	// DefaultMaxConnections is the default amount of connections per target
 | ||||||
|  | 	// host.
 | ||||||
|  | 	DefaultMaxConnections = 0 | ||||||
| 	// DefaultWorkers is the default initial number of workers used to carry an attack.
 | 	// DefaultWorkers is the default initial number of workers used to carry an attack.
 | ||||||
| 	DefaultWorkers = 10 | 	DefaultWorkers = 10 | ||||||
| 	// DefaultMaxWorkers is the default maximum number of workers used to carry an attack.
 | 	// DefaultMaxWorkers is the default maximum number of workers used to carry an attack.
 | ||||||
|  | @ -81,6 +86,7 @@ func NewAttacker(opts ...func(*Attacker)) *Attacker { | ||||||
| 			Dial:                a.dialer.Dial, | 			Dial:                a.dialer.Dial, | ||||||
| 			TLSClientConfig:     DefaultTLSConfig, | 			TLSClientConfig:     DefaultTLSConfig, | ||||||
| 			MaxIdleConnsPerHost: DefaultConnections, | 			MaxIdleConnsPerHost: DefaultConnections, | ||||||
|  | 			MaxConnsPerHost:     DefaultMaxConnections, | ||||||
| 		}, | 		}, | ||||||
| 	} | 	} | ||||||
| 
 | 
 | ||||||
|  | @ -113,6 +119,21 @@ func Connections(n int) func(*Attacker) { | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | // MaxConnections returns a functional option which sets the number of maximum
 | ||||||
|  | // connections per target host.
 | ||||||
|  | func MaxConnections(n int) func(*Attacker) { | ||||||
|  | 	return func(a *Attacker) { | ||||||
|  | 		tr := a.client.Transport.(*http.Transport) | ||||||
|  | 		tr.MaxConnsPerHost = n | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // ChunkedBody returns a functional option which makes the attacker send the
 | ||||||
|  | // body of each request with the chunked transfer encoding.
 | ||||||
|  | func ChunkedBody(b bool) func(*Attacker) { | ||||||
|  | 	return func(a *Attacker) { a.chunked = b } | ||||||
|  | } | ||||||
|  | 
 | ||||||
| // Redirects returns a functional option which sets the maximum
 | // Redirects returns a functional option which sets the maximum
 | ||||||
| // number of redirects an Attacker will follow.
 | // number of redirects an Attacker will follow.
 | ||||||
| func Redirects(n int) func(*Attacker) { | func Redirects(n int) func(*Attacker) { | ||||||
|  | @ -229,6 +250,16 @@ func Client(c *http.Client) func(*Attacker) { | ||||||
| 	return func(a *Attacker) { a.client = *c } | 	return func(a *Attacker) { a.client = *c } | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | // ProxyHeader returns a functional option that allows you to add your own
 | ||||||
|  | // Proxy CONNECT headers
 | ||||||
|  | func ProxyHeader(h http.Header) func(*Attacker) { | ||||||
|  | 	return func(a *Attacker) { | ||||||
|  | 		if tr, ok := a.client.Transport.(*http.Transport); ok { | ||||||
|  | 			tr.ProxyConnectHeader = h | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  | 
 | ||||||
| // Attack reads its Targets from the passed Targeter and attacks them at
 | // Attack reads its Targets from the passed Targeter and attacks them at
 | ||||||
| // the rate specified by the Pacer. When the duration is zero the attack
 | // the rate specified by the Pacer. When the duration is zero the attack
 | ||||||
| // runs until Stop is called. Results are sent to the returned channel as soon
 | // runs until Stop is called. Results are sent to the returned channel as soon
 | ||||||
|  | @ -336,11 +367,24 @@ func (a *Attacker) hit(tr Targeter, name string) *Result { | ||||||
| 		return &res | 		return &res | ||||||
| 	} | 	} | ||||||
| 
 | 
 | ||||||
|  | 	res.Method = tgt.Method | ||||||
|  | 	res.URL = tgt.URL | ||||||
|  | 
 | ||||||
| 	req, err := tgt.Request() | 	req, err := tgt.Request() | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return &res | 		return &res | ||||||
| 	} | 	} | ||||||
| 
 | 
 | ||||||
|  | 	if name != "" { | ||||||
|  | 		req.Header.Set("X-Vegeta-Attack", name) | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | 	req.Header.Set("X-Vegeta-Seq", strconv.FormatUint(res.Seq, 10)) | ||||||
|  | 
 | ||||||
|  | 	if a.chunked { | ||||||
|  | 		req.TransferEncoding = append(req.TransferEncoding, "chunked") | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
| 	r, err := a.client.Do(req) | 	r, err := a.client.Do(req) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return &res | 		return &res | ||||||
|  | @ -368,5 +412,7 @@ func (a *Attacker) hit(tr Targeter, name string) *Result { | ||||||
| 		res.Error = r.Status | 		res.Error = r.Status | ||||||
| 	} | 	} | ||||||
| 
 | 
 | ||||||
|  | 	res.Headers = r.Header | ||||||
|  | 
 | ||||||
| 	return &res | 	return &res | ||||||
| } | } | ||||||
|  | @ -0,0 +1,154 @@ | ||||||
|  | // +build gofuzz
 | ||||||
|  | 
 | ||||||
|  | package vegeta | ||||||
|  | 
 | ||||||
|  | import ( | ||||||
|  | 	"encoding/binary" | ||||||
|  | 	"fmt" | ||||||
|  | 	"io/ioutil" | ||||||
|  | 	"net" | ||||||
|  | 	"net/http" | ||||||
|  | 	"os" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | // FuzzAttackerTCP fuzzes binary responses to attacker.
 | ||||||
|  | func FuzzAttackerTCP(fuzz []byte) int { | ||||||
|  | 	// Ignore empty fuzz
 | ||||||
|  | 	if len(fuzz) == 0 { | ||||||
|  | 		return -1 | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | 	// Start server
 | ||||||
|  | 	directory, err := ioutil.TempDir("/tmp", "fuzz") | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err.Error()) | ||||||
|  | 	} | ||||||
|  | 	socket := fmt.Sprintf("%s/attacker.sock", directory) | ||||||
|  | 	listener, err := net.Listen("unix", socket) | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err.Error()) | ||||||
|  | 	} | ||||||
|  | 	go func() { | ||||||
|  | 		connection, err := listener.Accept() | ||||||
|  | 		if err != nil { | ||||||
|  | 			panic(err.Error()) | ||||||
|  | 		} | ||||||
|  | 		_, err = connection.Write(fuzz) | ||||||
|  | 		if err != nil { | ||||||
|  | 			panic(err.Error()) | ||||||
|  | 		} | ||||||
|  | 		err = connection.Close() | ||||||
|  | 		if err != nil { | ||||||
|  | 			panic(err.Error()) | ||||||
|  | 		} | ||||||
|  | 	}() | ||||||
|  | 	defer listener.Close() | ||||||
|  | 	defer os.RemoveAll(directory) | ||||||
|  | 
 | ||||||
|  | 	// Setup targeter
 | ||||||
|  | 	targeter := Targeter(func(target *Target) error { | ||||||
|  | 		target.Method = "GET" | ||||||
|  | 		target.URL = "http://vegeta.test" | ||||||
|  | 		return nil | ||||||
|  | 	}) | ||||||
|  | 
 | ||||||
|  | 	// Deliver a single hit
 | ||||||
|  | 	attacker := NewAttacker( | ||||||
|  | 		UnixSocket(socket), | ||||||
|  | 		Workers(1), | ||||||
|  | 		MaxWorkers(1), | ||||||
|  | 		Timeout(time.Second), | ||||||
|  | 		KeepAlive(false), | ||||||
|  | 	) | ||||||
|  | 	result := attacker.hit(targeter, "fuzz") | ||||||
|  | 	if result.Error != "" { | ||||||
|  | 		return 0 | ||||||
|  | 	} | ||||||
|  | 	return 1 | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // FuzzAttackerHTTP fuzzes valid HTTP responses to attacker.
 | ||||||
|  | func FuzzAttackerHTTP(fuzz []byte) int { | ||||||
|  | 	// Decode response
 | ||||||
|  | 	code, headers, body, ok := decodeFuzzResponse(fuzz) | ||||||
|  | 	if !ok { | ||||||
|  | 		return -1 | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | 	// Start server
 | ||||||
|  | 	directory, err := ioutil.TempDir("/tmp", "fuzz") | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err.Error()) | ||||||
|  | 	} | ||||||
|  | 	socket := fmt.Sprintf("%s/attacker.sock", directory) | ||||||
|  | 	listener, err := net.Listen("unix", socket) | ||||||
|  | 	if err != nil { | ||||||
|  | 		panic(err.Error()) | ||||||
|  | 	} | ||||||
|  | 	handler := func(response http.ResponseWriter, request *http.Request) { | ||||||
|  | 		for name, values := range headers { | ||||||
|  | 			for _, value := range values { | ||||||
|  | 				response.Header().Add(name, value) | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  | 		response.WriteHeader(int(code)) | ||||||
|  | 		_, err := response.Write(body) | ||||||
|  | 		if err != nil { | ||||||
|  | 			panic(err.Error()) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	server := http.Server{ | ||||||
|  | 		Handler: http.HandlerFunc(handler), | ||||||
|  | 	} | ||||||
|  | 	defer server.Close() | ||||||
|  | 	defer listener.Close() | ||||||
|  | 	defer os.RemoveAll(directory) | ||||||
|  | 	go server.Serve(listener) | ||||||
|  | 
 | ||||||
|  | 	// Setup targeter
 | ||||||
|  | 	targeter := Targeter(func(target *Target) error { | ||||||
|  | 		target.Method = "GET" | ||||||
|  | 		target.URL = "http://vegeta.test" | ||||||
|  | 		return nil | ||||||
|  | 	}) | ||||||
|  | 
 | ||||||
|  | 	// Deliver a single hit
 | ||||||
|  | 	attacker := NewAttacker( | ||||||
|  | 		UnixSocket(socket), | ||||||
|  | 		Workers(1), | ||||||
|  | 		MaxWorkers(1), | ||||||
|  | 		Timeout(time.Second), | ||||||
|  | 		KeepAlive(false), | ||||||
|  | 	) | ||||||
|  | 	result := attacker.hit(targeter, "fuzz") | ||||||
|  | 	if result.Error != "" { | ||||||
|  | 		return 0 | ||||||
|  | 	} | ||||||
|  | 	return 1 | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | func decodeFuzzResponse(fuzz []byte) ( | ||||||
|  | 	code int, | ||||||
|  | 	headers map[string][]string, | ||||||
|  | 	body []byte, | ||||||
|  | 	ok bool, | ||||||
|  | ) { | ||||||
|  | 	if len(fuzz) < 2 { | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	headers = make(map[string][]string) | ||||||
|  | 	body = []byte{} | ||||||
|  | 	code = int(binary.LittleEndian.Uint16(fuzz[0:2])) | ||||||
|  | 	if len(fuzz) == 2 { | ||||||
|  | 		ok = true | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	fuzz, ok = decodeFuzzHeaders(fuzz[2:], headers) | ||||||
|  | 	if !ok { | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	body = fuzz | ||||||
|  | 	ok = true | ||||||
|  | 	return | ||||||
|  | } | ||||||
|  | @ -68,11 +68,15 @@ func (bs *Buckets) UnmarshalText(value []byte) error { | ||||||
| 	if len(value) < 2 || value[0] != '[' || value[len(value)-1] != ']' { | 	if len(value) < 2 || value[0] != '[' || value[len(value)-1] != ']' { | ||||||
| 		return fmt.Errorf("bad buckets: %s", value) | 		return fmt.Errorf("bad buckets: %s", value) | ||||||
| 	} | 	} | ||||||
| 	for _, v := range strings.Split(string(value[1:len(value)-1]), ",") { | 	for i, v := range strings.Split(string(value[1:len(value)-1]), ",") { | ||||||
| 		d, err := time.ParseDuration(strings.TrimSpace(v)) | 		d, err := time.ParseDuration(strings.TrimSpace(v)) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return err | 			return err | ||||||
| 		} | 		} | ||||||
|  | 		// add a default range of [0-Buckets[0]) if needed
 | ||||||
|  | 		if i == 0 && d > 0 { | ||||||
|  | 			*bs = append(*bs, 0) | ||||||
|  | 		} | ||||||
| 		*bs = append(*bs, d) | 		*bs = append(*bs, d) | ||||||
| 	} | 	} | ||||||
| 	if len(*bs) == 0 { | 	if len(*bs) == 0 { | ||||||
|  | @ -89,6 +89,11 @@ func (m *Metrics) Add(r *Result) { | ||||||
| // derived summary metrics which don't need to be run on every Add call.
 | // derived summary metrics which don't need to be run on every Add call.
 | ||||||
| func (m *Metrics) Close() { | func (m *Metrics) Close() { | ||||||
| 	m.init() | 	m.init() | ||||||
|  | 
 | ||||||
|  | 	if m.Requests == 0 { | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
| 	m.Rate = float64(m.Requests) | 	m.Rate = float64(m.Requests) | ||||||
| 	m.Throughput = float64(m.success) | 	m.Throughput = float64(m.success) | ||||||
| 	m.Duration = m.Latest.Sub(m.Earliest) | 	m.Duration = m.Latest.Sub(m.Earliest) | ||||||
|  | @ -104,6 +109,7 @@ func (m *Metrics) Close() { | ||||||
| 	m.Success = float64(m.success) / float64(m.Requests) | 	m.Success = float64(m.success) / float64(m.Requests) | ||||||
| 	m.Latencies.Mean = time.Duration(float64(m.Latencies.Total) / float64(m.Requests)) | 	m.Latencies.Mean = time.Duration(float64(m.Latencies.Total) / float64(m.Requests)) | ||||||
| 	m.Latencies.P50 = m.Latencies.Quantile(0.50) | 	m.Latencies.P50 = m.Latencies.Quantile(0.50) | ||||||
|  | 	m.Latencies.P90 = m.Latencies.Quantile(0.90) | ||||||
| 	m.Latencies.P95 = m.Latencies.Quantile(0.95) | 	m.Latencies.P95 = m.Latencies.Quantile(0.95) | ||||||
| 	m.Latencies.P99 = m.Latencies.Quantile(0.99) | 	m.Latencies.P99 = m.Latencies.Quantile(0.99) | ||||||
| } | } | ||||||
|  | @ -130,12 +136,16 @@ type LatencyMetrics struct { | ||||||
| 	Mean time.Duration `json:"mean"` | 	Mean time.Duration `json:"mean"` | ||||||
| 	// P50 is the 50th percentile request latency.
 | 	// P50 is the 50th percentile request latency.
 | ||||||
| 	P50 time.Duration `json:"50th"` | 	P50 time.Duration `json:"50th"` | ||||||
|  | 	// P90 is the 90th percentile request latency.
 | ||||||
|  | 	P90 time.Duration `json:"90th"` | ||||||
| 	// P95 is the 95th percentile request latency.
 | 	// P95 is the 95th percentile request latency.
 | ||||||
| 	P95 time.Duration `json:"95th"` | 	P95 time.Duration `json:"95th"` | ||||||
| 	// P99 is the 99th percentile request latency.
 | 	// P99 is the 99th percentile request latency.
 | ||||||
| 	P99 time.Duration `json:"99th"` | 	P99 time.Duration `json:"99th"` | ||||||
| 	// Max is the maximum observed request latency.
 | 	// Max is the maximum observed request latency.
 | ||||||
| 	Max time.Duration `json:"max"` | 	Max time.Duration `json:"max"` | ||||||
|  | 	// Min is the minimum observed request latency.
 | ||||||
|  | 	Min time.Duration `json:"min"` | ||||||
| 
 | 
 | ||||||
| 	estimator estimator | 	estimator estimator | ||||||
| } | } | ||||||
|  | @ -146,6 +156,9 @@ func (l *LatencyMetrics) Add(latency time.Duration) { | ||||||
| 	if l.Total += latency; latency > l.Max { | 	if l.Total += latency; latency > l.Max { | ||||||
| 		l.Max = latency | 		l.Max = latency | ||||||
| 	} | 	} | ||||||
|  | 	if latency < l.Min || l.Min == 0 { | ||||||
|  | 		l.Min = latency | ||||||
|  | 	} | ||||||
| 	l.estimator.Add(float64(latency)) | 	l.estimator.Add(float64(latency)) | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | @ -6,12 +6,17 @@ import ( | ||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| // A Pacer defines the rate of hits during an Attack by
 | // A Pacer defines the rate of hits during an Attack.
 | ||||||
| // returning the duration an Attacker should wait until
 |  | ||||||
| // hitting the next Target. If the second return value
 |  | ||||||
| // is true, the attack will terminate.
 |  | ||||||
| type Pacer interface { | type Pacer interface { | ||||||
|  | 	// Pace returns the duration an Attacker should wait until
 | ||||||
|  | 	// hitting the next Target, given an already elapsed duration and
 | ||||||
|  | 	// completed hits. If the second return value is true, an attacker
 | ||||||
|  | 	// should stop sending hits.
 | ||||||
| 	Pace(elapsed time.Duration, hits uint64) (wait time.Duration, stop bool) | 	Pace(elapsed time.Duration, hits uint64) (wait time.Duration, stop bool) | ||||||
|  | 
 | ||||||
|  | 	// Rate returns a Pacer's instantaneous hit rate (per seconds)
 | ||||||
|  | 	// at the given elapsed duration of an attack.
 | ||||||
|  | 	Rate(elapsed time.Duration) float64 | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| // A PacerFunc is a function adapter type that implements
 | // A PacerFunc is a function adapter type that implements
 | ||||||
|  | @ -65,6 +70,13 @@ func (cp ConstantPacer) Pace(elapsed time.Duration, hits uint64) (time.Duration, | ||||||
| 	return delta - elapsed, false | 	return delta - elapsed, false | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | // Rate returns a ConstantPacer's instantaneous hit rate (i.e. requests per second)
 | ||||||
|  | // at the given elapsed duration of an attack. Since it's constant, the return
 | ||||||
|  | // value is independent of the given elapsed duration.
 | ||||||
|  | func (cp ConstantPacer) Rate(elapsed time.Duration) float64 { | ||||||
|  | 	return cp.hitsPerNs() * 1e9 | ||||||
|  | } | ||||||
|  | 
 | ||||||
| // hitsPerNs returns the attack rate this ConstantPacer represents, in
 | // hitsPerNs returns the attack rate this ConstantPacer represents, in
 | ||||||
| // fractional hits per nanosecond.
 | // fractional hits per nanosecond.
 | ||||||
| func (cp ConstantPacer) hitsPerNs() float64 { | func (cp ConstantPacer) hitsPerNs() float64 { | ||||||
|  | @ -184,6 +196,12 @@ func (sp SinePacer) Pace(elapsedTime time.Duration, elapsedHits uint64) (time.Du | ||||||
| 	return nextHitIn, false | 	return nextHitIn, false | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | // Rate returns a SinePacer's instantaneous hit rate (i.e. requests per second)
 | ||||||
|  | // at the given elapsed duration of an attack.
 | ||||||
|  | func (sp SinePacer) Rate(elapsed time.Duration) float64 { | ||||||
|  | 	return sp.hitsPerNs(elapsed) * 1e9 | ||||||
|  | } | ||||||
|  | 
 | ||||||
| // ampHits returns AP/2𝛑, which is the number of hits added or subtracted
 | // ampHits returns AP/2𝛑, which is the number of hits added or subtracted
 | ||||||
| // from the Mean due to the Amplitude over a quarter of the Period,
 | // from the Mean due to the Amplitude over a quarter of the Period,
 | ||||||
| // i.e. from 0 → 𝛑/2 radians
 | // i.e. from 0 → 𝛑/2 radians
 | ||||||
|  | @ -240,7 +258,7 @@ func (p LinearPacer) Pace(elapsed time.Duration, hits uint64) (time.Duration, bo | ||||||
| 		return 0, false | 		return 0, false | ||||||
| 	} | 	} | ||||||
| 
 | 
 | ||||||
| 	rate := p.rate(elapsed) | 	rate := p.Rate(elapsed) | ||||||
| 	interval := math.Round(1e9 / rate) | 	interval := math.Round(1e9 / rate) | ||||||
| 
 | 
 | ||||||
| 	if n := uint64(interval); n != 0 && math.MaxInt64/n < hits { | 	if n := uint64(interval); n != 0 && math.MaxInt64/n < hits { | ||||||
|  | @ -254,6 +272,15 @@ func (p LinearPacer) Pace(elapsed time.Duration, hits uint64) (time.Duration, bo | ||||||
| 	return wait, false | 	return wait, false | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | // Rate returns a LinearPacer's instantaneous hit rate (i.e. requests per second)
 | ||||||
|  | // at the given elapsed duration of an attack.
 | ||||||
|  | func (p LinearPacer) Rate(elapsed time.Duration) float64 { | ||||||
|  | 	a := p.Slope | ||||||
|  | 	x := elapsed.Seconds() | ||||||
|  | 	b := p.StartAt.hitsPerNs() * 1e9 | ||||||
|  | 	return a*x + b | ||||||
|  | } | ||||||
|  | 
 | ||||||
| // hits returns the number of hits that have been sent during an attack
 | // hits returns the number of hits that have been sent during an attack
 | ||||||
| // lasting t nanoseconds. It returns a float so we can tell exactly how
 | // lasting t nanoseconds. It returns a float so we can tell exactly how
 | ||||||
| // much we've missed our target by when solving numerically in Pace.
 | // much we've missed our target by when solving numerically in Pace.
 | ||||||
|  | @ -268,12 +295,3 @@ func (p LinearPacer) hits(t time.Duration) float64 { | ||||||
| 
 | 
 | ||||||
| 	return (a*math.Pow(x, 2))/2 + b*x | 	return (a*math.Pow(x, 2))/2 + b*x | ||||||
| } | } | ||||||
| 
 |  | ||||||
| // rate calculates the instantaneous rate of attack at
 |  | ||||||
| // t nanoseconds after the attack began.
 |  | ||||||
| func (p LinearPacer) rate(t time.Duration) float64 { |  | ||||||
| 	a := p.Slope |  | ||||||
| 	x := t.Seconds() |  | ||||||
| 	b := p.StartAt.hitsPerNs() * 1e9 |  | ||||||
| 	return a*x + b |  | ||||||
| } |  | ||||||
|  | @ -57,7 +57,7 @@ func NewHistogramReporter(h *Histogram) Reporter { | ||||||
| func NewTextReporter(m *Metrics) Reporter { | func NewTextReporter(m *Metrics) Reporter { | ||||||
| 	const fmtstr = "Requests\t[total, rate, throughput]\t%d, %.2f, %.2f\n" + | 	const fmtstr = "Requests\t[total, rate, throughput]\t%d, %.2f, %.2f\n" + | ||||||
| 		"Duration\t[total, attack, wait]\t%s, %s, %s\n" + | 		"Duration\t[total, attack, wait]\t%s, %s, %s\n" + | ||||||
| 		"Latencies\t[mean, 50, 95, 99, max]\t%s, %s, %s, %s, %s\n" + | 		"Latencies\t[min, mean, 50, 90, 95, 99, max]\t%s, %s, %s, %s, %s, %s, %s\n" + | ||||||
| 		"Bytes In\t[total, mean]\t%d, %.2f\n" + | 		"Bytes In\t[total, mean]\t%d, %.2f\n" + | ||||||
| 		"Bytes Out\t[total, mean]\t%d, %.2f\n" + | 		"Bytes Out\t[total, mean]\t%d, %.2f\n" + | ||||||
| 		"Success\t[ratio]\t%.2f%%\n" + | 		"Success\t[ratio]\t%.2f%%\n" + | ||||||
|  | @ -67,8 +67,16 @@ func NewTextReporter(m *Metrics) Reporter { | ||||||
| 		tw := tabwriter.NewWriter(w, 0, 8, 2, ' ', tabwriter.StripEscape) | 		tw := tabwriter.NewWriter(w, 0, 8, 2, ' ', tabwriter.StripEscape) | ||||||
| 		if _, err = fmt.Fprintf(tw, fmtstr, | 		if _, err = fmt.Fprintf(tw, fmtstr, | ||||||
| 			m.Requests, m.Rate, m.Throughput, | 			m.Requests, m.Rate, m.Throughput, | ||||||
| 			m.Duration+m.Wait, m.Duration, m.Wait, | 			round(m.Duration+m.Wait), | ||||||
| 			m.Latencies.Mean, m.Latencies.P50, m.Latencies.P95, m.Latencies.P99, m.Latencies.Max, | 			round(m.Duration), | ||||||
|  | 			round(m.Wait), | ||||||
|  | 			round(m.Latencies.Min), | ||||||
|  | 			round(m.Latencies.Mean), | ||||||
|  | 			round(m.Latencies.P50), | ||||||
|  | 			round(m.Latencies.P90), | ||||||
|  | 			round(m.Latencies.P95), | ||||||
|  | 			round(m.Latencies.P99), | ||||||
|  | 			round(m.Latencies.Max), | ||||||
| 			m.BytesIn.Total, m.BytesIn.Mean, | 			m.BytesIn.Total, m.BytesIn.Mean, | ||||||
| 			m.BytesOut.Total, m.BytesOut.Mean, | 			m.BytesOut.Total, m.BytesOut.Mean, | ||||||
| 			m.Success*100, | 			m.Success*100, | ||||||
|  | @ -104,6 +112,25 @@ func NewTextReporter(m *Metrics) Reporter { | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | var durations = [...]time.Duration{ | ||||||
|  | 	time.Hour, | ||||||
|  | 	time.Minute, | ||||||
|  | 	time.Second, | ||||||
|  | 	time.Millisecond, | ||||||
|  | 	time.Microsecond, | ||||||
|  | 	time.Nanosecond, | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // round to the next most precise unit
 | ||||||
|  | func round(d time.Duration) time.Duration { | ||||||
|  | 	for i, unit := range durations { | ||||||
|  | 		if d >= unit && i < len(durations)-1 { | ||||||
|  | 			return d.Round(durations[i+1]) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return d | ||||||
|  | } | ||||||
|  | 
 | ||||||
| // NewJSONReporter returns a Reporter that writes out Metrics as JSON.
 | // NewJSONReporter returns a Reporter that writes out Metrics as JSON.
 | ||||||
| func NewJSONReporter(m *Metrics) Reporter { | func NewJSONReporter(m *Metrics) Reporter { | ||||||
| 	return func(w io.Writer) error { | 	return func(w io.Writer) error { | ||||||
|  | @ -7,12 +7,15 @@ import ( | ||||||
| 	"encoding/csv" | 	"encoding/csv" | ||||||
| 	"encoding/gob" | 	"encoding/gob" | ||||||
| 	"io" | 	"io" | ||||||
|  | 	"net/http" | ||||||
|  | 	"net/textproto" | ||||||
| 	"sort" | 	"sort" | ||||||
| 	"strconv" | 	"strconv" | ||||||
|  | 	"strings" | ||||||
| 	"time" | 	"time" | ||||||
| 
 | 
 | ||||||
| 	"github.com/mailru/easyjson/jlexer" | 	"github.com/mailru/easyjson/jlexer" | ||||||
| 	jwriter "github.com/mailru/easyjson/jwriter" | 	"github.com/mailru/easyjson/jwriter" | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| func init() { | func init() { | ||||||
|  | @ -30,6 +33,9 @@ type Result struct { | ||||||
| 	BytesIn   uint64        `json:"bytes_in"` | 	BytesIn   uint64        `json:"bytes_in"` | ||||||
| 	Error     string        `json:"error"` | 	Error     string        `json:"error"` | ||||||
| 	Body      []byte        `json:"body"` | 	Body      []byte        `json:"body"` | ||||||
|  | 	Method    string        `json:"method"` | ||||||
|  | 	URL       string        `json:"url"` | ||||||
|  | 	Headers   http.Header   `json:"headers"` | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| // End returns the time at which a Result ended.
 | // End returns the time at which a Result ended.
 | ||||||
|  | @ -45,7 +51,32 @@ func (r Result) Equal(other Result) bool { | ||||||
| 		r.BytesIn == other.BytesIn && | 		r.BytesIn == other.BytesIn && | ||||||
| 		r.BytesOut == other.BytesOut && | 		r.BytesOut == other.BytesOut && | ||||||
| 		r.Error == other.Error && | 		r.Error == other.Error && | ||||||
| 		bytes.Equal(r.Body, other.Body) | 		bytes.Equal(r.Body, other.Body) && | ||||||
|  | 		r.Method == other.Method && | ||||||
|  | 		r.URL == other.URL && | ||||||
|  | 		headerEqual(r.Headers, other.Headers) | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | func headerEqual(h1, h2 http.Header) bool { | ||||||
|  | 	if len(h1) != len(h2) { | ||||||
|  | 		return false | ||||||
|  | 	} | ||||||
|  | 	if h1 == nil || h2 == nil { | ||||||
|  | 		return h1 == nil && h2 == nil | ||||||
|  | 	} | ||||||
|  | 	for key, values1 := range h1 { | ||||||
|  | 		values2 := h2[key] | ||||||
|  | 		if len(values1) != len(values2) { | ||||||
|  | 			return false | ||||||
|  | 		} | ||||||
|  | 		for i := range values1 { | ||||||
|  | 			if values1[i] != values2[i] { | ||||||
|  | 				return false | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 
 | ||||||
|  | 	return true | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| // Results is a slice of Result type elements.
 | // Results is a slice of Result type elements.
 | ||||||
|  | @ -150,8 +181,10 @@ func NewCSVEncoder(w io.Writer) Encoder { | ||||||
| 			base64.StdEncoding.EncodeToString(r.Body), | 			base64.StdEncoding.EncodeToString(r.Body), | ||||||
| 			r.Attack, | 			r.Attack, | ||||||
| 			strconv.FormatUint(r.Seq, 10), | 			strconv.FormatUint(r.Seq, 10), | ||||||
|  | 			r.Method, | ||||||
|  | 			r.URL, | ||||||
|  | 			base64.StdEncoding.EncodeToString(headerBytes(r.Headers)), | ||||||
| 		}) | 		}) | ||||||
| 
 |  | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return err | 			return err | ||||||
| 		} | 		} | ||||||
|  | @ -162,10 +195,19 @@ func NewCSVEncoder(w io.Writer) Encoder { | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | func headerBytes(h http.Header) []byte { | ||||||
|  | 	if h == nil { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	var hdr bytes.Buffer | ||||||
|  | 	_ = h.Write(&hdr) | ||||||
|  | 	return append(hdr.Bytes(), '\r', '\n') | ||||||
|  | } | ||||||
|  | 
 | ||||||
| // NewCSVDecoder returns a Decoder that decodes CSV encoded Results.
 | // NewCSVDecoder returns a Decoder that decodes CSV encoded Results.
 | ||||||
| func NewCSVDecoder(rd io.Reader) Decoder { | func NewCSVDecoder(r io.Reader) Decoder { | ||||||
| 	dec := csv.NewReader(rd) | 	dec := csv.NewReader(r) | ||||||
| 	dec.FieldsPerRecord = 9 | 	dec.FieldsPerRecord = 12 | ||||||
| 	dec.TrimLeadingSpace = true | 	dec.TrimLeadingSpace = true | ||||||
| 
 | 
 | ||||||
| 	return func(r *Result) error { | 	return func(r *Result) error { | ||||||
|  | @ -201,23 +243,42 @@ func NewCSVDecoder(rd io.Reader) Decoder { | ||||||
| 		} | 		} | ||||||
| 
 | 
 | ||||||
| 		r.Error = rec[5] | 		r.Error = rec[5] | ||||||
| 		r.Body, err = base64.StdEncoding.DecodeString(rec[6]) | 		if r.Body, err = base64.StdEncoding.DecodeString(rec[6]); err != nil { | ||||||
|  | 			return err | ||||||
|  | 		} | ||||||
| 
 | 
 | ||||||
| 		r.Attack = rec[7] | 		r.Attack = rec[7] | ||||||
| 		if r.Seq, err = strconv.ParseUint(rec[8], 10, 64); err != nil { | 		if r.Seq, err = strconv.ParseUint(rec[8], 10, 64); err != nil { | ||||||
| 			return err | 			return err | ||||||
| 		} | 		} | ||||||
| 
 | 
 | ||||||
|  | 		r.Method = rec[9] | ||||||
|  | 		r.URL = rec[10] | ||||||
|  | 
 | ||||||
|  | 		if rec[11] != "" { | ||||||
|  | 			pr := textproto.NewReader(bufio.NewReader( | ||||||
|  | 				base64.NewDecoder(base64.StdEncoding, strings.NewReader(rec[11])))) | ||||||
|  | 			hdr, err := pr.ReadMIMEHeader() | ||||||
|  | 			if err != nil { | ||||||
|  | 				return err | ||||||
|  | 			} | ||||||
|  | 			r.Headers = http.Header(hdr) | ||||||
|  | 		} | ||||||
|  | 
 | ||||||
| 		return err | 		return err | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | //go:generate easyjson -no_std_marshalers -output_filename results_easyjson.go results.go
 | ||||||
|  | //easyjson:json
 | ||||||
|  | type jsonResult Result | ||||||
|  | 
 | ||||||
| // NewJSONEncoder returns an Encoder that dumps the given *Results as a JSON
 | // NewJSONEncoder returns an Encoder that dumps the given *Results as a JSON
 | ||||||
| // object.
 | // object.
 | ||||||
| func NewJSONEncoder(w io.Writer) Encoder { | func NewJSONEncoder(w io.Writer) Encoder { | ||||||
| 	var jw jwriter.Writer | 	var jw jwriter.Writer | ||||||
| 	return func(r *Result) error { | 	return func(r *Result) error { | ||||||
| 		(*jsonResult)(r).encode(&jw) | 		(*jsonResult)(r).MarshalEasyJSON(&jw) | ||||||
| 		if jw.Error != nil { | 		if jw.Error != nil { | ||||||
| 			return jw.Error | 			return jw.Error | ||||||
| 		} | 		} | ||||||
|  | @ -232,10 +293,10 @@ func NewJSONDecoder(r io.Reader) Decoder { | ||||||
| 	rd := bufio.NewReader(r) | 	rd := bufio.NewReader(r) | ||||||
| 	return func(r *Result) (err error) { | 	return func(r *Result) (err error) { | ||||||
| 		var jl jlexer.Lexer | 		var jl jlexer.Lexer | ||||||
| 		if jl.Data, err = rd.ReadSlice('\n'); err != nil { | 		if jl.Data, err = rd.ReadBytes('\n'); err != nil { | ||||||
| 			return err | 			return err | ||||||
| 		} | 		} | ||||||
| 		(*jsonResult)(r).decode(&jl) | 		(*jsonResult)(r).UnmarshalEasyJSON(&jl) | ||||||
| 		return jl.Error() | 		return jl.Error() | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  | @ -0,0 +1,223 @@ | ||||||
|  | // Code generated by easyjson for marshaling/unmarshaling. DO NOT EDIT.
 | ||||||
|  | 
 | ||||||
|  | package vegeta | ||||||
|  | 
 | ||||||
|  | import ( | ||||||
|  | 	json "encoding/json" | ||||||
|  | 	easyjson "github.com/mailru/easyjson" | ||||||
|  | 	jlexer "github.com/mailru/easyjson/jlexer" | ||||||
|  | 	jwriter "github.com/mailru/easyjson/jwriter" | ||||||
|  | 	http "net/http" | ||||||
|  | 	time "time" | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | // suppress unused package warning
 | ||||||
|  | var ( | ||||||
|  | 	_ *json.RawMessage | ||||||
|  | 	_ *jlexer.Lexer | ||||||
|  | 	_ *jwriter.Writer | ||||||
|  | 	_ easyjson.Marshaler | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | func easyjsonBd1621b8DecodeGithubComTsenartVegetaV12Lib(in *jlexer.Lexer, out *jsonResult) { | ||||||
|  | 	isTopLevel := in.IsStart() | ||||||
|  | 	if in.IsNull() { | ||||||
|  | 		if isTopLevel { | ||||||
|  | 			in.Consumed() | ||||||
|  | 		} | ||||||
|  | 		in.Skip() | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	in.Delim('{') | ||||||
|  | 	for !in.IsDelim('}') { | ||||||
|  | 		key := in.UnsafeString() | ||||||
|  | 		in.WantColon() | ||||||
|  | 		if in.IsNull() { | ||||||
|  | 			in.Skip() | ||||||
|  | 			in.WantComma() | ||||||
|  | 			continue | ||||||
|  | 		} | ||||||
|  | 		switch key { | ||||||
|  | 		case "attack": | ||||||
|  | 			out.Attack = string(in.String()) | ||||||
|  | 		case "seq": | ||||||
|  | 			out.Seq = uint64(in.Uint64()) | ||||||
|  | 		case "code": | ||||||
|  | 			out.Code = uint16(in.Uint16()) | ||||||
|  | 		case "timestamp": | ||||||
|  | 			if data := in.Raw(); in.Ok() { | ||||||
|  | 				in.AddError((out.Timestamp).UnmarshalJSON(data)) | ||||||
|  | 			} | ||||||
|  | 		case "latency": | ||||||
|  | 			out.Latency = time.Duration(in.Int64()) | ||||||
|  | 		case "bytes_out": | ||||||
|  | 			out.BytesOut = uint64(in.Uint64()) | ||||||
|  | 		case "bytes_in": | ||||||
|  | 			out.BytesIn = uint64(in.Uint64()) | ||||||
|  | 		case "error": | ||||||
|  | 			out.Error = string(in.String()) | ||||||
|  | 		case "body": | ||||||
|  | 			if in.IsNull() { | ||||||
|  | 				in.Skip() | ||||||
|  | 				out.Body = nil | ||||||
|  | 			} else { | ||||||
|  | 				out.Body = in.Bytes() | ||||||
|  | 			} | ||||||
|  | 		case "method": | ||||||
|  | 			out.Method = string(in.String()) | ||||||
|  | 		case "url": | ||||||
|  | 			out.URL = string(in.String()) | ||||||
|  | 		case "headers": | ||||||
|  | 			if in.IsNull() { | ||||||
|  | 				in.Skip() | ||||||
|  | 			} else { | ||||||
|  | 				in.Delim('{') | ||||||
|  | 				if !in.IsDelim('}') { | ||||||
|  | 					out.Headers = make(http.Header) | ||||||
|  | 				} else { | ||||||
|  | 					out.Headers = nil | ||||||
|  | 				} | ||||||
|  | 				for !in.IsDelim('}') { | ||||||
|  | 					key := string(in.String()) | ||||||
|  | 					in.WantColon() | ||||||
|  | 					var v2 []string | ||||||
|  | 					if in.IsNull() { | ||||||
|  | 						in.Skip() | ||||||
|  | 						v2 = nil | ||||||
|  | 					} else { | ||||||
|  | 						in.Delim('[') | ||||||
|  | 						if v2 == nil { | ||||||
|  | 							if !in.IsDelim(']') { | ||||||
|  | 								v2 = make([]string, 0, 4) | ||||||
|  | 							} else { | ||||||
|  | 								v2 = []string{} | ||||||
|  | 							} | ||||||
|  | 						} else { | ||||||
|  | 							v2 = (v2)[:0] | ||||||
|  | 						} | ||||||
|  | 						for !in.IsDelim(']') { | ||||||
|  | 							var v3 string | ||||||
|  | 							v3 = string(in.String()) | ||||||
|  | 							v2 = append(v2, v3) | ||||||
|  | 							in.WantComma() | ||||||
|  | 						} | ||||||
|  | 						in.Delim(']') | ||||||
|  | 					} | ||||||
|  | 					(out.Headers)[key] = v2 | ||||||
|  | 					in.WantComma() | ||||||
|  | 				} | ||||||
|  | 				in.Delim('}') | ||||||
|  | 			} | ||||||
|  | 		default: | ||||||
|  | 			in.SkipRecursive() | ||||||
|  | 		} | ||||||
|  | 		in.WantComma() | ||||||
|  | 	} | ||||||
|  | 	in.Delim('}') | ||||||
|  | 	if isTopLevel { | ||||||
|  | 		in.Consumed() | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  | func easyjsonBd1621b8EncodeGithubComTsenartVegetaV12Lib(out *jwriter.Writer, in jsonResult) { | ||||||
|  | 	out.RawByte('{') | ||||||
|  | 	first := true | ||||||
|  | 	_ = first | ||||||
|  | 	{ | ||||||
|  | 		const prefix string = ",\"attack\":" | ||||||
|  | 		out.RawString(prefix[1:]) | ||||||
|  | 		out.String(string(in.Attack)) | ||||||
|  | 	} | ||||||
|  | 	{ | ||||||
|  | 		const prefix string = ",\"seq\":" | ||||||
|  | 		out.RawString(prefix) | ||||||
|  | 		out.Uint64(uint64(in.Seq)) | ||||||
|  | 	} | ||||||
|  | 	{ | ||||||
|  | 		const prefix string = ",\"code\":" | ||||||
|  | 		out.RawString(prefix) | ||||||
|  | 		out.Uint16(uint16(in.Code)) | ||||||
|  | 	} | ||||||
|  | 	{ | ||||||
|  | 		const prefix string = ",\"timestamp\":" | ||||||
|  | 		out.RawString(prefix) | ||||||
|  | 		out.Raw((in.Timestamp).MarshalJSON()) | ||||||
|  | 	} | ||||||
|  | 	{ | ||||||
|  | 		const prefix string = ",\"latency\":" | ||||||
|  | 		out.RawString(prefix) | ||||||
|  | 		out.Int64(int64(in.Latency)) | ||||||
|  | 	} | ||||||
|  | 	{ | ||||||
|  | 		const prefix string = ",\"bytes_out\":" | ||||||
|  | 		out.RawString(prefix) | ||||||
|  | 		out.Uint64(uint64(in.BytesOut)) | ||||||
|  | 	} | ||||||
|  | 	{ | ||||||
|  | 		const prefix string = ",\"bytes_in\":" | ||||||
|  | 		out.RawString(prefix) | ||||||
|  | 		out.Uint64(uint64(in.BytesIn)) | ||||||
|  | 	} | ||||||
|  | 	{ | ||||||
|  | 		const prefix string = ",\"error\":" | ||||||
|  | 		out.RawString(prefix) | ||||||
|  | 		out.String(string(in.Error)) | ||||||
|  | 	} | ||||||
|  | 	{ | ||||||
|  | 		const prefix string = ",\"body\":" | ||||||
|  | 		out.RawString(prefix) | ||||||
|  | 		out.Base64Bytes(in.Body) | ||||||
|  | 	} | ||||||
|  | 	{ | ||||||
|  | 		const prefix string = ",\"method\":" | ||||||
|  | 		out.RawString(prefix) | ||||||
|  | 		out.String(string(in.Method)) | ||||||
|  | 	} | ||||||
|  | 	{ | ||||||
|  | 		const prefix string = ",\"url\":" | ||||||
|  | 		out.RawString(prefix) | ||||||
|  | 		out.String(string(in.URL)) | ||||||
|  | 	} | ||||||
|  | 	{ | ||||||
|  | 		const prefix string = ",\"headers\":" | ||||||
|  | 		out.RawString(prefix) | ||||||
|  | 		if in.Headers == nil && (out.Flags&jwriter.NilMapAsEmpty) == 0 { | ||||||
|  | 			out.RawString(`null`) | ||||||
|  | 		} else { | ||||||
|  | 			out.RawByte('{') | ||||||
|  | 			v6First := true | ||||||
|  | 			for v6Name, v6Value := range in.Headers { | ||||||
|  | 				if v6First { | ||||||
|  | 					v6First = false | ||||||
|  | 				} else { | ||||||
|  | 					out.RawByte(',') | ||||||
|  | 				} | ||||||
|  | 				out.String(string(v6Name)) | ||||||
|  | 				out.RawByte(':') | ||||||
|  | 				if v6Value == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { | ||||||
|  | 					out.RawString("null") | ||||||
|  | 				} else { | ||||||
|  | 					out.RawByte('[') | ||||||
|  | 					for v7, v8 := range v6Value { | ||||||
|  | 						if v7 > 0 { | ||||||
|  | 							out.RawByte(',') | ||||||
|  | 						} | ||||||
|  | 						out.String(string(v8)) | ||||||
|  | 					} | ||||||
|  | 					out.RawByte(']') | ||||||
|  | 				} | ||||||
|  | 			} | ||||||
|  | 			out.RawByte('}') | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	out.RawByte('}') | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // MarshalEasyJSON supports easyjson.Marshaler interface
 | ||||||
|  | func (v jsonResult) MarshalEasyJSON(w *jwriter.Writer) { | ||||||
|  | 	easyjsonBd1621b8EncodeGithubComTsenartVegetaV12Lib(w, v) | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // UnmarshalEasyJSON supports easyjson.Unmarshaler interface
 | ||||||
|  | func (v *jsonResult) UnmarshalEasyJSON(l *jlexer.Lexer) { | ||||||
|  | 	easyjsonBd1621b8DecodeGithubComTsenartVegetaV12Lib(l, v) | ||||||
|  | } | ||||||
|  | @ -0,0 +1,63 @@ | ||||||
|  | // +build gofuzz
 | ||||||
|  | 
 | ||||||
|  | package vegeta | ||||||
|  | 
 | ||||||
|  | import ( | ||||||
|  | 	"bytes" | ||||||
|  | 	"io" | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | // FuzzResultsFormatDetection tests result list format detection.
 | ||||||
|  | func FuzzResultsFormatDetection(fuzz []byte) int { | ||||||
|  | 	decoder := DecoderFor(bytes.NewReader(fuzz)) | ||||||
|  | 	if decoder == nil { | ||||||
|  | 		return 0 | ||||||
|  | 	} | ||||||
|  | 	ok := readAllResults(decoder) | ||||||
|  | 	if !ok { | ||||||
|  | 		return 0 | ||||||
|  | 	} | ||||||
|  | 	return 1 | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // FuzzGobDecoder tests decoding a gob format result list.
 | ||||||
|  | func FuzzGobDecoder(fuzz []byte) int { | ||||||
|  | 	decoder := NewDecoder(bytes.NewReader(fuzz)) | ||||||
|  | 	ok := readAllResults(decoder) | ||||||
|  | 	if !ok { | ||||||
|  | 		return 0 | ||||||
|  | 	} | ||||||
|  | 	return 1 | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // FuzzCSVDecoder tests decoding a CSV format result list.
 | ||||||
|  | func FuzzCSVDecoder(fuzz []byte) int { | ||||||
|  | 	decoder := NewCSVDecoder(bytes.NewReader(fuzz)) | ||||||
|  | 	ok := readAllResults(decoder) | ||||||
|  | 	if !ok { | ||||||
|  | 		return 0 | ||||||
|  | 	} | ||||||
|  | 	return 1 | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // FuzzJSONDecoder tests decoding a JSON format result list.
 | ||||||
|  | func FuzzJSONDecoder(fuzz []byte) int { | ||||||
|  | 	decoder := NewJSONDecoder(bytes.NewReader(fuzz)) | ||||||
|  | 	ok := readAllResults(decoder) | ||||||
|  | 	if !ok { | ||||||
|  | 		return 0 | ||||||
|  | 	} | ||||||
|  | 	return 1 | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | func readAllResults(decoder Decoder) (ok bool) { | ||||||
|  | 	for { | ||||||
|  | 		result := &Result{} | ||||||
|  | 		err := decoder.Decode(result) | ||||||
|  | 		if err == io.EOF { | ||||||
|  | 			return true | ||||||
|  | 		} else if err != nil { | ||||||
|  | 			return false | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  | @ -270,7 +270,7 @@ func NewHTTPTargeter(src io.Reader, body []byte, hdr http.Header) Targeter { | ||||||
| 			} | 			} | ||||||
| 			line = strings.TrimSpace(sc.Text()) | 			line = strings.TrimSpace(sc.Text()) | ||||||
| 
 | 
 | ||||||
| 			if len(line) != 0 && line[0] != '#'{ | 			if len(line) != 0 && line[0] != '#' { | ||||||
| 				break | 				break | ||||||
| 			} | 			} | ||||||
| 		} | 		} | ||||||
|  | @ -300,6 +300,8 @@ func NewHTTPTargeter(src io.Reader, body []byte, hdr http.Header) Targeter { | ||||||
| 		for sc.Scan() { | 		for sc.Scan() { | ||||||
| 			if line = strings.TrimSpace(sc.Text()); line == "" { | 			if line = strings.TrimSpace(sc.Text()); line == "" { | ||||||
| 				break | 				break | ||||||
|  | 			} else if strings.HasPrefix(line, "#") { | ||||||
|  | 				continue | ||||||
| 			} else if strings.HasPrefix(line, "@") { | 			} else if strings.HasPrefix(line, "@") { | ||||||
| 				if tgt.Body, err = ioutil.ReadFile(line[1:]); err != nil { | 				if tgt.Body, err = ioutil.ReadFile(line[1:]); err != nil { | ||||||
| 					return fmt.Errorf("bad body: %s", err) | 					return fmt.Errorf("bad body: %s", err) | ||||||
|  | @ -327,7 +329,7 @@ func NewHTTPTargeter(src io.Reader, body []byte, hdr http.Header) Targeter { | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| var httpMethodChecker = regexp.MustCompile("^[A-Z]+\\s") | var httpMethodChecker = regexp.MustCompile(`^[A-Z]+\s`) | ||||||
| 
 | 
 | ||||||
| // A line starts with an http method when the first word is uppercase ascii
 | // A line starts with an http method when the first word is uppercase ascii
 | ||||||
| // followed by a space.
 | // followed by a space.
 | ||||||
|  | @ -0,0 +1,68 @@ | ||||||
|  | // +build gofuzz
 | ||||||
|  | 
 | ||||||
|  | package vegeta | ||||||
|  | 
 | ||||||
|  | import ( | ||||||
|  | 	"bytes" | ||||||
|  | 	"net/http" | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | // FuzzHTTPTargeter tests decoding an HTTP encoded target list.
 | ||||||
|  | func FuzzHTTPTargeter(fuzz []byte) int { | ||||||
|  | 	headers, body, fuzz, ok := decodeFuzzTargetDefaults(fuzz) | ||||||
|  | 	if !ok { | ||||||
|  | 		return -1 | ||||||
|  | 	} | ||||||
|  | 	targeter := NewHTTPTargeter( | ||||||
|  | 		bytes.NewReader(fuzz), | ||||||
|  | 		body, | ||||||
|  | 		headers, | ||||||
|  | 	) | ||||||
|  | 	_, err := ReadAllTargets(targeter) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return 0 | ||||||
|  | 	} | ||||||
|  | 	return 1 | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | // FuzzJSONTargeter tests decoding a JSON encoded target list.
 | ||||||
|  | func FuzzJSONTargeter(fuzz []byte) int { | ||||||
|  | 	headers, body, fuzz, ok := decodeFuzzTargetDefaults(fuzz) | ||||||
|  | 	if !ok { | ||||||
|  | 		return -1 | ||||||
|  | 	} | ||||||
|  | 	targeter := NewJSONTargeter( | ||||||
|  | 		bytes.NewReader(fuzz), | ||||||
|  | 		body, | ||||||
|  | 		headers, | ||||||
|  | 	) | ||||||
|  | 	_, err := ReadAllTargets(targeter) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return 0 | ||||||
|  | 	} | ||||||
|  | 	return 1 | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | func decodeFuzzTargetDefaults(fuzz []byte) ( | ||||||
|  | 	headers http.Header, | ||||||
|  | 	body []byte, | ||||||
|  | 	rest []byte, | ||||||
|  | 	ok bool, | ||||||
|  | ) { | ||||||
|  | 	if len(fuzz) < 2 { | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	headers = make(map[string][]string) | ||||||
|  | 	body = []byte{} | ||||||
|  | 	rest = []byte{} | ||||||
|  | 	rest, ok = decodeFuzzHeaders(fuzz, headers) | ||||||
|  | 	if !ok { | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	if len(rest) == 0 { | ||||||
|  | 		ok = true | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	body, rest, ok = extractFuzzByteString(rest) | ||||||
|  | 	return | ||||||
|  | } | ||||||
|  | @ -0,0 +1,120 @@ | ||||||
|  | // +build gofuzz
 | ||||||
|  | 
 | ||||||
|  | package vegeta | ||||||
|  | 
 | ||||||
|  | func decodeFuzzHeaders(fuzz []byte, headers map[string][]string) ( | ||||||
|  | 	rest []byte, | ||||||
|  | 	ok bool, | ||||||
|  | ) { | ||||||
|  | 	rest = fuzz | ||||||
|  | 	for { | ||||||
|  | 		if len(rest) == 0 { | ||||||
|  | 			// Consumed all fuzz
 | ||||||
|  | 			ok = true | ||||||
|  | 			return | ||||||
|  | 		} | ||||||
|  | 		if fuzz[0] == 0 { | ||||||
|  | 			// Headers terminated
 | ||||||
|  | 			if len(rest) == 1 { | ||||||
|  | 				rest = []byte{} | ||||||
|  | 			} else { | ||||||
|  | 				rest = rest[1:] | ||||||
|  | 			} | ||||||
|  | 			ok = true | ||||||
|  | 			return | ||||||
|  | 		} | ||||||
|  | 		if len(fuzz) == 1 { | ||||||
|  | 			// Invalid headers encoding
 | ||||||
|  | 			return | ||||||
|  | 		} | ||||||
|  | 		rest, ok = decodeFuzzHeader(rest[1:], headers) | ||||||
|  | 		if !ok { | ||||||
|  | 			return | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | func decodeFuzzHeader(fuzz []byte, headers map[string][]string) ( | ||||||
|  | 	rest []byte, | ||||||
|  | 	ok bool, | ||||||
|  | ) { | ||||||
|  | 	if len(fuzz) == 0 { | ||||||
|  | 		ok = true | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	name, rest, ok := extractFuzzString(fuzz) | ||||||
|  | 	if !ok { | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	value, rest, ok := extractFuzzString(rest) | ||||||
|  | 	if !ok { | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	if header, ok := headers[name]; ok { | ||||||
|  | 		headers[name] = append(header, value) | ||||||
|  | 	} else { | ||||||
|  | 		headers[name] = []string{value} | ||||||
|  | 	} | ||||||
|  | 	ok = true | ||||||
|  | 	return | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | func extractFuzzString(fuzz []byte) ( | ||||||
|  | 	value string, | ||||||
|  | 	rest []byte, | ||||||
|  | 	ok bool, | ||||||
|  | ) { | ||||||
|  | 	if len(fuzz) < 2 { | ||||||
|  | 		// Invalid string encoding
 | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	length := int(fuzz[0]) | ||||||
|  | 	if length == 0 { | ||||||
|  | 		// Invalid length
 | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	if len(fuzz) < (length + 1) { | ||||||
|  | 		// Insufficient fuzz
 | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	value = string(fuzz[1 : length+1]) | ||||||
|  | 	if len(fuzz) == (length + 1) { | ||||||
|  | 		// Consumed all fuzz
 | ||||||
|  | 		rest = []byte{} | ||||||
|  | 	} else { | ||||||
|  | 		// More fuzz
 | ||||||
|  | 		rest = fuzz[length+1:] | ||||||
|  | 	} | ||||||
|  | 	ok = true | ||||||
|  | 	return | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | func extractFuzzByteString(fuzz []byte) ( | ||||||
|  | 	value []byte, | ||||||
|  | 	rest []byte, | ||||||
|  | 	ok bool, | ||||||
|  | ) { | ||||||
|  | 	if len(fuzz) < 2 { | ||||||
|  | 		// Invalid byte string encoding
 | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	length := int(fuzz[0]) | ||||||
|  | 	if length == 0 { | ||||||
|  | 		// Invalid length
 | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	if len(fuzz) < (length + 1) { | ||||||
|  | 		// Insufficient fuzz
 | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  | 	value = fuzz[1 : length+1] | ||||||
|  | 	if len(fuzz) == (length + 1) { | ||||||
|  | 		// Consumed all fuzz
 | ||||||
|  | 		rest = []byte{} | ||||||
|  | 	} else { | ||||||
|  | 		// More fuzz
 | ||||||
|  | 		rest = fuzz[length+1:] | ||||||
|  | 	} | ||||||
|  | 	ok = true | ||||||
|  | 	return | ||||||
|  | } | ||||||
|  | @ -206,6 +206,7 @@ github.com/kelseyhightower/envconfig | ||||||
| # github.com/konsorten/go-windows-terminal-sequences v1.0.3 | # github.com/konsorten/go-windows-terminal-sequences v1.0.3 | ||||||
| github.com/konsorten/go-windows-terminal-sequences | github.com/konsorten/go-windows-terminal-sequences | ||||||
| # github.com/mailru/easyjson v0.7.1-0.20191009090205-6c0755d89d1e | # github.com/mailru/easyjson v0.7.1-0.20191009090205-6c0755d89d1e | ||||||
|  | github.com/mailru/easyjson | ||||||
| github.com/mailru/easyjson/buffer | github.com/mailru/easyjson/buffer | ||||||
| github.com/mailru/easyjson/jlexer | github.com/mailru/easyjson/jlexer | ||||||
| github.com/mailru/easyjson/jwriter | github.com/mailru/easyjson/jwriter | ||||||
|  | @ -260,9 +261,9 @@ github.com/sirupsen/logrus | ||||||
| # github.com/spf13/pflag v1.0.5 | # github.com/spf13/pflag v1.0.5 | ||||||
| ## explicit | ## explicit | ||||||
| github.com/spf13/pflag | github.com/spf13/pflag | ||||||
| # github.com/tsenart/vegeta v12.7.1-0.20190725001342-b5f4fca92137+incompatible | # github.com/tsenart/vegeta/v12 v12.8.4 | ||||||
| ## explicit | ## explicit | ||||||
| github.com/tsenart/vegeta/lib | github.com/tsenart/vegeta/v12/lib | ||||||
| # go.opencensus.io v0.22.4 | # go.opencensus.io v0.22.4 | ||||||
| ## explicit | ## explicit | ||||||
| go.opencensus.io | go.opencensus.io | ||||||
|  |  | ||||||
		Loading…
	
		Reference in New Issue