Fix akamai cache purger bugs (#2443)

Fixes two bugs in the Akamai cache purging library and one in the `ocsp-updater`, and adds tests to the Akamai library.

* The first was that the backoff logic was broken: the backoff duration was calculated but then discarded, because the code assumed the sleep happened inside `core.RetryBackoff` when that function actually returns the amount of time to back off. The caller now sleeps on the returned duration (a minimal sketch of the pattern follows this list).
* The second was that the internal HTTP client only logged errors when they were fatal. This was superfluous, since the caller also logs fatal errors, and it masked what the actual issue was during retries.
* The last, in `ocsp-updater`, was that `path.Join` was used to build a URL, which is not an intended use of that function because it attempts to clean paths. As a result the scheme prefix `http://` would be 'cleaned' to `http:/`; since Akamai had no idea what the malformed URLs referred to, it returned 403 Forbidden, which we treated as a temporary error and retried until failure (a short demonstration of the `path.Join` behavior also follows the list).
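
For illustration, here is a minimal, self-contained sketch of the corrected retry pattern. `retryBackoff` below is a simplified, hypothetical stand-in for `core.RetryBackoff` (whose exact jitter and clamping differ); the point is only that the function returns the wait time and the caller must sleep on it.

```go
package main

import (
	"fmt"
	"math"
	"time"
)

// retryBackoff computes how long to wait before attempt i and returns that
// duration; it never sleeps itself. This mirrors the contract of
// core.RetryBackoff, but the formula here is a simplified illustration.
func retryBackoff(i int, base, max time.Duration, factor float64) time.Duration {
	if i == 0 {
		return 0
	}
	backoff := float64(base) * math.Pow(factor, float64(i-1))
	if backoff > float64(max) {
		backoff = float64(max)
	}
	return time.Duration(backoff)
}

func main() {
	for i := 0; i <= 3; i++ {
		wait := retryBackoff(i, 100*time.Millisecond, time.Minute, 1.3)
		// The bug: computing `wait` and then discarding it means the retries
		// run back to back. The fix: actually sleep on the returned value
		// (Boulder sleeps via its injected clock so tests can fake time).
		time.Sleep(wait)
		fmt.Printf("attempt %d ran after waiting %s\n", i, wait)
	}
}
```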
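
To make the last bullet concrete, here is a tiny demonstration of how `path.Join` mangles an absolute URL; the responder hostname and encoded request below are placeholders, not values taken from Boulder.

```go
package main

import (
	"fmt"
	"path"
)

func main() {
	// Hypothetical OCSP responder base URL and a placeholder for the
	// base64-encoded, query-escaped OCSP request.
	base := "http://ocsp.example.com/"
	req := "MEUwQw"

	// path.Join cleans its result, so the "//" in the scheme collapses
	// and Akamai is asked to purge a URL it has never served.
	fmt.Println(path.Join(base, req)) // http:/ocsp.example.com/MEUwQw

	// Plain string concatenation, as the fix does, keeps the scheme intact.
	fmt.Println(base + req) // http://ocsp.example.com/MEUwQw
}
```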
Roland Bracewell Shoemaker 2016-12-21 06:05:49 -08:00 committed by Daniel McCarney
parent d710dc9a2f
commit 5ca43d2985
3 changed files with 107 additions and 6 deletions


@@ -205,8 +205,11 @@ func (cpc *CachePurgeClient) purge(urls []string) error {
 	if err != nil {
 		return err
 	}
-	if purgeInfo.HTTPStatus != 201 || resp.StatusCode != 201 {
-		return fmt.Errorf("Incorrect HTTP status code: %s", string(body))
+	if purgeInfo.HTTPStatus != http.StatusCreated || resp.StatusCode != http.StatusCreated {
+		if purgeInfo.HTTPStatus == http.StatusForbidden {
+			return errFatal(fmt.Sprintf("Unauthorized to purge URLs %q", urls))
+		}
+		return fmt.Errorf("Unexpected HTTP status code '%d': %s", resp.StatusCode, string(body))
 	}
 	cpc.log.Info(fmt.Sprintf(
@@ -224,15 +227,15 @@ func (cpc *CachePurgeClient) purge(urls []string) error {
 func (cpc *CachePurgeClient) Purge(urls []string) error {
 	successful := false
 	for i := 0; i <= cpc.retries; i++ {
-		core.RetryBackoff(i, cpc.retryBackoff, time.Minute, 1.3)
+		cpc.clk.Sleep(core.RetryBackoff(i, cpc.retryBackoff, time.Minute, 1.3))
 		err := cpc.purge(urls)
 		if err != nil {
 			if _, ok := err.(errFatal); ok {
 				cpc.log.AuditErr(err.Error())
 				cpc.stats.Inc("FatalFailures", 1)
 				return err
 			}
 			cpc.log.AuditErr(fmt.Sprintf("Akamai cache purge failed, retrying: %s", err.Error()))
 			cpc.stats.Inc("RetryableFailures", 1)
 			continue
 		}


@@ -2,13 +2,18 @@ package akamai
import (
	"bytes"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/http/httptest"
	"strings"
	"testing"
	"time"
	"github.com/jmhodges/clock"
	blog "github.com/letsencrypt/boulder/log"
	"github.com/letsencrypt/boulder/metrics"
	"github.com/letsencrypt/boulder/test"
)
@@ -49,3 +54,93 @@ func TestConstructAuthHeader(t *testing.T) {
 	test.AssertNotError(t, err, "Failed to create authorization header")
 	test.AssertEquals(t, authHeader, expectedHeader)
 }
+
+type akamaiServer struct {
+	responseCode int
+}
+
+func (as *akamaiServer) akamaiHandler(w http.ResponseWriter, r *http.Request) {
+	var req struct {
+		Objects []string
+		Type    string
+		Action  string
+	}
+	body, err := ioutil.ReadAll(r.Body)
+	if err != nil {
+		fmt.Printf("Failed to read request body: %s\n", err)
+		w.WriteHeader(http.StatusInternalServerError)
+		return
+	}
+	err = json.Unmarshal(body, &req)
+	if err != nil {
+		fmt.Printf("Failed to unmarshal request body: %s\n", err)
+		w.WriteHeader(http.StatusInternalServerError)
+		return
+	}
+	fmt.Printf("Request: %#v\n", req)
+	var resp struct {
+		HTTPStatus       int
+		Detail           string
+		EstimatedSeconds int
+		PurgeID          string
+	}
+	resp.HTTPStatus = as.responseCode
+	resp.Detail = "?"
+	resp.EstimatedSeconds = 10
+	resp.PurgeID = "?"
+	for _, testURL := range req.Objects {
+		if !strings.HasPrefix(testURL, "http://") {
+			resp.HTTPStatus = http.StatusForbidden
+			break
+		}
+	}
+	respBytes, err := json.Marshal(resp)
+	if err != nil {
+		fmt.Printf("Failed to marshal response body: %s\n", err)
+		w.WriteHeader(http.StatusInternalServerError)
+		return
+	}
+	w.WriteHeader(as.responseCode)
+	w.Write(respBytes)
+}
+
+func TestPurge(t *testing.T) {
+	log := blog.NewMock()
+	as := akamaiServer{responseCode: http.StatusCreated}
+	m := http.NewServeMux()
+	server := httptest.NewUnstartedServer(m)
+	m.HandleFunc("/", as.akamaiHandler)
+	server.Start()
+	client, err := NewCachePurgeClient(
+		server.URL,
+		"token",
+		"secret",
+		"accessToken",
+		3,
+		time.Second,
+		log,
+		metrics.NewNoopScope(),
+	)
+	test.AssertNotError(t, err, "Failed to create CachePurgeClient")
+	fc := clock.NewFake()
+	client.clk = fc
+	err = client.Purge([]string{"http://test.com"})
+	test.AssertNotError(t, err, "Purge failed with 201 response")
+	started := fc.Now()
+	as.responseCode = http.StatusInternalServerError
+	err = client.Purge([]string{"http://test.com"})
+	test.AssertError(t, err, "Purge didn't fail with 500 response")
+	test.Assert(t, fc.Since(started) > (time.Second*4), "Retries should've taken at least 4.4 seconds")
+	started = fc.Now()
+	as.responseCode = http.StatusCreated
+	err = client.Purge([]string{"http:/test.com"})
+	test.AssertError(t, err, "Purge didn't fail with 403 response from malformed URL")
+	test.Assert(t, fc.Since(started) < time.Second, "Purge should've failed out immediately")
+}


@@ -9,7 +9,7 @@ import (
 	"fmt"
 	"net/url"
 	"os"
-	"path"
+	"strings"
 	"time"
 	"github.com/jmhodges/clock"
@@ -218,9 +218,12 @@ func (updater *OCSPUpdater) sendPurge(der []byte) {
 	// do GET)
 	urls := []string{}
 	for _, ocspServer := range cert.OCSPServer {
+		if !strings.HasSuffix(ocspServer, "/") {
+			ocspServer += "/"
+		}
 		urls = append(
 			urls,
-			path.Join(ocspServer, url.QueryEscape(base64.StdEncoding.EncodeToString(req))),
+			fmt.Sprintf("%s%s", ocspServer, url.QueryEscape(base64.StdEncoding.EncodeToString(req))),
 		)
 	}