mirror of https://github.com/knative/caching.git
Auto-update dependencies (#233)
Produced via: `./hack/update-deps.sh --upgrade && ./hack/update-codegen.sh`
/assign n3wscott vagababov
/cc n3wscott vagababov
Parent: 565e010991
Commit: 0f1d90796c
@@ -966,7 +966,7 @@

 [[projects]]
   branch = "master"
-  digest = "1:73d6a1f3f7a178275499102ae27b5177a91aaccc57e3a0328e628cb3ced708f8"
+  digest = "1:7fce3b5a8d117763efbf4b6b9862940a5472d276022b0b324b39b70ef4fa77bd"
   name = "knative.dev/pkg"
   packages = [
     "apis",
@@ -986,18 +986,18 @@
     "reconciler",
   ]
   pruneopts = "T"
-  revision = "be54585f8f04fe39ce6b6d28d78809818b724734"
+  revision = "42d1b005c814b8f9ea786ff89c90c91448c1417a"

 [[projects]]
   branch = "master"
-  digest = "1:115d2f6e72ee06327259bdba21d26f28c359c62464afd73538fbab66ae2b7698"
+  digest = "1:3f2366ce9a05503ac8da902b58e898c285cc9a972e0e89fda0b2a2fedcd4fb46"
   name = "knative.dev/test-infra"
   packages = [
     "scripts",
     "tools/dep-collector",
   ]
   pruneopts = "UT"
-  revision = "01c075fbeae4b089793fcca6fc855d31e1628cad"
+  revision = "e7f947d615d5eb623e80824f71d139a958c4019f"

 [[projects]]
   digest = "1:8730e0150dfb2b7e173890c8b9868e7a273082ef8e39f4940e3506a481cf895c"
@@ -1350,14 +1350,14 @@

 [[projects]]
   branch = "master"
-  digest = "1:e5bd21467544cbd14cb25553c5c78eb2e0e93baf9288a3c2ebaf1cf1fdb0c95f"
+  digest = "1:115d2f6e72ee06327259bdba21d26f28c359c62464afd73538fbab66ae2b7698"
   name = "knative.dev/test-infra"
   packages = [
     "scripts",
     "tools/dep-collector",
   ]
   pruneopts = "UT"
-  revision = "3f96c9e98f31595fe33eaa2d95f5e2170522acd7"
+  revision = "01c075fbeae4b089793fcca6fc855d31e1628cad"

 [[projects]]
   digest = "1:8730e0150dfb2b7e173890c8b9868e7a273082ef8e39f4940e3506a481cf895c"
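The three hunks above come from the dep lock file (Gopkg.lock) and simply bump the pinned revisions of knative.dev/pkg and knative.dev/test-infra, with the matching digest updates. To see what a bump like this actually pulls in, one can compare the two pinned revisions in the dependency's source repository; the commands below are only an illustrative sketch (knative.dev/pkg is hosted at github.com/knative/pkg):

  # Illustrative only: list the upstream knative.dev/pkg commits between the
  # old and new pinned revisions shown in the lock-file hunk above.
  git clone https://github.com/knative/pkg.git && cd pkg
  git log --oneline \
    be54585f8f04fe39ce6b6d28d78809818b724734..42d1b005c814b8f9ea786ff89c90c91448c1417a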
@@ -70,10 +70,11 @@ integration tests).
 Use the flags `--build-tests`, `--unit-tests` and `--integration-tests` to run a
 specific set of tests.

-To run a specific program as a test, use the `--run-test` flag, and provide the
+To run specific programs as a test, use the `--run-test` flag, and provide the
 program as the argument. If arguments are required for the program, pass
 everything as a single quotes argument. For example,
-`./presubmit-tests.sh --run-test "test/my/test data"`.
+`./presubmit-tests.sh --run-test "test/my/test data"`. This flag can be used
+repeatedly, and each one will be run in sequential order.

 The script will automatically skip all presubmit tests for PRs where all changed
 files are exempt of tests (e.g., a PR changing only the `OWNERS` file).
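Putting the documented flag behaviour together, a PR author can now queue several custom tests in one invocation. The following is only a usage sketch; the test paths and arguments are hypothetical:

  # Hypothetical invocation: each --run-test adds one program to the queue,
  # and the programs run sequentially in the order given. Quoting keeps a
  # program's arguments together as a single flag value.
  ./presubmit-tests.sh \
    --run-test "test/e2e/run-tests.sh --cluster prow-cluster" \
    --run-test test/unit/run-tests.sh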
@@ -327,6 +327,13 @@ function capture_output() {
   return ${failed}
 }

+# Print failed step, which could be highlighted by spyglass.
+# Parameters: $1...n - description of step that failed
+function step_failed() {
+  local spyglass_token="Step failed:"
+  echo "${spyglass_token} $@"
+}
+
 # Create a temporary file with the given extension in a way that works on both Linux and macOS.
 # Parameters: $1 - file name without extension (e.g. 'myfile_XXXX')
 #             $2 - file extension (e.g. 'xml')
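The new `step_failed` helper just echoes a fixed "Step failed:" token plus a description so that Prow's spyglass viewer can surface the failing step in the build log. The hunks that follow wrap every test phase in the same `cmd || { failed=1; step_failed "cmd"; }` pattern. A repository that sources these vendored scripts could reuse the helper in its own overrides; the sketch below is hypothetical and assumes `step_failed` is in scope after sourcing presubmit-tests.sh:

  # Hypothetical repo-level override of build_tests(), reusing step_failed
  # to label each failing sub-step. The verify script path is illustrative.
  function build_tests() {
    local failed=0
    ./hack/verify-codegen.sh || { failed=1; step_failed "verify-codegen"; }
    go build ./... || { failed=1; step_failed "go build ./..."; }
    return ${failed}
  }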
@@ -93,19 +93,19 @@ function run_build_tests() {
   local failed=0
   # Run pre-build tests, if any
   if function_exists pre_build_tests; then
-    pre_build_tests || failed=1
+    pre_build_tests || { failed=1; step_failed "pre_build_tests"; }
   fi
   # Don't run build tests if pre-build tests failed
   if (( ! failed )); then
     if function_exists build_tests; then
-      build_tests || failed=1
+      build_tests || { failed=1; step_failed "build_tests"; }
     else
-      default_build_test_runner || failed=1
+      default_build_test_runner || { failed=1; step_failed "default_build_test_runner"; }
     fi
   fi
   # Don't run post-build tests if pre/build tests failed
   if (( ! failed )) && function_exists post_build_tests; then
-    post_build_tests || failed=1
+    post_build_tests || { failed=1; step_failed "post_build_tests"; }
   fi
   results_banner "Build" ${failed}
   return ${failed}
@@ -213,19 +213,19 @@ function run_unit_tests() {
   local failed=0
   # Run pre-unit tests, if any
   if function_exists pre_unit_tests; then
-    pre_unit_tests || failed=1
+    pre_unit_tests || { failed=1; step_failed "pre_unit_tests"; }
   fi
   # Don't run unit tests if pre-unit tests failed
   if (( ! failed )); then
     if function_exists unit_tests; then
-      unit_tests || failed=1
+      unit_tests || { failed=1; step_failed "unit_tests"; }
     else
-      default_unit_test_runner || failed=1
+      default_unit_test_runner || { failed=1; step_failed "default_unit_test_runner"; }
     fi
   fi
   # Don't run post-unit tests if pre/unit tests failed
   if (( ! failed )) && function_exists post_unit_tests; then
-    post_unit_tests || failed=1
+    post_unit_tests || { failed=1; step_failed "post_unit_tests"; }
   fi
   results_banner "Unit" ${failed}
   return ${failed}
@@ -249,19 +249,19 @@ function run_integration_tests() {
   local failed=0
   # Run pre-integration tests, if any
   if function_exists pre_integration_tests; then
-    pre_integration_tests || failed=1
+    pre_integration_tests || { failed=1; step_failed "pre_integration_tests"; }
   fi
   # Don't run integration tests if pre-integration tests failed
   if (( ! failed )); then
     if function_exists integration_tests; then
-      integration_tests || failed=1
+      integration_tests || { failed=1; step_failed "integration_tests"; }
     else
-      default_integration_test_runner || failed=1
+      default_integration_test_runner || { failed=1; step_failed "default_integration_test_runner"; }
     fi
   fi
   # Don't run integration tests if pre/integration tests failed
   if (( ! failed )) && function_exists post_integration_tests; then
-    post_integration_tests || failed=1
+    post_integration_tests || { failed=1; step_failed "post_integration_tests"; }
   fi
   results_banner "Integration" ${failed}
   return ${failed}
@@ -275,6 +275,7 @@ function default_integration_test_runner() {
     echo "Running integration test ${e2e_test}"
     if ! ${e2e_test} ${options}; then
       failed=1
+      step_failed "${e2e_test} ${options}"
     fi
   done
   return ${failed}
@@ -327,7 +328,7 @@ function main() {

   [[ -z $1 ]] && set -- "--all-tests"

-  local TEST_TO_RUN=""
+  local TESTS_TO_RUN=()

   while [[ $# -ne 0 ]]; do
     local parameter=$1
@@ -343,7 +344,7 @@ function main() {
       --run-test)
         shift
         [[ $# -ge 1 ]] || abort "missing executable after --run-test"
-        TEST_TO_RUN="$1"
+        TESTS_TO_RUN+=("$1")
         ;;
       *) abort "error: unknown option ${parameter}" ;;
     esac
@@ -353,7 +354,7 @@ function main() {
   readonly RUN_BUILD_TESTS
   readonly RUN_UNIT_TESTS
   readonly RUN_INTEGRATION_TESTS
-  readonly TEST_TO_RUN
+  readonly TESTS_TO_RUN

   cd ${REPO_ROOT_DIR}

@@ -361,7 +362,7 @@ function main() {

   local failed=0

-  if [[ -n "${TEST_TO_RUN}" ]]; then
+  if [[ ${#TESTS_TO_RUN[@]} > 0 ]]; then
     if (( RUN_BUILD_TESTS || RUN_UNIT_TESTS || RUN_INTEGRATION_TESTS )); then
       abort "--run-test must be used alone"
     fi
@@ -370,17 +371,19 @@ function main() {
       header "Documentation only PR, skipping running custom test"
       exit 0
     fi
-    ${TEST_TO_RUN} || failed=1
+    for test_to_run in "${TESTS_TO_RUN[@]}"; do
+      ${test_to_run} || { failed=1; step_failed "${test_to_run}"; }
+    done
   fi

-  run_build_tests || failed=1
+  run_build_tests || { failed=1; step_failed "run_build_tests"; }
   # If PRESUBMIT_TEST_FAIL_FAST is set to true, don't run unit tests if build tests failed
   if (( ! PRESUBMIT_TEST_FAIL_FAST )) || (( ! failed )); then
-    run_unit_tests || failed=1
+    run_unit_tests || { failed=1; step_failed "run_unit_tests"; }
   fi
   # If PRESUBMIT_TEST_FAIL_FAST is set to true, don't run integration tests if build/unit tests failed
   if (( ! PRESUBMIT_TEST_FAIL_FAST )) || (( ! failed )); then
-    run_integration_tests || failed=1
+    run_integration_tests || { failed=1; step_failed "run_integration_tests"; }
   fi

   exit ${failed}
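The main() changes are the heart of the new behaviour: the single TEST_TO_RUN string becomes a TESTS_TO_RUN array, each `--run-test` occurrence appends to it, and the queued programs run one after another, each reporting itself through step_failed on failure. Below is a self-contained sketch of that parse-then-iterate pattern, with illustrative names rather than the script's full option handling:

  #!/usr/bin/env bash
  # Self-contained sketch: collect every value of a repeatable --run-test flag
  # into an array, then execute the entries in order, recording any failure.
  TESTS_TO_RUN=()
  while [[ $# -ne 0 ]]; do
    case "$1" in
      --run-test)
        shift
        [[ $# -ge 1 ]] || { echo "missing executable after --run-test" >&2; exit 1; }
        TESTS_TO_RUN+=("$1")
        ;;
      *) echo "unknown option $1" >&2; exit 1 ;;
    esac
    shift
  done

  failed=0
  for test_to_run in "${TESTS_TO_RUN[@]}"; do
    # Left unquoted on purpose, mirroring the script: a quoted flag value like
    # "prog arg1 arg2" is word-split back into a command and its arguments.
    ${test_to_run} || { failed=1; echo "Step failed: ${test_to_run}"; }
  done
  exit ${failed}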